HADOOP-6919. New metrics2 framework. Contributed by Luke Lu.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1100113 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Arun Murthy 2011-05-06 07:28:43 +00:00
parent a213c5676c
commit 827401a9b1
101 changed files with 9356 additions and 2 deletions

View File

@ -24,6 +24,8 @@ Trunk (unreleased changes)
HADOOP-7257 Client side mount tables (sanjay) HADOOP-7257 Client side mount tables (sanjay)
HADOOP-6919. New metrics2 framework. (Luke Lu via acmurthy)
IMPROVEMENTS IMPROVEMENTS
HADOOP-7042. Updates to test-patch.sh to include failed test names and HADOOP-7042. Updates to test-patch.sh to include failed test names and

View File

@ -519,6 +519,13 @@
</jar> </jar>
</target> </target>
<target name="metrics2.jar" depends="compile-core" description="Make the Hadoop metrics2 framework jar (for use in plugin development)">
<jar jarfile="${build.dir}/hadoop-metrics2-${version}.jar"
basedir="${build.classes}">
<include name="**/metrics2/**" />
</jar>
</target>
<target name="generate-test-records" depends="compile-rcc-compiler"> <target name="generate-test-records" depends="compile-rcc-compiler">
<recordcc destdir="${test.generated.dir}"> <recordcc destdir="${test.generated.dir}">
<fileset dir="${test.src.dir}" <fileset dir="${test.src.dir}"

View File

@ -0,0 +1,16 @@
# syntax: [prefix].[source|sink].[instance].[options]
# See the package-info.java javadoc in org.apache.hadoop.metrics2 for details
*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
#namenode.sink.file.filename=namenode-metrics.out
#datanode.sink.file.filename=datanode-metrics.out
#jobtracker.sink.file.filename=jobtracker-metrics.out
#tasktracker.sink.file.filename=tasktracker-metrics.out
#maptask.sink.file.filename=maptask-metrics.out
#reducetask.sink.file.filename=reducetask-metrics.out

12
ivy.xml
View File

@ -301,5 +301,17 @@
rev="${jsch.version}" rev="${jsch.version}"
conf="common->default"> conf="common->default">
</dependency> </dependency>
<dependency org="commons-configuration"
name="commons-configuration"
rev="${commons-configuration.version}"
conf="common->default"/>
<dependency org="org.apache.commons"
name="commons-math"
rev="${commons-math.version}"
conf="common->default"/>
<dependency org="com.google.guava"
name="guava"
rev="${guava.version}"
conf="common->default"/>
</dependencies> </dependencies>
</ivy-module> </ivy-module>

View File

@ -135,5 +135,20 @@
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>1.6</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math</artifactId>
<version>2.1</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>r07</version>
</dependency>
</dependencies> </dependencies>
</project> </project>

View File

@ -49,5 +49,10 @@
<artifactId>ftpserver-deprecated</artifactId> <artifactId>ftpserver-deprecated</artifactId>
<version>1.0.0-M2</version> <version>1.0.0-M2</version>
</dependency> </dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.8.5</version>
</dependency>
</dependencies> </dependencies>
</project> </project>

View File

@ -24,6 +24,7 @@ commons-cli.version=1.2
commons-cli2.version=2.0-mahout commons-cli2.version=2.0-mahout
commons-codec.version=1.4 commons-codec.version=1.4
commons-collections.version=3.1 commons-collections.version=3.1
commons-configuration.version=1.6
commons-httpclient.version=3.1 commons-httpclient.version=3.1
commons-lang.version=2.5 commons-lang.version=2.5
commons-logging.version=1.1.1 commons-logging.version=1.1.1
@ -31,6 +32,7 @@ commons-logging-api.version=1.1
commons-el.version=1.0 commons-el.version=1.0
commons-fileupload.version=1.2 commons-fileupload.version=1.2
commons-io.version=1.4 commons-io.version=1.4
commons-math.version=2.1
commons-net.version=1.4.1 commons-net.version=1.4.1
core.version=3.1.1 core.version=3.1.1
coreplugin.version=1.3.2 coreplugin.version=1.3.2
@ -39,6 +41,8 @@ ftplet-api.version=1.0.0
ftpserver-core.version=1.0.0 ftpserver-core.version=1.0.0
ftpserver-deprecated.version=1.0.0-M2 ftpserver-deprecated.version=1.0.0-M2
guava.version=r09
hsqldb.version=1.8.0.10 hsqldb.version=1.8.0.10
ivy.version=2.1.0 ivy.version=2.1.0
@ -77,7 +81,7 @@ xerces.version=1.4.4
aspectj.version=1.6.5 aspectj.version=1.6.5
mockito-all.version=1.8.2 mockito-all.version=1.8.5
jsch.version=0.1.42 jsch.version=0.1.42

View File

@ -42,9 +42,11 @@ import org.apache.hadoop.metrics.spi.OutputRecord;
* myContextName.fileName=/tmp/metrics.log * myContextName.fileName=/tmp/metrics.log
* myContextName.period=5 * myContextName.period=5
* </pre> * </pre>
* @deprecated use {@link org.apache.hadoop.metrics2.sink.FileSink} instead.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Public
@InterfaceStability.Evolving @InterfaceStability.Evolving
@Deprecated
public class FileContext extends AbstractMetricsContext { public class FileContext extends AbstractMetricsContext {
/* Configuration attribute names */ /* Configuration attribute names */

View File

@ -0,0 +1,92 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import com.google.common.base.Objects;
import static com.google.common.base.Preconditions.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The immutable metric: base class for metric value snapshots produced by
 * the metrics2 framework. Holds the metric metadata ({@link MetricsInfo});
 * subclasses supply the value and type.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class AbstractMetric implements MetricsInfo {

  private final MetricsInfo info;

  /**
   * Construct the metric
   * @param info about the metric; must not be null
   */
  protected AbstractMetric(MetricsInfo info) {
    this.info = checkNotNull(info, "metric info");
  }

  @Override
  public String name() {
    return info.name();
  }

  @Override
  public String description() {
    return info.description();
  }

  protected MetricsInfo info() {
    return info;
  }

  /**
   * Get the value of the metric
   * @return the value of the metric
   */
  public abstract Number value();

  /**
   * Get the type of the metric
   * @return the type of the metric
   */
  public abstract MetricType type();

  /**
   * Accept a visitor interface
   * @param visitor of the metric
   */
  public abstract void visit(MetricsVisitor visitor);

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof AbstractMetric)) {
      return false;
    }
    final AbstractMetric that = (AbstractMetric) obj;
    return Objects.equal(info, that.info())
        && Objects.equal(value(), that.value());
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(info, value());
  }

  @Override
  public String toString() {
    return Objects.toStringHelper(this)
        .add("info", info)
        .add("value", value())
        .toString();
  }
}

View File

@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
/**
 * The kind of a metric: determines how consumers should interpret
 * successive values of the metric.
 */
public enum MetricType {
  /**
   * A monotonically increasing metric that can be used
   * to calculate throughput
   */
  COUNTER,

  /**
   * An arbitrary varying metric
   */
  GAUGE
}

View File

@ -0,0 +1,43 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The metrics collector interface: a builder-style object into which
 * {@link MetricsSource} implementations write their metrics records.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface MetricsCollector {

  /**
   * Add a metrics record by name.
   * @param name of the record
   * @return a metrics record builder for the record
   */
  public MetricsRecordBuilder addRecord(String name);

  /**
   * Add a metrics record with full metadata (name and description.)
   * @param info of the record
   * @return a metrics record builder for the record
   */
  public MetricsRecordBuilder addRecord(MetricsInfo info);
}

View File

@ -0,0 +1,56 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * A general metrics exception wrapper. Unchecked (extends
 * {@link RuntimeException}), so callers are not forced to catch it.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MetricsException extends RuntimeException {
  private static final long serialVersionUID = 1L;

  /**
   * Construct the exception with a message
   * @param message for the exception
   */
  public MetricsException(String message) {
    super(message);
  }

  /**
   * Construct the exception with a message and a cause
   * @param message for the exception
   * @param cause of the exception
   */
  public MetricsException(String message, Throwable cause) {
    super(message, cause);
  }

  /**
   * Construct the exception with a cause
   * @param cause of the exception
   */
  public MetricsException(Throwable cause) {
    super(cause);
  }
}

View File

@ -0,0 +1,61 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The metrics filter interface. A filter can be applied to names, tags,
 * collections of tags, and whole records to decide what is accepted.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class MetricsFilter implements MetricsPlugin {

  /**
   * Whether to accept the name
   * @param name to filter on
   * @return true to accept; false otherwise.
   */
  public abstract boolean accepts(String name);

  /**
   * Whether to accept the tag
   * @param tag to filter on
   * @return true to accept; false otherwise
   */
  public abstract boolean accepts(MetricsTag tag);

  /**
   * Whether to accept the tags
   * @param tags to filter on
   * @return true to accept; false otherwise
   */
  public abstract boolean accepts(Iterable<MetricsTag> tags);

  /**
   * Whether to accept the record. The default decision is delegated to
   * the record's tags.
   * @param record to filter on
   * @return true to accept; false otherwise.
   */
  public boolean accepts(MetricsRecord record) {
    return accepts(record.tags());
  }
}

View File

@ -0,0 +1,39 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * Interface to provide immutable meta info (name and description)
 * for metrics and tags.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface MetricsInfo {
  /**
   * @return the name of the metric/tag
   */
  String name();

  /**
   * @return the description of the metric/tag
   */
  String description();
}

View File

@ -0,0 +1,36 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The plugin interface for the metrics framework. Plugins (sinks, filters,
 * etc.) are configured via a Commons Configuration subset.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface MetricsPlugin {
  /**
   * Initialize the plugin
   * @param conf the configuration object for the plugin
   */
  void init(SubsetConfiguration conf);
}

View File

@ -0,0 +1,66 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import java.util.Collection;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * An immutable snapshot of metrics with a timestamp, identified by a
 * record name and a context name, carrying tags and metric values.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface MetricsRecord {
  /**
   * Get the timestamp of the metrics
   * @return the timestamp
   */
  long timestamp();

  /**
   * @return the record name
   */
  String name();

  /**
   * @return the description of the record
   */
  String description();

  /**
   * @return the context name of the record
   */
  String context();

  /**
   * Get the tags of the record
   * Note: returning a collection instead of iterable as we
   * need to use tags as keys (hence Collection#hashCode etc.) in maps
   * @return an unmodifiable collection of tags
   */
  Collection<MetricsTag> tags();

  /**
   * Get the metrics of the record
   * @return an immutable iterable interface for metrics
   */
  Iterable<AbstractMetric> metrics();
}

View File

@ -0,0 +1,117 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The metrics record builder interface: accumulates tags and metric
 * values for a single record. All mutators return {@code this} so calls
 * can be chained fluently.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class MetricsRecordBuilder {
  /**
   * Add a metrics tag
   * @param info  metadata of the tag
   * @param value of the tag
   * @return self
   */
  public abstract MetricsRecordBuilder tag(MetricsInfo info, String value);

  /**
   * Add an immutable metrics tag object
   * @param tag a pre-made tag object (potentially save an object construction)
   * @return self
   */
  public abstract MetricsRecordBuilder add(MetricsTag tag);

  /**
   * Add a pre-made immutable metric object
   * @param metric the pre-made metric to save an object construction
   * @return self
   */
  public abstract MetricsRecordBuilder add(AbstractMetric metric);

  /**
   * Set the context tag
   * @param value of the context
   * @return self
   */
  public abstract MetricsRecordBuilder setContext(String value);

  /**
   * Add an integer counter metric
   * @param info  metadata of the metric
   * @param value of the metric
   * @return self
   */
  public abstract MetricsRecordBuilder addCounter(MetricsInfo info, int value);

  /**
   * Add a long counter metric
   * @param info  metadata of the metric
   * @param value of the metric
   * @return self
   */
  public abstract MetricsRecordBuilder addCounter(MetricsInfo info, long value);

  /**
   * Add an integer gauge metric
   * @param info  metadata of the metric
   * @param value of the metric
   * @return self
   */
  public abstract MetricsRecordBuilder addGauge(MetricsInfo info, int value);

  /**
   * Add a long gauge metric
   * @param info  metadata of the metric
   * @param value of the metric
   * @return self
   */
  public abstract MetricsRecordBuilder addGauge(MetricsInfo info, long value);

  /**
   * Add a float gauge metric
   * @param info  metadata of the metric
   * @param value of the metric
   * @return self
   */
  public abstract MetricsRecordBuilder addGauge(MetricsInfo info, float value);

  /**
   * Add a double gauge metric
   * @param info  metadata of the metric
   * @param value of the metric
   * @return self
   */
  public abstract MetricsRecordBuilder addGauge(MetricsInfo info, double value);

  /**
   * @return the parent metrics collector object
   */
  public abstract MetricsCollector parent();

  /**
   * Syntactic sugar to add multiple records in a collector in a one liner.
   * @return the parent metrics collector object
   */
  public MetricsCollector endRecord() { return parent(); }
}

View File

@ -0,0 +1,40 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The metrics sink interface: consumes metrics records. A sink is also a
 * {@link MetricsPlugin} and is initialized with its own configuration.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface MetricsSink extends MetricsPlugin {
  /**
   * Put a metrics record in the sink
   * @param record the record to put
   */
  void putMetrics(MetricsRecord record);

  /**
   * Flush any buffered metrics
   */
  void flush();
}

View File

@ -0,0 +1,36 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The metrics source interface: produces a snapshot of metrics
 * into a supplied collector on demand.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface MetricsSource {
  /**
   * Get metrics from the source
   * @param collector to contain the resulting metrics snapshot
   * @param all if true, return all metrics even if unchanged.
   */
  void getMetrics(MetricsCollector collector, boolean all);
}

View File

@ -0,0 +1,123 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The metrics system interface: registry for metrics sources and sinks,
 * also exposed as a JMX MBean via {@link MetricsSystemMXBean}.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class MetricsSystem implements MetricsSystemMXBean {

  /**
   * Initialize the metrics system with a configuration prefix.
   * @param prefix used to look up the system's configuration
   * @return the initialized metrics system (for call chaining)
   */
  @InterfaceAudience.Private
  public abstract MetricsSystem init(String prefix);

  /**
   * Register a metrics source
   * @param <T> the actual type of the source object
   * @param name of the source. Must be unique or null (then extracted from
   *     the annotations of the source object.)
   * @param desc the description of the source (or null. See above.)
   * @param source object to register
   * @return the source object
   * @exception MetricsException if the source cannot be registered
   */
  public abstract <T> T register(String name, String desc, T source);

  /**
   * Register a metrics source (deriving name and description from the object)
   * @param <T> the actual type of the source object
   * @param source object to register
   * @return the source object
   * @exception MetricsException if the source cannot be registered
   */
  public <T> T register(T source) {
    return register(null, null, source);
  }

  /**
   * @param name of the metrics source
   * @return the metrics source (potentially wrapped) object
   */
  @InterfaceAudience.Private
  public abstract MetricsSource getSource(String name);

  /**
   * Register a metrics sink
   * @param <T> the type of the sink
   * @param sink to register
   * @param name of the sink. Must be unique.
   * @param desc the description of the sink
   * @return the sink
   * @exception MetricsException if the sink cannot be registered
   */
  public abstract <T extends MetricsSink>
  T register(String name, String desc, T sink);

  /**
   * Register a callback interface for JMX events
   * @param callback the callback object implementing the MBean interface.
   */
  public abstract void register(Callback callback);

  /**
   * Shutdown the metrics system completely (usually during server shutdown.)
   * The MetricsSystemMXBean will be unregistered.
   * @return true if shutdown completed
   */
  public abstract boolean shutdown();

  /**
   * The metrics system callback interface (needed for proxies.)
   */
  public interface Callback {
    /**
     * Called before start()
     */
    void preStart();

    /**
     * Called after start()
     */
    void postStart();

    /**
     * Called before stop()
     */
    void preStop();

    /**
     * Called after stop()
     */
    void postStop();
  }

  /**
   * Convenient abstract class (no-op implementations) for implementing
   * the callback interface
   */
  public static abstract class AbstractCallback implements Callback {
    @Override public void preStart() {}
    @Override public void postStart() {}
    @Override public void preStop() {}
    @Override public void postStop() {}
  }
}

View File

@ -0,0 +1,63 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * The JMX interface to the metrics system, for operational control
 * (start/stop the system and its MBeans) and configuration inspection.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface MetricsSystemMXBean {
  /**
   * Start the metrics system
   * @throws MetricsException if the system fails to start
   */
  public void start();

  /**
   * Stop the metrics system
   * @throws MetricsException if the system fails to stop
   */
  public void stop();

  /**
   * Start metrics MBeans
   * @throws MetricsException if the MBeans fail to start
   */
  public void startMetricsMBeans();

  /**
   * Stop metrics MBeans.
   * Note, it doesn't stop the metrics system control MBean,
   * i.e this interface.
   * @throws MetricsException if the MBeans fail to stop
   */
  public void stopMetricsMBeans();

  /**
   * @return the current config
   * Avoided getConfig, as it'll turn into a "Config" attribute,
   * which doesn't support multiple line values in jconsole.
   * @throws MetricsException if the config cannot be rendered
   */
  public String currentConfig();
}

View File

@ -0,0 +1,88 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import com.google.common.base.Objects;
import static com.google.common.base.Preconditions.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * Immutable tag for metrics (for grouping on host/queue/username etc.)
 * A tag pairs metadata ({@link MetricsInfo}) with a string value.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MetricsTag implements MetricsInfo {

  private final MetricsInfo info;
  private final String value;

  /**
   * Construct the tag with name, description and value
   * @param info of the tag; must not be null
   * @param value of the tag (may be null)
   */
  public MetricsTag(MetricsInfo info, String value) {
    this.info = checkNotNull(info, "tag info");
    this.value = value;
  }

  @Override
  public String name() {
    return info.name();
  }

  @Override
  public String description() {
    return info.description();
  }

  /**
   * @return the info object of the tag
   */
  public MetricsInfo info() {
    return info;
  }

  /**
   * Get the value of the tag
   * @return the value
   */
  public String value() {
    return value;
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof MetricsTag)) {
      return false;
    }
    final MetricsTag that = (MetricsTag) obj;
    return Objects.equal(info, that.info())
        && Objects.equal(value, that.value());
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(info, value);
  }

  @Override
  public String toString() {
    return Objects.toStringHelper(this)
        .add("info", info)
        .add("value", value())
        .toString();
  }
}

View File

@ -0,0 +1,71 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* A visitor interface for metrics
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface MetricsVisitor {
  /**
   * Callback for integer value gauges
   * @param info the metric info (name and description)
   * @param value the snapshotted value of the metric
   */
  public void gauge(MetricsInfo info, int value);

  /**
   * Callback for long value gauges
   * @param info the metric info (name and description)
   * @param value the snapshotted value of the metric
   */
  public void gauge(MetricsInfo info, long value);

  /**
   * Callback for float value gauges
   * @param info the metric info (name and description)
   * @param value the snapshotted value of the metric
   */
  public void gauge(MetricsInfo info, float value);

  /**
   * Callback for double value gauges
   * @param info the metric info (name and description)
   * @param value the snapshotted value of the metric
   */
  public void gauge(MetricsInfo info, double value);

  /**
   * Callback for integer value counters
   * @param info the metric info (name and description)
   * @param value the snapshotted value of the metric
   */
  public void counter(MetricsInfo info, int value);

  /**
   * Callback for long value counters
   * @param info the metric info (name and description)
   * @param value the snapshotted value of the metric
   */
  public void counter(MetricsInfo info, long value);
}

View File

@ -0,0 +1,70 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.annotation;
import java.lang.annotation.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Annotation interface for a single metric
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@Documented
@Target({ElementType.FIELD, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Metric {

  // How the annotated member should be interpreted when DEFAULT is
  // insufficient (e.g. to force a counter vs. a gauge).
  public enum Type {
    DEFAULT, COUNTER, GAUGE, TAG
  }

  /**
   * Shorthand for optional name and description
   * @return {description} or {name, description}
   */
  String[] value() default {};

  /**
   * @return optional description of the metric
   */
  String about() default "";

  /**
   * @return optional sample name for MutableStat/Rate/Rates
   */
  String sampleName() default "Ops";

  /**
   * @return optional value name for MutableStat/Rate/Rates
   */
  String valueName() default "Time";

  /**
   * @return true to create a metric snapshot even if unchanged.
   */
  boolean always() default false;

  /**
   * @return optional type (counter|gauge) of the metric
   */
  Type type() default Type.DEFAULT;
}

View File

@ -0,0 +1,50 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.annotation;
import java.lang.annotation.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Annotation interface for a group of metrics
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@Documented
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface Metrics {
  /**
   * @return the (record) name of the metrics; empty means derive it elsewhere
   */
  String name() default "";

  /**
   * @return the optional description of metrics
   */
  String about() default "";

  /**
   * @return the context name for a group of metrics (required)
   */
  String context();
}

View File

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Annotation interfaces for metrics instrumentation, such as the
 * Metric and Metrics annotations declared in this package.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
package org.apache.hadoop.metrics2.annotation;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -0,0 +1,167 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.filter;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.collect.Maps;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsTag;
/**
* Base class for pattern based filters
*/
@InterfaceAudience.Private
public abstract class AbstractPatternFilter extends MetricsFilter {
  protected static final String INCLUDE_KEY = "include";
  protected static final String EXCLUDE_KEY = "exclude";
  protected static final String INCLUDE_TAGS_KEY = "include.tags";
  protected static final String EXCLUDE_TAGS_KEY = "exclude.tags";

  private Pattern includePattern;
  private Pattern excludePattern;
  private final Map<String, Pattern> includeTagPatterns;
  private final Map<String, Pattern> excludeTagPatterns;
  // Tag filter entries are configured as "<tagName>:<pattern>".
  private final Pattern tagPattern = Pattern.compile("^(\\w+):(.*)");

  AbstractPatternFilter() {
    includeTagPatterns = Maps.newHashMap();
    excludeTagPatterns = Maps.newHashMap();
  }

  /**
   * Initialize the filter from a configuration subset: a name pattern
   * under "include"/"exclude" and per-tag patterns under
   * "include.tags"/"exclude.tags".
   */
  @Override
  public void init(SubsetConfiguration conf) {
    String spec = conf.getString(INCLUDE_KEY);
    if (spec != null && !spec.isEmpty()) {
      setIncludePattern(compile(spec));
    }
    spec = conf.getString(EXCLUDE_KEY);
    if (spec != null && !spec.isEmpty()) {
      setExcludePattern(compile(spec));
    }
    String[] tagSpecs = conf.getStringArray(INCLUDE_TAGS_KEY);
    if (tagSpecs != null) {
      for (String tagSpec : tagSpecs) {
        Matcher matcher = tagPattern.matcher(tagSpec);
        if (!matcher.matches()) {
          throw new MetricsException("Illegal tag pattern: "+ tagSpec);
        }
        setIncludeTagPattern(matcher.group(1), compile(matcher.group(2)));
      }
    }
    tagSpecs = conf.getStringArray(EXCLUDE_TAGS_KEY);
    if (tagSpecs != null) {
      for (String tagSpec : tagSpecs) {
        Matcher matcher = tagPattern.matcher(tagSpec);
        if (!matcher.matches()) {
          throw new MetricsException("Illegal tag pattern: "+ tagSpec);
        }
        setExcludeTagPattern(matcher.group(1), compile(matcher.group(2)));
      }
    }
  }

  void setIncludePattern(Pattern includePattern) {
    this.includePattern = includePattern;
  }

  void setExcludePattern(Pattern excludePattern) {
    this.excludePattern = excludePattern;
  }

  void setIncludeTagPattern(String name, Pattern pattern) {
    includeTagPatterns.put(name, pattern);
  }

  void setExcludeTagPattern(String name, Pattern pattern) {
    excludeTagPatterns.put(name, pattern);
  }

  @Override
  public boolean accepts(MetricsTag tag) {
    // A whitelist match wins immediately.
    final Pattern includeFor = includeTagPatterns.get(tag.name());
    if (includeFor != null && includeFor.matcher(tag.value()).matches()) {
      return true;
    }
    // Then a blacklist match rejects.
    final Pattern excludeFor = excludeTagPatterns.get(tag.name());
    if (excludeFor != null && excludeFor.matcher(tag.value()).matches()) {
      return false;
    }
    // Whitelist-only mode for this tag: no match means reject.
    if (includeFor != null && excludeFor == null) {
      return false;
    }
    return true;
  }

  @Override
  public boolean accepts(Iterable<MetricsTag> tags) {
    // Any whitelist match on any tag wins immediately.
    for (MetricsTag tag : tags) {
      Pattern pattern = includeTagPatterns.get(tag.name());
      if (pattern != null && pattern.matcher(tag.value()).matches()) {
        return true;
      }
    }
    // Then any blacklist match rejects.
    for (MetricsTag tag : tags) {
      Pattern pattern = excludeTagPatterns.get(tag.name());
      if (pattern != null && pattern.matcher(tag.value()).matches()) {
        return false;
      }
    }
    // Whitelist-only mode: nothing matched, so reject.
    if (!includeTagPatterns.isEmpty() && excludeTagPatterns.isEmpty()) {
      return false;
    }
    return true;
  }

  @Override
  public boolean accepts(String name) {
    // A whitelist match wins immediately.
    if (includePattern != null && includePattern.matcher(name).matches()) {
      return true;
    }
    // Then a blacklist match rejects.
    if (excludePattern != null && excludePattern.matcher(name).matches()) {
      return false;
    }
    // Whitelist-only mode: no match means reject.
    if (includePattern != null && excludePattern == null) {
      return false;
    }
    return true;
  }

  /**
   * Compile a string pattern in to a pattern object
   * @param s the string pattern to compile
   * @return the compiled pattern object
   */
  protected abstract Pattern compile(String s);
}

View File

@ -0,0 +1,40 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.filter;
import java.util.regex.Pattern;
import org.apache.hadoop.fs.GlobPattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* A glob pattern filter for metrics.
*
* The class name is used in metrics config files
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class GlobFilter extends AbstractPatternFilter {
  /**
   * Translate a glob expression into a regex {@code Pattern}.
   * @param s the glob expression from the metrics config
   * @return the compiled pattern
   */
  @Override
  protected Pattern compile(String s) {
    // Glob-to-regex translation is delegated to the shared helper.
    return GlobPattern.compile(s);
  }
}

View File

@ -0,0 +1,37 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.filter;
import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* A regex pattern filter for metrics
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class RegexFilter extends AbstractPatternFilter {
  /**
   * Compile the configured expression directly as a Java regex.
   * @param s the regex from the metrics config
   * @return the compiled pattern
   */
  @Override
  protected Pattern compile(String s) {
    return Pattern.compile(s);
  }
}

View File

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Builtin metrics filters (referenced by class name in metrics config files),
 * e.g. glob and regex based include/exclude filters.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
package org.apache.hadoop.metrics2.filter;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -0,0 +1,54 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import com.google.common.base.Objects;
import com.google.common.collect.Iterables;
import org.apache.hadoop.metrics2.MetricsRecord;
/**
 * Common equals/hashCode/toString behavior for metrics record
 * implementations.
 */
abstract class AbstractMetricsRecord implements MetricsRecord {

  @Override public boolean equals(Object obj) {
    // Records are equal when every visible component matches.
    if (!(obj instanceof MetricsRecord)) {
      return false;
    }
    final MetricsRecord that = (MetricsRecord) obj;
    return Objects.equal(timestamp(), that.timestamp())
        && Objects.equal(name(), that.name())
        && Objects.equal(description(), that.description())
        && Objects.equal(tags(), that.tags())
        && Iterables.elementsEqual(metrics(), that.metrics());
  }

  // Should make sense most of the time when the record is used as a key;
  // deliberately excludes timestamp and metric values.
  @Override public int hashCode() {
    return Objects.hashCode(name(), description(), tags());
  }

  @Override public String toString() {
    return Objects.toStringHelper(this)
                  .add("timestamp", timestamp())
                  .add("name", name())
                  .add("description", description())
                  .add("tags", tags())
                  .add("metrics", Iterables.toString(metrics()))
                  .toString();
  }
}

View File

@ -0,0 +1,114 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.List;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanInfo;
import com.google.common.collect.Lists;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.MetricsVisitor;
/**
* Helper class to build MBeanInfo from metrics records
*/
/**
 * Helper class to build MBeanInfo from metrics records.
 *
 * Usage: {@code reset(records)} then {@code get()}; the visitor callbacks
 * are invoked by each metric's {@code visit} during {@code get()}.
 */
class MBeanInfoBuilder implements MetricsVisitor {

  private final String name, description;
  // Fix: assigned exactly once (in the constructor) and only mutated via
  // clear()/add(), so it should be (and now is) declared final.
  private final List<MBeanAttributeInfo> attrs;
  private Iterable<MetricsRecordImpl> recs;
  private int curRecNo;  // index of the record currently being visited

  MBeanInfoBuilder(String name, String desc) {
    this.name = name;
    description = desc;
    attrs = Lists.newArrayList();
  }

  /**
   * Start a new build over the given records.
   * @param recs the records to derive attributes from
   * @return this builder, for chaining
   */
  MBeanInfoBuilder reset(Iterable<MetricsRecordImpl> recs) {
    this.recs = recs;
    attrs.clear();
    return this;
  }

  MBeanAttributeInfo newAttrInfo(String name, String desc, String type) {
    return new MBeanAttributeInfo(getAttrName(name), type, desc,
                                  true, false, false); // read-only, non-is
  }

  MBeanAttributeInfo newAttrInfo(MetricsInfo info, String type) {
    return newAttrInfo(info.name(), info.description(), type);
  }

  @Override
  public void gauge(MetricsInfo info, int value) {
    attrs.add(newAttrInfo(info, "java.lang.Integer"));
  }

  @Override
  public void gauge(MetricsInfo info, long value) {
    attrs.add(newAttrInfo(info, "java.lang.Long"));
  }

  @Override
  public void gauge(MetricsInfo info, float value) {
    attrs.add(newAttrInfo(info, "java.lang.Float"));
  }

  @Override
  public void gauge(MetricsInfo info, double value) {
    attrs.add(newAttrInfo(info, "java.lang.Double"));
  }

  @Override
  public void counter(MetricsInfo info, int value) {
    attrs.add(newAttrInfo(info, "java.lang.Integer"));
  }

  @Override
  public void counter(MetricsInfo info, long value) {
    attrs.add(newAttrInfo(info, "java.lang.Long"));
  }

  // Attributes from records after the first get a ".<recNo>" suffix to
  // keep MBean attribute names unique.
  String getAttrName(String name) {
    return curRecNo > 0 ? name +"."+ curRecNo : name;
  }

  /**
   * Walk all records (tags first, then metrics) and assemble the MBeanInfo.
   * @return the built MBeanInfo
   */
  MBeanInfo get() {
    curRecNo = 0;
    for (MetricsRecordImpl rec : recs) {
      for (MetricsTag t : rec.tags()) {
        attrs.add(newAttrInfo("tag."+ t.name(), t.description(),
                  "java.lang.String"));
      }
      for (AbstractMetric m : rec.metrics()) {
        m.visit(this);
      }
      ++curRecNo;
    }
    MetricsSystemImpl.LOG.debug(attrs);
    MBeanAttributeInfo[] attrsArray = new MBeanAttributeInfo[attrs.size()];
    return new MBeanInfo(name, description, attrs.toArray(attrsArray),
                         null, null, null); // no ops/ctors/notifications
  }
}

View File

@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsVisitor;
/**
 * An immutable snapshot of an int-valued counter metric.
 */
class MetricCounterInt extends AbstractMetric {
  final int value;  // value captured at snapshot time

  MetricCounterInt(MetricsInfo info, int value) {
    super(info);
    this.value = value;
  }

  @Override public MetricType type() {
    return MetricType.COUNTER;
  }

  @Override public Integer value() {
    return value;
  }

  @Override public void visit(MetricsVisitor visitor) {
    // Dispatch to the int-counter callback.
    visitor.counter(this, value);
  }
}

View File

@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsVisitor;
/**
 * An immutable snapshot of a long-valued counter metric.
 */
class MetricCounterLong extends AbstractMetric {
  final long value;  // value captured at snapshot time

  MetricCounterLong(MetricsInfo info, long value) {
    super(info);
    this.value = value;
  }

  @Override public MetricType type() {
    return MetricType.COUNTER;
  }

  @Override public Long value() {
    return value;
  }

  @Override public void visit(MetricsVisitor visitor) {
    // Dispatch to the long-counter callback.
    visitor.counter(this, value);
  }
}

View File

@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsVisitor;
/**
 * An immutable snapshot of a double-valued gauge metric.
 */
class MetricGaugeDouble extends AbstractMetric {
  final double value;  // value captured at snapshot time

  MetricGaugeDouble(MetricsInfo info, double value) {
    super(info);
    this.value = value;
  }

  @Override public MetricType type() {
    return MetricType.GAUGE;
  }

  @Override public Double value() {
    return value;
  }

  @Override public void visit(MetricsVisitor visitor) {
    // Dispatch to the double-gauge callback.
    visitor.gauge(this, value);
  }
}

View File

@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsVisitor;
/**
 * An immutable snapshot of a float-valued gauge metric.
 */
class MetricGaugeFloat extends AbstractMetric {
  final float value;  // value captured at snapshot time

  MetricGaugeFloat(MetricsInfo info, float value) {
    super(info);
    this.value = value;
  }

  @Override public MetricType type() {
    return MetricType.GAUGE;
  }

  @Override public Float value() {
    return value;
  }

  @Override public void visit(MetricsVisitor visitor) {
    // Dispatch to the float-gauge callback.
    visitor.gauge(this, value);
  }
}

View File

@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsVisitor;
/**
 * An immutable snapshot of an int-valued gauge metric.
 */
class MetricGaugeInt extends AbstractMetric {
  final int value;  // value captured at snapshot time

  MetricGaugeInt(MetricsInfo info, int value) {
    super(info);
    this.value = value;
  }

  @Override public MetricType type() {
    return MetricType.GAUGE;
  }

  @Override public Integer value() {
    return value;
  }

  @Override public void visit(MetricsVisitor visitor) {
    // Dispatch to the int-gauge callback.
    visitor.gauge(this, value);
  }
}

View File

@ -0,0 +1,48 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsVisitor;
/**
 * An immutable snapshot of a long-valued gauge metric.
 */
class MetricGaugeLong extends AbstractMetric {
  final long value;  // value captured at snapshot time

  MetricGaugeLong(MetricsInfo info, long value) {
    super(info);
    this.value = value;
  }

  @Override public MetricType type() {
    return MetricType.GAUGE;
  }

  @Override public Long value() {
    return value;
  }

  @Override public void visit(MetricsVisitor visitor) {
    // Dispatch to the long-gauge callback.
    visitor.gauge(this, value);
  }
}

View File

@ -0,0 +1,57 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.Iterator;
/**
* An immutable element for the sink queues.
*/
/**
 * An immutable element for the sink queues: a view over per-source
 * record snapshots.
 */
class MetricsBuffer implements Iterable<MetricsBuffer.Entry> {

  private final Iterable<Entry> entries;

  MetricsBuffer(Iterable<MetricsBuffer.Entry> entries) {
    this.entries = entries;
  }

  @Override
  public Iterator<Entry> iterator() {
    return entries.iterator();
  }

  /** One source's worth of snapshotted records. */
  static class Entry {
    private final String sourceName;
    private final Iterable<MetricsRecordImpl> records;

    Entry(String name, Iterable<MetricsRecordImpl> records) {
      sourceName = name;
      this.records = records;
    }

    /** @return the name of the source the records came from */
    String name() {
      return sourceName;
    }

    /** @return the records snapshotted from the source */
    Iterable<MetricsRecordImpl> records() {
      return records;
    }
  }
}

View File

@ -0,0 +1,36 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.ArrayList;
/**
* Builder for the immutable metrics buffers
*/
/**
 * Builder for the immutable metrics buffers: accumulate entries, then
 * call {@link #get()} for the finished buffer.
 */
class MetricsBufferBuilder extends ArrayList<MetricsBuffer.Entry> {
  private static final long serialVersionUID = 1L;

  /** Append an entry for the named source. */
  boolean add(String name, Iterable<MetricsRecordImpl> records) {
    MetricsBuffer.Entry entry = new MetricsBuffer.Entry(name, records);
    return add(entry);
  }

  /** @return a MetricsBuffer wrapping the accumulated entries */
  MetricsBuffer get() {
    return new MetricsBuffer(this);
  }
}

View File

@ -0,0 +1,79 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.Iterator;
import java.util.List;
import com.google.common.collect.Lists;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsFilter;
import static org.apache.hadoop.metrics2.lib.Interns.*;
/**
 * Collector that accumulates record builders, applying the configured
 * record/metric filters as records are added.
 */
class MetricsCollectorImpl implements MetricsCollector,
    Iterable<MetricsRecordBuilderImpl> {

  private final List<MetricsRecordBuilderImpl> builders = Lists.newArrayList();
  private MetricsFilter recordFilter, metricFilter;

  @Override
  public MetricsRecordBuilderImpl addRecord(MetricsInfo info) {
    // A record passes unless the record filter explicitly rejects its name.
    boolean accepted =
        recordFilter == null || recordFilter.accepts(info.name());
    MetricsRecordBuilderImpl builder = new MetricsRecordBuilderImpl(
        this, info, recordFilter, metricFilter, accepted);
    // Rejected builders are still returned to the caller but never kept.
    if (accepted) {
      builders.add(builder);
    }
    return builder;
  }

  @Override
  public MetricsRecordBuilderImpl addRecord(String name) {
    return addRecord(info(name, name +" record"));
  }

  /** @return the records snapshotted from the accepted builders */
  public List<MetricsRecordImpl> getRecords() {
    List<MetricsRecordImpl> records =
        Lists.newArrayListWithCapacity(builders.size());
    for (MetricsRecordBuilderImpl builder : builders) {
      MetricsRecordImpl record = builder.getRecord();
      if (record != null) {
        records.add(record);
      }
    }
    return records;
  }

  @Override
  public Iterator<MetricsRecordBuilderImpl> iterator() {
    return builders.iterator();
  }

  void clear() { builders.clear(); }

  MetricsCollectorImpl setRecordFilter(MetricsFilter rf) {
    recordFilter = rf;
    return this;
  }

  MetricsCollectorImpl setMetricFilter(MetricsFilter mf) {
    metricFilter = mf;
    return this;
  }
}

View File

@ -0,0 +1,284 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import java.net.URLClassLoader;
import static java.security.AccessController.*;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsPlugin;
import org.apache.hadoop.metrics2.filter.GlobFilter;
/**
* Metrics configuration for MetricsSystemImpl
*/
/**
 * Metrics configuration for MetricsSystemImpl
 */
class MetricsConfig extends SubsetConfiguration {
  static final Log LOG = LogFactory.getLog(MetricsConfig.class);

  static final String DEFAULT_FILE_NAME = "hadoop-metrics2.properties";
  static final String PREFIX_DEFAULT = "*."; // prefix of default (fallback) options

  static final String PERIOD_KEY = "period";
  static final int PERIOD_DEFAULT = 10; // seconds

  static final String QUEUE_CAPACITY_KEY = "queue.capacity";
  static final int QUEUE_CAPACITY_DEFAULT = 1;

  static final String RETRY_DELAY_KEY = "retry.delay";
  static final int RETRY_DELAY_DEFAULT = 10; // seconds
  static final String RETRY_BACKOFF_KEY = "retry.backoff";
  static final int RETRY_BACKOFF_DEFAULT = 2; // back off factor
  static final String RETRY_COUNT_KEY = "retry.count";
  static final int RETRY_COUNT_DEFAULT = 1;

  static final String JMX_CACHE_TTL_KEY = "jmx.cache.ttl";
  static final String START_MBEANS_KEY = "source.start_mbeans";
  static final String PLUGIN_URLS_KEY = "plugin.urls";

  static final String CONTEXT_KEY = "context";
  static final String NAME_KEY = "name";
  static final String DESC_KEY = "description";
  static final String SOURCE_KEY = "source";
  static final String SINK_KEY = "sink";
  static final String METRIC_FILTER_KEY = "metric.filter";
  static final String RECORD_FILTER_KEY = "record.filter";
  static final String SOURCE_FILTER_KEY = "source.filter";

  // Matches "[instance].[option...]" keys; group(1) is the instance name.
  // '*' is excluded from the character class so the default instance never matches.
  static final Pattern INSTANCE_REGEX = Pattern.compile("([^.*]+)\\..+");
  static final Splitter SPLITTER = Splitter.on(',').trimResults();
  private ClassLoader pluginLoader; // lazily built from plugin.urls, see getPluginLoader

  MetricsConfig(Configuration c, String prefix) {
    super(c, prefix.toLowerCase(Locale.US), ".");
  }

  /**
   * Create a config for the prefix, trying the prefix-specific properties
   * file first, then the default file name.
   * @param prefix of the config (typically the daemon name)
   * @return the metrics config
   */
  static MetricsConfig create(String prefix) {
    return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
                     +".properties", DEFAULT_FILE_NAME);
  }

  static MetricsConfig create(String prefix, String... fileNames) {
    return loadFirst(prefix, fileNames);
  }

  /**
   * Load configuration from a list of files until the first successful load
   * @param prefix of the config
   * @param fileNames the list of filenames to try
   * @return the configuration object
   * @throws MetricsConfigException if no file can be located, or a file
   *         fails to load for a reason other than being missing
   */
  static MetricsConfig loadFirst(String prefix, String... fileNames) {
    for (String fname : fileNames) {
      try {
        Configuration cf = new PropertiesConfiguration(fname)
            .interpolatedConfiguration();
        LOG.info("loaded properties from "+ fname);
        LOG.debug(toString(cf));
        MetricsConfig mc = new MetricsConfig(cf, prefix);
        LOG.debug(mc);
        return mc;
      }
      catch (ConfigurationException e) {
        // a missing file is expected: try the next candidate
        if (e.getMessage().startsWith("Cannot locate configuration")) {
          continue;
        }
        throw new MetricsConfigException(e);
      }
    }
    throw new MetricsConfigException("Cannot locate configuration: tried "+
                                     Joiner.on(",").join(fileNames));
  }

  @Override
  public MetricsConfig subset(String prefix) {
    return new MetricsConfig(this, prefix);
  }

  /**
   * Return sub configs for instance specified in the config.
   * Assuming format specified as follows:<pre>
   * [type].[instance].[option] = [value]</pre>
   * Note, '*' is a special default instance, which is excluded in the result.
   * @param type of the instance
   * @return a map with [instance] as key and config object as value
   */
  Map<String, MetricsConfig> getInstanceConfigs(String type) {
    Map<String, MetricsConfig> map = Maps.newHashMap();
    MetricsConfig sub = subset(type);
    for (String key : sub.keys()) {
      Matcher matcher = INSTANCE_REGEX.matcher(key);
      if (matcher.matches()) {
        String instance = matcher.group(1);
        if (!map.containsKey(instance)) {
          map.put(instance, sub.subset(instance));
        }
      }
    }
    return map;
  }

  /**
   * @return the config keys as an Iterable, adapting the raw Iterator
   * returned by Configuration#getKeys
   */
  Iterable<String> keys() {
    return new Iterable<String>() {
      @SuppressWarnings("unchecked")
      @Override
      public Iterator<String> iterator() {
        return (Iterator<String>) getKeys();
      }
    };
  }

  /**
   * Will poke parents for defaults
   * @param key to lookup
   * @return the value or null
   */
  @Override
  public Object getProperty(String key) {
    Object value = super.getProperty(key);
    if (value == null) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("poking parent '"+ getParent().getClass().getSimpleName() +
                  "' for key: "+ key);
      }
      // fall back to the "*." default variant unless key is already one
      return getParent().getProperty(key.startsWith(PREFIX_DEFAULT) ? key
                                     : PREFIX_DEFAULT + key);
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("returning '"+ value +"' for key: "+ key);
    }
    return value;
  }

  /**
   * Instantiate and initialize the plugin configured under name.
   * @param <T> expected plugin type
   * @param name (prefix) of the plugin; "" means this config itself
   * @return the initialized plugin, or null if no class is configured
   * @throws MetricsConfigException if the plugin cannot be created
   */
  <T extends MetricsPlugin> T getPlugin(String name) {
    String clsName = getClassName(name);
    if (clsName == null) return null;
    try {
      Class<?> cls = Class.forName(clsName, true, getPluginLoader());
      @SuppressWarnings("unchecked")
      T plugin = (T) cls.newInstance();
      plugin.init(name.isEmpty() ? this : subset(name));
      return plugin;
    }
    catch (Exception e) {
      throw new MetricsConfigException("Error creating plugin: "+ clsName, e);
    }
  }

  /**
   * @param prefix of the class option; "" means the bare "class" key
   * @return the configured class name, or null if absent or empty
   */
  String getClassName(String prefix) {
    String classKey = prefix.isEmpty() ? "class" : prefix +".class";
    String clsName = getString(classKey);
    LOG.debug(clsName);
    if (clsName == null || clsName.isEmpty()) {
      return null;
    }
    return clsName;
  }

  /**
   * @return a classloader able to load plugin jars listed under plugin.urls,
   * falling back to this class's own loader or the parent config's plugin
   * loader. The loader is cached after the first successful build.
   */
  ClassLoader getPluginLoader() {
    if (pluginLoader != null) return pluginLoader;
    final ClassLoader defaultLoader = getClass().getClassLoader();
    // use super.getProperty to avoid poking parents for the "*." default
    Object purls = super.getProperty(PLUGIN_URLS_KEY);
    if (purls == null) return defaultLoader;
    Iterable<String> jars = SPLITTER.split((String) purls);
    int len = Iterables.size(jars);
    if ( len > 0) {
      final URL[] urls = new URL[len];
      try {
        int i = 0;
        for (String jar : jars) {
          LOG.debug(jar);
          urls[i++] = new URL(jar);
        }
      }
      catch (Exception e) {
        throw new MetricsConfigException(e);
      }
      if (LOG.isDebugEnabled()) {
        LOG.debug("using plugin jars: "+ Iterables.toString(jars));
      }
      pluginLoader = doPrivileged(new PrivilegedAction<ClassLoader>() {
        @Override public ClassLoader run() {
          return new URLClassLoader(urls, defaultLoader);
        }
      });
      return pluginLoader;
    }
    // empty url list: defer to the parent config's loader if it has one
    if (parent instanceof MetricsConfig) {
      return ((MetricsConfig) parent).getPluginLoader();
    }
    return defaultLoader;
  }

  @Override public void clear() {
    super.clear();
    // pluginLoader.close(); // jdk7 is saner
  }

  /**
   * @param prefix of the filter options
   * @return the configured filter, or null if no filter options are present.
   * A GlobFilter is assumed when a pattern is specified without a class.
   */
  MetricsFilter getFilter(String prefix) {
    // don't create filter instances without out options
    MetricsConfig conf = subset(prefix);
    if (conf.isEmpty()) return null;
    MetricsFilter filter = getPlugin(prefix);
    if (filter != null) return filter;
    // glob filter is assumed if pattern is specified but class is not.
    filter = new GlobFilter();
    filter.init(conf);
    return filter;
  }

  @Override
  public String toString() {
    return toString(this);
  }

  /** Render a Configuration as a properties-style string (debug helper) */
  static String toString(Configuration c) {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    PrintStream ps = new PrintStream(buffer);
    PropertiesConfiguration tmp = new PropertiesConfiguration();
    tmp.copy(c);
    try { tmp.save(ps); }
    catch (Exception e) {
      throw new MetricsConfigException(e);
    }
    return buffer.toString();
  }
}

View File

@ -0,0 +1,40 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.metrics2.MetricsException;
/**
* The metrics configuration runtime exception
*/
/**
 * Unchecked exception raised when the metrics configuration is invalid,
 * cannot be located, or a configured plugin cannot be created.
 */
class MetricsConfigException extends MetricsException {
  private static final long serialVersionUID = 1L;

  /** @param cause the underlying configuration error */
  MetricsConfigException(Throwable cause) {
    super(cause);
  }

  /** @param message describing the configuration problem */
  MetricsConfigException(String message) {
    super(message);
  }

  /**
   * @param message describing the configuration problem
   * @param cause the underlying error
   */
  MetricsConfigException(String message, Throwable cause) {
    super(message, cause);
  }
}

View File

@ -0,0 +1,152 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.Collections;
import java.util.List;
import com.google.common.collect.Lists;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.lib.Interns;
/**
 * Default {@link MetricsRecordBuilder} implementation. Accumulates tags and
 * metrics, applying the record filter decision (made at construction) and the
 * metric filter as values are added.
 */
class MetricsRecordBuilderImpl extends MetricsRecordBuilder {
  private final MetricsCollector parent;
  private final long timestamp; // record creation time (wall clock millis)
  private final MetricsInfo recInfo;
  private final List<AbstractMetric> metrics;
  private final List<MetricsTag> tags;
  private final MetricsFilter recordFilter, metricFilter;
  private final boolean acceptable; // did the record name pass the record filter?

  /**
   * @param parent the collector that created this builder
   * @param info metadata of the record under construction
   * @param rf record filter, may be null (accept all)
   * @param mf metric filter, may be null (accept all)
   * @param acceptable whether the record name passed the record filter
   */
  MetricsRecordBuilderImpl(MetricsCollector parent, MetricsInfo info,
                           MetricsFilter rf, MetricsFilter mf,
                           boolean acceptable) {
    this.parent = parent;
    timestamp = System.currentTimeMillis();
    recInfo = info;
    metrics = Lists.newArrayList();
    tags = Lists.newArrayList();
    recordFilter = rf;
    metricFilter = mf;
    this.acceptable = acceptable;
  }

  /**
   * Shared acceptance test for all addCounter/addGauge variants, which
   * previously each repeated this expression inline.
   * @param info of the metric to test
   * @return true iff the record is acceptable and the metric passes the
   * metric filter (if any)
   */
  private boolean acceptsMetric(MetricsInfo info) {
    return acceptable &&
        (metricFilter == null || metricFilter.accepts(info.name()));
  }

  @Override
  public MetricsCollector parent() { return parent; }

  @Override
  public MetricsRecordBuilderImpl tag(MetricsInfo info, String value) {
    if (acceptable) {
      tags.add(Interns.tag(info, value));
    }
    return this;
  }

  // NOTE(review): unlike tag()/addCounter/addGauge, the add() overloads below
  // insert pre-built objects unconditionally, bypassing both filters —
  // presumably intentional for injected tags; verify against callers.
  @Override
  public MetricsRecordBuilderImpl add(MetricsTag tag) {
    tags.add(tag);
    return this;
  }

  @Override
  public MetricsRecordBuilderImpl add(AbstractMetric metric) {
    metrics.add(metric);
    return this;
  }

  @Override
  public MetricsRecordBuilderImpl addCounter(MetricsInfo info, int value) {
    if (acceptsMetric(info)) {
      metrics.add(new MetricCounterInt(info, value));
    }
    return this;
  }

  @Override
  public MetricsRecordBuilderImpl addCounter(MetricsInfo info, long value) {
    if (acceptsMetric(info)) {
      metrics.add(new MetricCounterLong(info, value));
    }
    return this;
  }

  @Override
  public MetricsRecordBuilderImpl addGauge(MetricsInfo info, int value) {
    if (acceptsMetric(info)) {
      metrics.add(new MetricGaugeInt(info, value));
    }
    return this;
  }

  @Override
  public MetricsRecordBuilderImpl addGauge(MetricsInfo info, long value) {
    if (acceptsMetric(info)) {
      metrics.add(new MetricGaugeLong(info, value));
    }
    return this;
  }

  @Override
  public MetricsRecordBuilderImpl addGauge(MetricsInfo info, float value) {
    if (acceptsMetric(info)) {
      metrics.add(new MetricGaugeFloat(info, value));
    }
    return this;
  }

  @Override
  public MetricsRecordBuilderImpl addGauge(MetricsInfo info, double value) {
    if (acceptsMetric(info)) {
      metrics.add(new MetricGaugeDouble(info, value));
    }
    return this;
  }

  @Override
  public MetricsRecordBuilderImpl setContext(String value) {
    return tag(MsInfo.Context, value);
  }

  /**
   * @return the built record, or null if the record was rejected by the
   * record filter (by name at construction time, or by tags here)
   */
  public MetricsRecordImpl getRecord() {
    if (acceptable && (recordFilter == null || recordFilter.accepts(tags))) {
      return new MetricsRecordImpl(recInfo, timestamp, tags(), metrics());
    }
    return null;
  }

  /** @return an unmodifiable view of the accumulated tags */
  List<MetricsTag> tags() {
    return Collections.unmodifiableList(tags);
  }

  /** @return an unmodifiable view of the accumulated metrics */
  List<AbstractMetric> metrics() {
    return Collections.unmodifiableList(metrics);
  }
}

View File

@ -0,0 +1,78 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.Iterator;
import java.util.Collection;
import com.google.common.collect.AbstractIterator;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;
/**
 * A {@link MetricsRecord} view that passes each metric of the delegate
 * record through a {@link MetricsFilter}, delegating everything else.
 */
class MetricsRecordFiltered extends AbstractMetricsRecord {
  private final MetricsRecord delegate;
  private final MetricsFilter filter;

  /**
   * @param delegate the record to wrap
   * @param filter applied to each metric name in {@link #metrics}
   */
  MetricsRecordFiltered(MetricsRecord delegate, MetricsFilter filter) {
    this.delegate = delegate;
    this.filter = filter;
  }

  @Override public long timestamp() {
    return delegate.timestamp();
  }

  @Override public String name() {
    return delegate.name();
  }

  @Override public String description() {
    return delegate.description();
  }

  @Override public String context() {
    return delegate.context();
  }

  @Override public Collection<MetricsTag> tags() {
    return delegate.tags();
  }

  @Override public Iterable<AbstractMetric> metrics() {
    return new Iterable<AbstractMetric>() {
      @Override public Iterator<AbstractMetric> iterator() {
        // Obtain a fresh delegate iterator on every call. The previous
        // version captured a single iterator as a field of the Iterable,
        // so a second call to iterator() returned a view over an
        // already-consumed iterator, silently yielding no elements.
        final Iterator<AbstractMetric> it = delegate.metrics().iterator();
        return new AbstractIterator<AbstractMetric>() {
          @Override public AbstractMetric computeNext() {
            // skip metrics rejected by the filter
            while (it.hasNext()) {
              AbstractMetric next = it.next();
              if (filter.accepts(next.name())) {
                return next;
              }
            }
            return endOfData();
          }
        };
      }
    };
  }
}

View File

@ -0,0 +1,88 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.List;
import static com.google.common.base.Preconditions.*;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsTag;
import static org.apache.hadoop.metrics2.util.Contracts.*;
/**
 * An immutable snapshot of a metrics record: info, timestamp, tags and
 * metrics. Instances are produced by MetricsRecordBuilderImpl#getRecord.
 */
class MetricsRecordImpl extends AbstractMetricsRecord {
  protected static final String DEFAULT_CONTEXT = "default";

  private final long timestamp;
  private final MetricsInfo info;
  private final List<MetricsTag> tags;
  private final Iterable<AbstractMetric> metrics;

  /**
   * Construct a metrics record
   * @param info {@link MetricsInfo} of the record
   * @param timestamp of the record, must be positive
   * @param tags of the record
   * @param metrics of the record
   */
  public MetricsRecordImpl(MetricsInfo info, long timestamp,
                           List<MetricsTag> tags,
                           Iterable<AbstractMetric> metrics) {
    this.timestamp = checkArg(timestamp, timestamp > 0, "timestamp");
    this.info = checkNotNull(info, "info");
    this.tags = checkNotNull(tags, "tags");
    this.metrics = checkNotNull(metrics, "metrics");
  }

  @Override public long timestamp() {
    return timestamp;
  }

  @Override public String name() {
    return info.name();
  }

  MetricsInfo info() {
    return info;
  }

  @Override public String description() {
    return info.description();
  }

  /**
   * @return the value of the Context tag if present, DEFAULT_CONTEXT
   * otherwise. Relies on Context tags being interned (== comparison).
   */
  @Override public String context() {
    // usually the first tag
    for (MetricsTag t : tags) {
      if (t.info() == MsInfo.Context) {
        return t.value();
      }
    }
    return DEFAULT_CONTEXT;
  }

  @Override
  public List<MetricsTag> tags() {
    return tags; // already unmodifiable from MetricsRecordBuilderImpl#tags
  }

  @Override public Iterable<AbstractMetric> metrics() {
    return metrics;
  }
}

View File

@ -0,0 +1,197 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.Random;
import static com.google.common.base.Preconditions.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableCounterInt;
import org.apache.hadoop.metrics2.lib.MutableStat;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import static org.apache.hadoop.metrics2.util.Contracts.*;
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsSink;
/**
* An adapter class for metrics sink and associated filters
*/
/**
 * An adapter class for metrics sink and associated filters. Owns a daemon
 * thread that drains a bounded queue of metrics buffers into the sink,
 * with randomized exponential-backoff retry on sink failures.
 */
class MetricsSinkAdapter implements SinkQueue.Consumer<MetricsBuffer> {
  private final Log LOG = LogFactory.getLog(MetricsSinkAdapter.class);
  private final String name, description, context;
  private final MetricsSink sink;
  private final MetricsFilter sourceFilter, recordFilter, metricFilter;
  private final SinkQueue<MetricsBuffer> queue;
  private final Thread sinkThread;
  private volatile boolean stopping = false;
  private volatile boolean inError = false; // suppress repeated error logs
  private final int period, firstRetryDelay, retryCount;
  private final float retryBackoff;
  private final MetricsRegistry registry = new MetricsRegistry("sinkadapter");
  private final MutableStat latency;   // end-to-end push latency
  private final MutableCounterInt dropped; // buffers dropped on a full queue
  private final MutableGaugeInt qsize;

  /**
   * @param name of the sink instance
   * @param description of the sink
   * @param sink the backend to push records to
   * @param context to match against record contexts (null matches all)
   * @param sourceFilter filter on source names, may be null
   * @param recordFilter filter on records, may be null
   * @param metricFilter filter on metrics, may be null
   * @param period push every period-th logical tick
   * @param queueCapacity capacity of the buffer queue
   * @param retryDelay initial retry delay in seconds
   * @param retryBackoff multiplicative backoff factor, must be &gt; 1
   * @param retryCount retries before dropping the batch
   */
  MetricsSinkAdapter(String name, String description, MetricsSink sink,
                     String context, MetricsFilter sourceFilter,
                     MetricsFilter recordFilter, MetricsFilter metricFilter,
                     int period, int queueCapacity, int retryDelay,
                     float retryBackoff, int retryCount) {
    this.name = checkNotNull(name, "name");
    this.description = description;
    this.sink = checkNotNull(sink, "sink object");
    this.context = context;
    this.sourceFilter = sourceFilter;
    this.recordFilter = recordFilter;
    this.metricFilter = metricFilter;
    this.period = checkArg(period, period > 0, "period");
    firstRetryDelay = checkArg(retryDelay, retryDelay > 0, "retry delay");
    this.retryBackoff = checkArg(retryBackoff, retryBackoff>1, "retry backoff");
    this.retryCount = retryCount;
    this.queue = new SinkQueue<MetricsBuffer>(checkArg(queueCapacity,
        queueCapacity > 0, "queue capacity"));
    latency = registry.newRate("Sink_"+ name, "Sink end to end latency", false);
    dropped = registry.newCounter("Sink_"+ name +"Dropped",
                                  "Dropped updates per sink", 0);
    qsize = registry.newGauge("Sink_"+ name + "Qsize", "Queue size", 0);

    sinkThread = new Thread() {
      @Override public void run() {
        publishMetricsFromQueue();
      }
    };
    sinkThread.setName(name);
    sinkThread.setDaemon(true);
  }

  /**
   * Enqueue a buffer for this sink if the logical time hits this sink's
   * period; count a drop if the queue is full.
   * @param buffer the metrics buffer
   * @param logicalTime the sampling tick counter
   * @return true unless the buffer was dropped
   */
  boolean putMetrics(MetricsBuffer buffer, long logicalTime) {
    if (logicalTime % period == 0) {
      LOG.debug("enqueue, logicalTime="+ logicalTime);
      if (queue.enqueue(buffer)) return true;
      dropped.incr();
      return false;
    }
    return true; // OK
  }

  /**
   * Sink thread main loop: consume queued buffers, retrying with
   * randomized exponential backoff on sink exceptions, until stop().
   */
  void publishMetricsFromQueue() {
    int retryDelay = firstRetryDelay;
    int n = retryCount;
    int minDelay = Math.min(500, retryDelay * 1000); // millis
    Random rng = new Random(System.nanoTime());
    while (!stopping) {
      try {
        queue.consumeAll(this);
        // successful drain: reset the backoff state
        retryDelay = firstRetryDelay;
        n = retryCount;
        inError = false;
      }
      catch (InterruptedException e) {
        // interrupt is used by stop(); the loop condition handles exit
        LOG.info(name +" thread interrupted.");
      }
      catch (Exception e) {
        if (n > 0) {
          int retryWindow = Math.max(0, 1000 / 2 * retryDelay - minDelay);
          // Random#nextInt requires a positive bound: a small configured
          // retry delay (e.g. 1s) makes retryWindow 0, which previously
          // threw IllegalArgumentException and killed the sink thread.
          int awhile = minDelay +
              (retryWindow > 0 ? rng.nextInt(retryWindow) : 0);
          if (!inError) {
            LOG.error("Got sink exception, retry in "+ awhile +"ms", e);
          }
          retryDelay *= retryBackoff;
          try { Thread.sleep(awhile); }
          catch (InterruptedException e2) {
            LOG.info(name +" thread interrupted while waiting for retry", e2);
          }
          --n;
        }
        else {
          if (!inError) {
            LOG.error("Got sink exception and over retry limit, "+
                      "suppressing further error messages", e);
          }
          queue.clear();
          inError = true; // Don't keep complaining ad infinitum
        }
      }
    }
  }

  /**
   * Push one buffer's worth of records to the sink, applying the source,
   * context, record and metric filters, then flush and record latency.
   */
  @Override
  public void consume(MetricsBuffer buffer) {
    long ts = 0;
    for (MetricsBuffer.Entry entry : buffer) {
      if (sourceFilter == null || sourceFilter.accepts(entry.name())) {
        for (MetricsRecordImpl record : entry.records()) {
          if ((context == null || context.equals(record.context())) &&
              (recordFilter == null || recordFilter.accepts(record))) {
            if (LOG.isDebugEnabled()) {
              LOG.debug("Pushing record "+ entry.name() +"."+ record.context() +
                        "."+ record.name() +" to "+ name);
            }
            sink.putMetrics(metricFilter == null
                ? record
                : new MetricsRecordFiltered(record, metricFilter));
            if (ts == 0) ts = record.timestamp();
          }
        }
      }
    }
    if (ts > 0) {
      sink.flush();
      latency.add(System.currentTimeMillis() - ts);
    }
    LOG.debug("Done");
  }

  void start() {
    sinkThread.start();
    LOG.info("Sink "+ name +" started");
  }

  /** Stop the sink thread and wait for it to finish. */
  void stop() {
    stopping = true;
    sinkThread.interrupt();
    try {
      sinkThread.join();
    }
    catch (InterruptedException e) {
      LOG.warn("Stop interrupted", e);
    }
  }

  String name() {
    return name;
  }

  String description() {
    return description;
  }

  /** Snapshot this adapter's own metrics (latency, drops, qsize). */
  void snapshot(MetricsRecordBuilder rb, boolean all) {
    registry.snapshot(rb, all);
  }

  MetricsSink sink() {
    return sink;
  }
}

View File

@ -0,0 +1,273 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.HashMap;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanException;
import javax.management.MBeanInfo;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import static com.google.common.base.Preconditions.*;
import com.google.common.collect.Maps;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.MetricsTag;
import static org.apache.hadoop.metrics2.impl.MetricsConfig.*;
import org.apache.hadoop.metrics2.util.MBeans;
import static org.apache.hadoop.metrics2.util.Contracts.*;
/**
* An adapter class for metrics source and associated filter and jmx impl
*/
class MetricsSourceAdapter implements DynamicMBean {
private static final Log LOG = LogFactory.getLog(MetricsSourceAdapter.class);
private final String prefix, name;
private final MetricsSource source;
private final MetricsFilter recordFilter, metricFilter;
private final HashMap<String, Attribute> attrCache;
private final MBeanInfoBuilder infoBuilder;
private final Iterable<MetricsTag> injectedTags;
private Iterable<MetricsRecordImpl> lastRecs;
private long jmxCacheTS = 0;
private int jmxCacheTTL;
private MBeanInfo infoCache;
private ObjectName mbeanName;
private final boolean startMBeans;
MetricsSourceAdapter(String prefix, String name, String description,
MetricsSource source, Iterable<MetricsTag> injectedTags,
MetricsFilter recordFilter, MetricsFilter metricFilter,
int jmxCacheTTL, boolean startMBeans) {
this.prefix = checkNotNull(prefix, "prefix");
this.name = checkNotNull(name, "name");
this.source = checkNotNull(source, "source");
attrCache = Maps.newHashMap();
infoBuilder = new MBeanInfoBuilder(name, description);
this.injectedTags = injectedTags;
this.recordFilter = recordFilter;
this.metricFilter = metricFilter;
this.jmxCacheTTL = checkArg(jmxCacheTTL, jmxCacheTTL > 0, "jmxCacheTTL");
this.startMBeans = startMBeans;
}
MetricsSourceAdapter(String prefix, String name, String description,
MetricsSource source, Iterable<MetricsTag> injectedTags,
int period, MetricsConfig conf) {
this(prefix, name, description, source, injectedTags,
conf.getFilter(RECORD_FILTER_KEY),
conf.getFilter(METRIC_FILTER_KEY),
period + 1, // hack to avoid most of the "innocuous" races.
conf.getBoolean(START_MBEANS_KEY, true));
}
void start() {
if (startMBeans) startMBeans();
}
@Override
public synchronized Object getAttribute(String attribute)
throws AttributeNotFoundException, MBeanException, ReflectionException {
updateJmxCache();
Attribute a = attrCache.get(attribute);
if (a == null) {
throw new AttributeNotFoundException(attribute +" not found");
}
if (LOG.isDebugEnabled()) {
LOG.debug(attribute +": "+ a);
}
return a.getValue();
}
@Override
public void setAttribute(Attribute attribute)
throws AttributeNotFoundException, InvalidAttributeValueException,
MBeanException, ReflectionException {
throw new UnsupportedOperationException("Metrics are read-only.");
}
@Override
public synchronized AttributeList getAttributes(String[] attributes) {
updateJmxCache();
AttributeList ret = new AttributeList();
for (String key : attributes) {
Attribute attr = attrCache.get(key);
if (LOG.isDebugEnabled()) {
LOG.debug(key +": "+ attr);
}
ret.add(attr);
}
return ret;
}
@Override
public AttributeList setAttributes(AttributeList attributes) {
throw new UnsupportedOperationException("Metrics are read-only.");
}
@Override
public Object invoke(String actionName, Object[] params, String[] signature)
throws MBeanException, ReflectionException {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public synchronized MBeanInfo getMBeanInfo() {
updateJmxCache();
return infoCache;
}
private synchronized void updateJmxCache() {
if (System.currentTimeMillis() - jmxCacheTS >= jmxCacheTTL) {
if (lastRecs == null) {
MetricsCollectorImpl builder = new MetricsCollectorImpl();
getMetrics(builder, true);
}
int oldCacheSize = attrCache.size();
int newCacheSize = updateAttrCache();
if (oldCacheSize < newCacheSize) {
updateInfoCache();
}
jmxCacheTS = System.currentTimeMillis();
lastRecs = null; // in case regular interval update is not running
}
}
Iterable<MetricsRecordImpl> getMetrics(MetricsCollectorImpl builder,
boolean all) {
builder.setRecordFilter(recordFilter).setMetricFilter(metricFilter);
synchronized(this) {
if (lastRecs == null && jmxCacheTS == 0) {
all = true; // Get all the metrics to populate the sink caches
}
}
try {
source.getMetrics(builder, all);
}
catch (Exception e) {
LOG.error("Error getting metrics from source "+ name, e);
}
for (MetricsRecordBuilderImpl rb : builder) {
for (MetricsTag t : injectedTags) {
rb.add(t);
}
}
synchronized(this) {
lastRecs = builder.getRecords();
return lastRecs;
}
}
synchronized void stop() {
stopMBeans();
}
synchronized void startMBeans() {
if (mbeanName != null) {
LOG.warn("MBean "+ name +" already initialized!");
LOG.debug("Stacktrace: ", new Throwable());
return;
}
mbeanName = MBeans.register(prefix, name, this);
LOG.debug("MBean for source "+ name +" registered.");
}
synchronized void stopMBeans() {
if (mbeanName != null) {
MBeans.unregister(mbeanName);
mbeanName = null;
}
}
private void updateInfoCache() {
LOG.debug("Updating info cache...");
infoCache = infoBuilder.reset(lastRecs).get();
LOG.debug("Done");
}
private int updateAttrCache() {
LOG.debug("Updating attr cache...");
int recNo = 0;
int numMetrics = 0;
for (MetricsRecordImpl record : lastRecs) {
for (MetricsTag t : record.tags()) {
setAttrCacheTag(t, recNo);
++numMetrics;
}
for (AbstractMetric m : record.metrics()) {
setAttrCacheMetric(m, recNo);
++numMetrics;
}
++recNo;
}
LOG.debug("Done. # tags & metrics="+ numMetrics);
return numMetrics;
}
/**
 * Build the JMX attribute name for a tag: "tag.&lt;name&gt;", with a
 * ".&lt;recNo&gt;" suffix for records after the first one.
 */
private static String tagName(String name, int recNo) {
  String attr = "tag." + name;
  if (recNo > 0) {
    attr += "." + recNo;
  }
  return attr;
}
/** Cache a tag value under its JMX attribute name (see tagName). */
private void setAttrCacheTag(MetricsTag tag, int recNo) {
  String key = tagName(tag.name(), recNo);
  attrCache.put(key, new Attribute(key, tag.value()));
}
/**
 * Build the JMX attribute name for a metric: the metric name itself for
 * the first record, suffixed with ".&lt;recNo&gt;" for subsequent records.
 */
private static String metricName(String name, int recNo) {
  if (recNo == 0) {
    return name;
  }
  // Mirror the original behavior: a negative recNo yields no suffix.
  String attr = name;
  if (recNo > 0) {
    attr += "." + recNo;
  }
  return attr;
}
/** Cache a metric value under its JMX attribute name (see metricName). */
private void setAttrCacheMetric(AbstractMetric metric, int recNo) {
  String key = metricName(metric.name(), recNo);
  attrCache.put(key, new Attribute(key, metric.value()));
}
/** @return the name of the wrapped source */
String name() {
  return name;
}
/** @return the wrapped metrics source object */
MetricsSource source() {
  return source;
}
}

View File

@ -0,0 +1,566 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.io.StringWriter;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.net.InetAddress;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import javax.management.ObjectName;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.Locale;
import static com.google.common.base.Preconditions.*;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.math.util.MathUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsSink;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import static org.apache.hadoop.metrics2.impl.MetricsConfig.*;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.Interns;
import org.apache.hadoop.metrics2.lib.MetricsAnnotations;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder;
import org.apache.hadoop.metrics2.lib.MutableStat;
import org.apache.hadoop.metrics2.util.MBeans;
/**
 * A base class for metrics system singletons.
 *
 * Manages the lifecycle (init/start/stop/shutdown) of metrics sources and
 * sinks, runs the periodic snapshot timer, and publishes snapshots to the
 * configured sinks. The system is itself a metrics source, publishing its
 * own snapshot/publish stats under the "MetricsSystem" name.
 */
@InterfaceAudience.Private
@Metrics(context="metricssystem")
public class MetricsSystemImpl extends MetricsSystem implements MetricsSource {

  static final Log LOG = LogFactory.getLog(MetricsSystemImpl.class);
  static final String MS_NAME = "MetricsSystem";
  static final String MS_STATS_NAME = MS_NAME +",sub=Stats";
  static final String MS_STATS_DESC = "Metrics system metrics";
  static final String MS_CONTROL_NAME = MS_NAME +",sub=Control";
  static final String MS_INIT_MODE_KEY = "hadoop.metrics.init.mode";

  enum InitMode { NORMAL, STANDBY }

  private final Map<String, MetricsSourceAdapter> sources; // started sources
  private final Map<String, MetricsSource> allSources; // every registered source
  private final Map<String, MetricsSinkAdapter> sinks; // started sinks
  private final Map<String, MetricsSink> allSinks; // every registered sink
  private final List<Callback> callbacks; // lifecycle callbacks (proxied)
  private final MetricsCollectorImpl collector;
  private final MetricsRegistry registry = new MetricsRegistry(MS_NAME);
  @Metric({"Snapshot", "Snapshot stats"}) MutableStat snapshotStat;
  @Metric({"Publish", "Publishing stats"}) MutableStat publishStat;
  @Metric("Dropped updates by all sinks") MutableCounterLong droppedPubAll;

  // Tags (e.g. hostname) appended to every record of every source
  private final List<MetricsTag> injectedTags;

  // Things that are changed by init()/start()/stop()
  private String prefix;
  private MetricsFilter sourceFilter;
  private MetricsConfig config;
  private Map<String, MetricsConfig> sourceConfigs, sinkConfigs;
  private boolean monitoring = false;
  private Timer timer;
  private int period; // seconds
  private long logicalTime; // number of timer invocations * period
  private ObjectName mbeanName;
  private boolean publishSelfMetrics = true;
  private MetricsSourceAdapter sysSource;
  private int refCount = 0; // for mini cluster mode

  /**
   * Construct the metrics system
   * @param prefix for the system
   */
  public MetricsSystemImpl(String prefix) {
    this.prefix = prefix;
    allSources = Maps.newHashMap();
    sources = Maps.newLinkedHashMap();
    allSinks = Maps.newHashMap();
    sinks = Maps.newLinkedHashMap();
    sourceConfigs = Maps.newHashMap();
    sinkConfigs = Maps.newHashMap();
    callbacks = Lists.newArrayList();
    injectedTags = Lists.newArrayList();
    collector = new MetricsCollectorImpl();
    if (prefix != null) {
      // prefix could be null for default ctor, which requires init later
      initSystemMBean();
    }
  }

  /**
   * Construct the system but not initializing (read config etc.) it.
   */
  public MetricsSystemImpl() {
    this(null);
  }

  /**
   * Initialized the metrics system with a prefix.
   * @param prefix the system will look for configs with the prefix
   * @return the metrics system object itself
   */
  @Override
  public synchronized MetricsSystem init(String prefix) {
    if (monitoring && !DefaultMetricsSystem.inMiniClusterMode()) {
      LOG.warn(this.prefix +" metrics system already initialized!");
      return this;
    }
    this.prefix = checkNotNull(prefix, "prefix");
    ++refCount;
    if (monitoring) {
      // in mini cluster mode
      LOG.info(this.prefix +" metrics system started (again)");
      return this;
    }
    switch (initMode()) {
      case NORMAL:
        try { start(); }
        catch (MetricsConfigException e) {
          // Usually because hadoop-metrics2.properties is missing
          // We can always start the metrics system later via JMX.
          LOG.warn("Metrics system not started: "+ e.getMessage());
          LOG.debug("Stacktrace: ", e);
        }
        break;
      case STANDBY:
        LOG.info(prefix +" metrics system started in standby mode");
    }
    initSystemMBean();
    return this;
  }

  /** Start the system: read config, start sources/sinks and the timer. */
  @Override
  public synchronized void start() {
    checkNotNull(prefix, "prefix");
    if (monitoring) {
      LOG.warn(prefix +" metrics system already started!",
               new MetricsException("Illegal start"));
      return;
    }
    for (Callback cb : callbacks) cb.preStart();
    configure(prefix);
    startTimer();
    monitoring = true;
    LOG.info(prefix +" metrics system started");
    for (Callback cb : callbacks) cb.postStart();
  }

  /** Stop the system: stop the timer, all sources and sinks. */
  @Override
  public synchronized void stop() {
    if (!monitoring && !DefaultMetricsSystem.inMiniClusterMode()) {
      LOG.warn(prefix +" metrics system not yet started!",
               new MetricsException("Illegal stop"));
      return;
    }
    if (!monitoring) {
      // in mini cluster mode
      LOG.info(prefix +" metrics system stopped (again)");
      return;
    }
    for (Callback cb : callbacks) cb.preStop();
    LOG.info("Stopping "+ prefix +" metrics system...");
    stopTimer();
    stopSources();
    stopSinks();
    clearConfigs();
    monitoring = false;
    LOG.info(prefix +" metrics system stopped.");
    for (Callback cb : callbacks) cb.postStop();
  }

  /**
   * Register a metrics source built from the object's annotations.
   * The source is re-registered on metrics system restart via a callback.
   * @param name of the source (null picks the annotated name)
   * @param desc of the source (null picks the annotated description)
   * @param source the object with Metric annotations
   * @return the source object itself
   */
  @Override public synchronized <T>
  T register(String name, String desc, T source) {
    MetricsSourceBuilder sb = MetricsAnnotations.newSourceBuilder(source);
    final MetricsSource s = sb.build();
    MetricsInfo si = sb.info();
    String name2 = name == null ? si.name() : name;
    final String finalDesc = desc == null ? si.description() : desc;
    final String finalName = // be friendly to non-metrics tests
        DefaultMetricsSystem.sourceName(name2, !monitoring);
    allSources.put(finalName, s);
    LOG.debug(finalName +", "+ finalDesc);
    if (monitoring) {
      registerSource(finalName, finalDesc, s);
    }
    // We want to re-register the source to pick up new config when the
    // metrics system restarts.
    register(new AbstractCallback() {
      @Override public void postStart() {
        registerSource(finalName, finalDesc, s);
      }
    });
    return source;
  }

  /** Wrap a source in an adapter (with per-source or default config) and start it. */
  synchronized
  void registerSource(String name, String desc, MetricsSource source) {
    checkNotNull(config, "config");
    MetricsConfig conf = sourceConfigs.get(name);
    MetricsSourceAdapter sa = conf != null
        ? new MetricsSourceAdapter(prefix, name, desc, source,
                                   injectedTags, period, conf)
        : new MetricsSourceAdapter(prefix, name, desc, source,
                                   injectedTags, period, config.subset(SOURCE_KEY));
    sources.put(name, sa);
    sa.start();
    LOG.info("Registered source "+ name);
  }

  /**
   * Register a metrics sink. The sink is re-attached with fresh config on
   * metrics system restart via a callback.
   * @param name of the sink
   * @param description of the sink
   * @param sink the sink object
   * @return the sink object itself
   */
  @Override public synchronized <T extends MetricsSink>
  T register(final String name, final String description, final T sink) {
    LOG.debug(name +", "+ description);
    if (allSinks.containsKey(name)) {
      LOG.warn("Sink "+ name +" already exists!");
      return sink;
    }
    allSinks.put(name, sink);
    if (config != null) {
      registerSink(name, description, sink);
    }
    // We want to re-register the sink to pick up new config
    // when the metrics system restarts.
    register(new AbstractCallback() {
      @Override public void postStart() {
        // Must call registerSink directly: going through the public
        // register() again would be a no-op (the name is already in
        // allSinks), so the sink would never be re-attached with the
        // fresh config after a restart.
        registerSink(name, description, sink);
      }
    });
    return sink;
  }

  /** Wrap a sink in an adapter (with per-sink or default config) and start it. */
  synchronized void registerSink(String name, String desc, MetricsSink sink) {
    checkNotNull(config, "config");
    MetricsConfig conf = sinkConfigs.get(name);
    MetricsSinkAdapter sa = conf != null
        ? newSink(name, desc, sink, conf)
        : newSink(name, desc, sink, config.subset(SINK_KEY));
    sinks.put(name, sa);
    sa.start();
    LOG.info("Registered sink "+ name);
  }

  /**
   * Register a lifecycle callback. The callback is wrapped in a proxy that
   * logs (instead of propagating) any exception it throws, so a faulty
   * callback cannot break the metrics system lifecycle.
   */
  @Override
  public synchronized void register(final Callback callback) {
    callbacks.add((Callback) Proxy.newProxyInstance(
        callback.getClass().getClassLoader(), new Class<?>[] { Callback.class },
        new InvocationHandler() {
          @Override
          public Object invoke(Object proxy, Method method, Object[] args)
              throws Throwable {
            try {
              return method.invoke(callback, args);
            }
            catch (Exception e) {
              // These are not considered fatal.
              LOG.warn("Caught exception in callback "+ method.getName(), e);
            }
            return null;
          }
        }));
  }

  /** Start MBeans for all active sources. */
  @Override
  public synchronized void startMetricsMBeans() {
    for (MetricsSourceAdapter sa : sources.values()) {
      sa.startMBeans();
    }
  }

  /** Stop MBeans for all active sources. */
  @Override
  public synchronized void stopMetricsMBeans() {
    for (MetricsSourceAdapter sa : sources.values()) {
      sa.stopMBeans();
    }
  }

  /** @return the current config serialized to a properties-format string */
  @Override
  public synchronized String currentConfig() {
    PropertiesConfiguration saver = new PropertiesConfiguration();
    StringWriter writer = new StringWriter();
    saver.copy(config);
    try { saver.save(writer); }
    catch (Exception e) {
      throw new MetricsConfigException("Error stringify config", e);
    }
    return writer.toString();
  }

  /** Schedule the periodic snapshot/publish timer. */
  private synchronized void startTimer() {
    if (timer != null) {
      LOG.warn(prefix +" metrics system timer already started!");
      return;
    }
    logicalTime = 0;
    // Multiply with a long literal so the conversion to millis cannot
    // overflow int arithmetic for very large periods.
    long millis = period * 1000L;
    timer = new Timer("Timer for '"+ prefix +"' metrics system", true);
    timer.scheduleAtFixedRate(new TimerTask() {
      @Override
      public void run() {
        try {
          onTimerEvent();
        }
        catch (Exception e) {
          // Keep the timer alive; an uncaught exception would cancel it.
          LOG.warn(e);
        }
      }
    }, millis, millis);
    LOG.info("Scheduled snapshot period at "+ period +" second(s).");
  }

  /** One timer tick: snapshot all sources and publish to sinks, if any. */
  synchronized void onTimerEvent() {
    logicalTime += period;
    if (sinks.size() > 0) {
      publishMetrics(sampleMetrics());
    }
  }

  /**
   * Sample all the sources for a snapshot of metrics/tags
   * @return the metrics buffer containing the snapshot
   */
  synchronized MetricsBuffer sampleMetrics() {
    collector.clear();
    MetricsBufferBuilder bufferBuilder = new MetricsBufferBuilder();
    for (Entry<String, MetricsSourceAdapter> entry : sources.entrySet()) {
      if (sourceFilter == null || sourceFilter.accepts(entry.getKey())) {
        snapshotMetrics(entry.getValue(), bufferBuilder);
      }
    }
    if (publishSelfMetrics) {
      snapshotMetrics(sysSource, bufferBuilder);
    }
    MetricsBuffer buffer = bufferBuilder.get();
    return buffer;
  }

  /** Snapshot one source into the buffer and record the elapsed time. */
  private void snapshotMetrics(MetricsSourceAdapter sa,
                               MetricsBufferBuilder bufferBuilder) {
    long startTime = System.currentTimeMillis();
    bufferBuilder.add(sa.name(), sa.getMetrics(collector, false));
    collector.clear();
    snapshotStat.add(System.currentTimeMillis() - startTime);
    LOG.debug("Snapshotted source "+ sa.name());
  }

  /**
   * Publish a metrics snapshot to all the sinks
   * @param buffer the metrics snapshot to publish
   */
  synchronized void publishMetrics(MetricsBuffer buffer) {
    int dropped = 0;
    for (MetricsSinkAdapter sa : sinks.values()) {
      long startTime = System.currentTimeMillis();
      dropped += sa.putMetrics(buffer, logicalTime) ? 0 : 1;
      publishStat.add(System.currentTimeMillis() - startTime);
    }
    droppedPubAll.incr(dropped);
  }

  /** Cancel the snapshot timer. */
  private synchronized void stopTimer() {
    if (timer == null) {
      LOG.warn(prefix +" metrics system timer already stopped!");
      return;
    }
    timer.cancel();
    timer = null;
  }

  /** Stop all active source adapters (including the system source). */
  private synchronized void stopSources() {
    for (Entry<String, MetricsSourceAdapter> entry : sources.entrySet()) {
      MetricsSourceAdapter sa = entry.getValue();
      LOG.info("Stopping metrics source "+ entry.getKey());
      LOG.debug(sa.source().getClass());
      sa.stop();
    }
    sysSource.stop();
    sources.clear();
  }

  /** Stop all active sink adapters. */
  private synchronized void stopSinks() {
    for (Entry<String, MetricsSinkAdapter> entry : sinks.entrySet()) {
      MetricsSinkAdapter sa = entry.getValue();
      LOG.info("Stopping metrics sink "+ entry.getKey());
      LOG.debug(sa.sink().getClass());
      sa.stop();
    }
    sinks.clear();
  }

  /** Load the config for the prefix and configure sinks, sources and tags. */
  private synchronized void configure(String prefix) {
    config = MetricsConfig.create(prefix);
    configureSinks();
    configureSources();
    configureSystem();
  }

  /** Inject system-wide tags (currently the local hostname). */
  private synchronized void configureSystem() {
    injectedTags.add(Interns.tag(MsInfo.Hostname, getHostname()));
  }

  /**
   * Create and start the configured sinks. The snapshot period is the gcd
   * of all sink periods so every sink period is a multiple of it.
   */
  private synchronized void configureSinks() {
    sinkConfigs = config.getInstanceConfigs(SINK_KEY);
    int confPeriod = 0;
    for (Entry<String, MetricsConfig> entry : sinkConfigs.entrySet()) {
      MetricsConfig conf = entry.getValue();
      int sinkPeriod = conf.getInt(PERIOD_KEY, PERIOD_DEFAULT);
      confPeriod = confPeriod == 0 ? sinkPeriod
                                   : MathUtils.gcd(confPeriod, sinkPeriod);
      String clsName = conf.getClassName("");
      if (clsName == null) continue; // sink can be registered later on
      String sinkName = entry.getKey();
      try {
        MetricsSinkAdapter sa = newSink(sinkName,
            conf.getString(DESC_KEY, sinkName), conf);
        sa.start();
        sinks.put(sinkName, sa);
      }
      catch (Exception e) {
        // Best effort: a broken sink config must not stop the system.
        LOG.warn("Error creating sink '"+ sinkName +"'", e);
      }
    }
    period = confPeriod > 0 ? confPeriod
                            : config.getInt(PERIOD_KEY, PERIOD_DEFAULT);
  }

  /** Build a sink adapter with filters and retry policy from the config. */
  static MetricsSinkAdapter newSink(String name, String desc, MetricsSink sink,
                                    MetricsConfig conf) {
    return new MetricsSinkAdapter(name, desc, sink, conf.getString(CONTEXT_KEY),
        conf.getFilter(SOURCE_FILTER_KEY),
        conf.getFilter(RECORD_FILTER_KEY),
        conf.getFilter(METRIC_FILTER_KEY),
        conf.getInt(PERIOD_KEY, PERIOD_DEFAULT),
        conf.getInt(QUEUE_CAPACITY_KEY, QUEUE_CAPACITY_DEFAULT),
        conf.getInt(RETRY_DELAY_KEY, RETRY_DELAY_DEFAULT),
        conf.getFloat(RETRY_BACKOFF_KEY, RETRY_BACKOFF_DEFAULT),
        conf.getInt(RETRY_COUNT_KEY, RETRY_COUNT_DEFAULT));
  }

  /** Build a sink adapter, instantiating the sink plugin from the config. */
  static MetricsSinkAdapter newSink(String name, String desc,
                                    MetricsConfig conf) {
    return newSink(name, desc, (MetricsSink) conf.getPlugin(""), conf);
  }

  /** Read per-source configs and the global source filter. */
  private void configureSources() {
    sourceFilter = config.getFilter(PREFIX_DEFAULT + SOURCE_FILTER_KEY);
    sourceConfigs = config.getInstanceConfigs(SOURCE_KEY);
    registerSystemSource();
  }

  /** Drop all configuration state (used by stop()). */
  private void clearConfigs() {
    sinkConfigs.clear();
    sourceConfigs.clear();
    injectedTags.clear();
    config = null;
  }

  /** @return the local hostname, or "localhost" if it cannot be resolved */
  static String getHostname() {
    try {
      return InetAddress.getLocalHost().getHostName();
    }
    catch (Exception e) {
      LOG.error("Error getting localhost name. Using 'localhost'...", e);
    }
    return "localhost";
  }

  /** Register and start the metrics system's own stats source. */
  private void registerSystemSource() {
    MetricsConfig sysConf = sourceConfigs.get(MS_NAME);
    sysSource = new MetricsSourceAdapter(prefix, MS_STATS_NAME, MS_STATS_DESC,
        MetricsAnnotations.makeSource(this), injectedTags, period,
        sysConf == null ? config.subset(SOURCE_KEY) : sysConf);
    sysSource.start();
  }

  /** Snapshot the metrics system's own gauges and registry metrics. */
  @Override
  public synchronized void getMetrics(MetricsCollector builder, boolean all) {
    MetricsRecordBuilder rb = builder.addRecord(MS_NAME)
        .addGauge(MsInfo.NumActiveSources, sources.size())
        .addGauge(MsInfo.NumAllSources, allSources.size())
        .addGauge(MsInfo.NumActiveSinks, sinks.size())
        .addGauge(MsInfo.NumAllSinks, allSinks.size());
    for (MetricsSinkAdapter sa : sinks.values()) {
      sa.snapshot(rb, all);
    }
    registry.snapshot(rb, all);
  }

  /** Register the control MBean for this system (idempotent). */
  private void initSystemMBean() {
    checkNotNull(prefix, "prefix should not be null here!");
    if (mbeanName == null) {
      mbeanName = MBeans.register(prefix, MS_CONTROL_NAME, this);
    }
  }

  /**
   * Decrement the reference count; when it reaches zero, stop the system
   * and release all sources, sinks, callbacks and the control MBean.
   * @return true if the system was actually shut down
   */
  @Override
  public synchronized boolean shutdown() {
    LOG.debug("refCount="+ refCount);
    if (refCount <= 0) LOG.debug("Redundant shutdown", new Throwable());
    if (--refCount > 0) return false;
    if (monitoring) {
      try { stop(); }
      catch (Exception e) {
        LOG.warn("Error stopping the metrics system", e);
      }
    }
    allSources.clear();
    allSinks.clear();
    callbacks.clear();
    if (mbeanName != null) {
      MBeans.unregister(mbeanName);
      mbeanName = null;
    }
    LOG.info(prefix +" metrics system shutdown complete.");
    return true;
  }

  /** @return the registered source with the given name, or null */
  public MetricsSource getSource(String name) {
    return allSources.get(name);
  }

  /**
   * Determine the init mode from the system property or, failing that,
   * the environment variable "hadoop.metrics.init.mode"; defaults to NORMAL.
   */
  private InitMode initMode() {
    LOG.debug("from system property: "+ System.getProperty(MS_INIT_MODE_KEY));
    LOG.debug("from environment variable: "+ System.getenv(MS_INIT_MODE_KEY));
    String m = System.getProperty(MS_INIT_MODE_KEY);
    String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m;
    return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2)
                            .toUpperCase(Locale.US));
  }
}

View File

@ -0,0 +1,55 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import com.google.common.base.Objects;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsInfo;
/**
 * Metrics system related metrics info instances
 */
@InterfaceAudience.Private
public enum MsInfo implements MetricsInfo {
  // Gauges published by the metrics system's own source
  NumActiveSources("Number of active metrics sources"),
  NumAllSources("Number of all registered metrics sources"),
  NumActiveSinks("Number of active metrics sinks"),
  NumAllSinks("Number of all registered metrics sinks"),
  // Common tag names
  Context("Metrics context"),
  Hostname("Local hostname"),
  SessionId("Session ID"),
  ProcessName("Process name");

  // Human-readable description returned by description()
  private final String desc;

  MsInfo(String desc) {
    this.desc = desc;
  }

  /** @return the description of this info item */
  @Override public String description() {
    return desc;
  }

  @Override public String toString() {
    return Objects.toStringHelper(this)
        .add("name", name()).add("description", desc)
        .toString();
  }
}

View File

@ -0,0 +1,170 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.ConcurrentModificationException;
/**
 * A half-blocking (nonblocking for producers, blocking for consumers) queue
 * for metrics sinks.
 *
 * New elements are dropped when the queue is full to preserve "interesting"
 * elements at the onset of queue filling events
 */
class SinkQueue<T> {

  interface Consumer<T> {
    /** Consume one element; may block (e.g. on sink I/O). */
    void consume(T object) throws InterruptedException;
  }

  // A fixed size circular buffer to minimize garbage
  private final T[] data;
  private int head; // head position
  private int tail; // tail position
  private int size; // number of elements
  // The thread currently consuming; enforces the single-consumer contract
  // and prevents clear() from racing an in-progress consume.
  private Thread currentConsumer = null;

  @SuppressWarnings("unchecked")
  SinkQueue(int capacity) {
    // Capacity is clamped to at least 1
    this.data = (T[]) new Object[Math.max(1, capacity)];
    head = tail = size = 0;
  }

  /**
   * Try to add an element without blocking (producer side).
   * @return false when the queue is full and the element is dropped
   */
  synchronized boolean enqueue(T e) {
    if (data.length == size) {
      return false;
    }
    ++size;
    tail = (tail + 1) % data.length;
    data[tail] = e;
    notify(); // wake a waiting consumer
    return true;
  }

  /**
   * Consume one element, will block if queue is empty
   * Only one consumer at a time is allowed
   * @param consumer the consumer callback object
   */
  void consume(Consumer<T> consumer) throws InterruptedException {
    T e = waitForData();
    try {
      consumer.consume(e); // can take forever
      // Dequeue only after a successful consume: if the consumer throws,
      // the element stays at the front for a later retry.
      _dequeue();
    }
    finally {
      clearConsumerLock();
    }
  }

  /**
   * Consume all the elements, will block if queue is empty
   * @param consumer the consumer callback object
   * @throws InterruptedException
   */
  void consumeAll(Consumer<T> consumer) throws InterruptedException {
    waitForData();
    try {
      // Only consume the elements present at entry; concurrently enqueued
      // elements are left for the next call.
      for (int i = size(); i-- > 0; ) {
        consumer.consume(front()); // can take forever
        _dequeue();
      }
    }
    finally {
      clearConsumerLock();
    }
  }

  /**
   * Dequeue one element from head of the queue, will block if queue is empty
   * @return the first element
   * @throws InterruptedException
   */
  synchronized T dequeue() throws InterruptedException {
    checkConsumer();
    while (0 == size) {
      wait();
    }
    return _dequeue();
  }

  // Block until an element is available, take the consumer lock, and
  // return (without removing) the front element.
  private synchronized T waitForData() throws InterruptedException {
    checkConsumer();
    while (0 == size) {
      wait();
    }
    setConsumerLock();
    return front();
  }

  // Enforce the single-consumer-at-a-time contract
  private synchronized void checkConsumer() {
    if (currentConsumer != null) {
      throw new ConcurrentModificationException("The "+
          currentConsumer.getName() +" thread is consuming the queue.");
    }
  }

  private synchronized void setConsumerLock() {
    currentConsumer = Thread.currentThread();
  }

  private synchronized void clearConsumerLock() {
    currentConsumer = null;
  }

  // Remove and return the front element; callers must guarantee size > 0
  private synchronized T _dequeue() {
    if (0 == size) {
      throw new IllegalStateException("Size must > 0 here.");
    }
    --size;
    head = (head + 1) % data.length;
    T ret = data[head];
    data[head] = null; // hint to gc
    return ret;
  }

  /** @return the front (oldest) element without removing it */
  synchronized T front() {
    return data[(head + 1) % data.length];
  }

  /** @return the back (newest) element without removing it */
  synchronized T back() {
    return data[tail];
  }

  /**
   * Drop all elements and null out their references.
   * Not allowed while a consumer is active.
   */
  synchronized void clear() {
    checkConsumer();
    for (int i = data.length; i-- > 0; ) {
      data[i] = null;
    }
    size = 0;
  }

  /** @return the number of elements currently in the queue */
  synchronized int size() {
    return size;
  }

  /** @return the fixed capacity of the queue */
  int capacity() {
    return data.length;
  }
}

View File

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A metrics system implementation
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
package org.apache.hadoop.metrics2.impl;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -0,0 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsException;
/**
 * Experimental interface to extend metrics dynamically
 */
@InterfaceAudience.Private
public enum DefaultMetricsFactory {
  INSTANCE; // the singleton

  // Lazily created factory for annotation-driven mutable metrics
  private MutableMetricsFactory mmfImpl;

  /** @return the factory for building metrics from annotations */
  public static MutableMetricsFactory getAnnotatedMetricsFactory() {
    return INSTANCE.getInstance(MutableMetricsFactory.class);
  }

  /**
   * Get (lazily creating) the factory instance of the requested type.
   * @param cls the factory class; only MutableMetricsFactory is supported
   * @return the factory instance
   * @throws MetricsException for unsupported factory types
   */
  @SuppressWarnings("unchecked")
  public synchronized <T> T getInstance(Class<T> cls) {
    if (cls == MutableMetricsFactory.class) {
      if (mmfImpl == null) {
        mmfImpl = new MutableMetricsFactory();
      }
      return (T) mmfImpl;
    }
    throw new MetricsException("Unknown metrics factory type: "+ cls.getName());
  }

  /** Replace the factory instance (e.g., to plug in a custom factory). */
  public synchronized void setInstance(MutableMetricsFactory factory) {
    mmfImpl = factory;
  }
}

View File

@ -0,0 +1,129 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import javax.management.ObjectName;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
/**
 * The default metrics system singleton
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public enum DefaultMetricsSystem {
  INSTANCE; // the singleton

  // The delegate implementation; replaceable via setInstance() for tests
  private MetricsSystem impl = new MetricsSystemImpl();
  // In mini cluster mode duplicate MBean/source names are tolerated
  volatile boolean miniClusterMode = false;
  final UniqueNames mBeanNames = new UniqueNames();
  final UniqueNames sourceNames = new UniqueNames();

  /**
   * Convenience method to initialize the metrics system
   * @param prefix for the metrics system configuration
   * @return the metrics system instance
   */
  public static MetricsSystem initialize(String prefix) {
    return INSTANCE.init(prefix);
  }

  synchronized MetricsSystem init(String prefix) {
    return impl.init(prefix);
  }

  /**
   * @return the metrics system object
   */
  public static MetricsSystem instance() {
    return INSTANCE.getImpl();
  }

  /**
   * Shutdown the metrics system
   */
  public static void shutdown() {
    INSTANCE.shutdownInstance();
  }

  synchronized void shutdownInstance() {
    // impl.shutdown() returns true only when the last reference is gone;
    // only then are the unique-name caches cleared.
    if (impl.shutdown()) {
      mBeanNames.map.clear();
      sourceNames.map.clear();
    }
  }

  /** Replace the underlying implementation; returns the previous one. */
  @InterfaceAudience.Private
  public static MetricsSystem setInstance(MetricsSystem ms) {
    return INSTANCE.setImpl(ms);
  }

  synchronized MetricsSystem setImpl(MetricsSystem ms) {
    MetricsSystem old = impl;
    impl = ms;
    return old;
  }

  synchronized MetricsSystem getImpl() { return impl; }

  @InterfaceAudience.Private
  public static void setMiniClusterMode(boolean choice) {
    INSTANCE.miniClusterMode = choice;
  }

  @InterfaceAudience.Private
  public static boolean inMiniClusterMode() {
    return INSTANCE.miniClusterMode;
  }

  /** Create a unique MBean name; fails on duplicates outside mini cluster mode. */
  @InterfaceAudience.Private
  public static ObjectName newMBeanName(String name) {
    return INSTANCE.newObjectName(name);
  }

  /** Create a unique source name; dupOK allows returning an existing name. */
  @InterfaceAudience.Private
  public static String sourceName(String name, boolean dupOK) {
    return INSTANCE.newSourceName(name, dupOK);
  }

  synchronized ObjectName newObjectName(String name) {
    try {
      // In mini cluster mode, duplicates are made unique instead of failing
      if (mBeanNames.map.containsKey(name) && !miniClusterMode) {
        throw new MetricsException(name +" already exists!");
      }
      return new ObjectName(mBeanNames.uniqueName(name));
    } catch (Exception e) {
      throw new MetricsException(e);
    }
  }

  synchronized String newSourceName(String name, boolean dupOK) {
    if (sourceNames.map.containsKey(name)) {
      if (dupOK) {
        return name;
      }
      throw new MetricsException("Metrics source "+ name +" already exists!");
    }
    return sourceNames.uniqueName(name);
  }
}

View File

@ -0,0 +1,165 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.util.Map;
import java.util.LinkedHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsTag;
/**
 * Helpers to create interned metrics info.
 *
 * Interning returns one shared instance per distinct key pair, so hot
 * metrics paths avoid allocating duplicate {@link MetricsInfo} /
 * {@link MetricsTag} metadata objects for the same name/description/value.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class Interns {
  private static final Log LOG = LogFactory.getLog(Interns.class);

  // A simple intern cache with two keys
  // (to avoid creating new (combined) key objects for lookup)
  private static abstract class CacheWith2Keys<K1, K2, V> {
    // Nested insertion-ordered maps: k1 -> (k2 -> v). Each level is bounded
    // via LinkedHashMap.removeEldestEntry: the oldest entry is evicted once
    // the corresponding expireKey*At limit reports overflow.
    private final Map<K1, Map<K2, V>> k1Map =
        new LinkedHashMap<K1, Map<K2, V>>() {
      private static final long serialVersionUID = 1L;
      private boolean gotOverflow = false; // log the overflow warning only once
      @Override
      protected boolean removeEldestEntry(Map.Entry<K1, Map<K2, V>> e) {
        boolean overflow = expireKey1At(size());
        if (overflow && !gotOverflow) {
          LOG.warn("Metrics intern cache overflow at "+ size() +" for "+ e);
          gotOverflow = true;
        }
        return overflow;
      }
    };

    /** @return true to evict the eldest first-level (k1) entry at this size */
    abstract protected boolean expireKey1At(int size);
    /** @return true to evict the eldest second-level (k2) entry at this size */
    abstract protected boolean expireKey2At(int size);
    /** Create the value to intern for a (k1, k2) pair that missed the cache. */
    abstract protected V newValue(K1 k1, K2 k2);

    // Look up (k1, k2), creating and caching the value on a miss.
    // Synchronized: the nested LinkedHashMaps are not thread-safe.
    synchronized V add(K1 k1, K2 k2) {
      Map<K2, V> k2Map = k1Map.get(k1);
      if (k2Map == null) {
        // First time we see k1: create its bounded second-level map.
        k2Map = new LinkedHashMap<K2, V>() {
          private static final long serialVersionUID = 1L;
          private boolean gotOverflow = false; // log the warning only once
          @Override protected boolean removeEldestEntry(Map.Entry<K2, V> e) {
            boolean overflow = expireKey2At(size());
            if (overflow && !gotOverflow) {
              LOG.warn("Metrics intern cache overflow at "+ size() +" for "+ e);
              gotOverflow = true;
            }
            return overflow;
          }
        };
        k1Map.put(k1, k2Map);
      }
      V v = k2Map.get(k2);
      if (v == null) {
        v = newValue(k1, k2);
        k2Map.put(k2, v);
      }
      return v;
    }
  }

  // Sanity limits in case of misuse/abuse.
  static final int MAX_INFO_NAMES = 2010;
  static final int MAX_INFO_DESCS = 100;  // distinct per name

  // Singleton holder (enum idiom) for the interned MetricsInfo cache.
  enum Info {
    INSTANCE;
    final CacheWith2Keys<String, String, MetricsInfo> cache =
        new CacheWith2Keys<String, String, MetricsInfo>() {
      @Override protected boolean expireKey1At(int size) {
        return size > MAX_INFO_NAMES;
      }
      @Override protected boolean expireKey2At(int size) {
        return size > MAX_INFO_DESCS;
      }
      @Override protected MetricsInfo newValue(String name, String desc) {
        return new MetricsInfoImpl(name, desc);
      }
    };
  }

  /**
   * Get a metric info object
   * @param name of the metric
   * @param description of the metric
   * @return an interned metric info object
   */
  public static MetricsInfo info(String name, String description) {
    return Info.INSTANCE.cache.add(name, description);
  }

  // Sanity limits
  static final int MAX_TAG_NAMES = 100;
  static final int MAX_TAG_VALUES = 1000; // distinct per name

  // Singleton holder (enum idiom) for the interned MetricsTag cache.
  enum Tags {
    INSTANCE;
    final CacheWith2Keys<MetricsInfo, String, MetricsTag> cache =
        new CacheWith2Keys<MetricsInfo, String, MetricsTag>() {
      @Override protected boolean expireKey1At(int size) {
        return size > MAX_TAG_NAMES;
      }
      @Override protected boolean expireKey2At(int size) {
        return size > MAX_TAG_VALUES;
      }
      @Override protected MetricsTag newValue(MetricsInfo info, String value) {
        return new MetricsTag(info, value);
      }
    };
  }

  /**
   * Get a metrics tag
   * @param info of the tag
   * @param value of the tag
   * @return an interned metrics tag
   */
  public static MetricsTag tag(MetricsInfo info, String value) {
    return Tags.INSTANCE.cache.add(info, value);
  }

  /**
   * Get a metrics tag
   * @param name of the tag
   * @param description of the tag
   * @param value of the tag
   * @return an interned metrics tag
   */
  public static MetricsTag tag(String name, String description, String value) {
    return Tags.INSTANCE.cache.add(info(name, description), value);
  }
}

View File

@ -0,0 +1,158 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.lang.reflect.Method;
import static com.google.common.base.Preconditions.*;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.annotation.Metric;
import static org.apache.hadoop.metrics2.util.Contracts.*;
/**
 * Metric generated from a method, mostly used by annotation.
 *
 * Wraps a no-arg method on a source object and, on each snapshot, invokes
 * it reflectively and publishes the result as a counter, gauge or tag
 * according to the declared {@link Metric.Type}.
 */
class MethodMetric extends MutableMetric {
  private static final Log LOG = LogFactory.getLog(MethodMetric.class);

  private final Object obj;          // the source object owning the method
  private final Method method;       // no-arg accessor to sample
  private final MetricsInfo info;    // metadata published with each sample
  private final MutableMetric impl;  // type-specific snapshot strategy

  MethodMetric(Object obj, Method method, MetricsInfo info, Metric.Type type) {
    this.obj = checkNotNull(obj, "object");
    this.method = checkArg(method, method.getParameterTypes().length == 0,
                           "Metric method should have no arguments");
    this.method.setAccessible(true);
    this.info = checkNotNull(info, "info");
    impl = newImpl(checkNotNull(type, "metric type"));
  }

  // Select the concrete snapshot strategy for the declared metric type,
  // based on the method's return type.
  private MutableMetric newImpl(Metric.Type metricType) {
    Class<?> resType = method.getReturnType();
    if (metricType == Metric.Type.COUNTER) {
      return newCounter(resType);
    }
    if (metricType == Metric.Type.GAUGE) {
      return newGauge(resType);
    }
    if (metricType == Metric.Type.DEFAULT) {
      // DEFAULT: strings become tags, numbers become gauges.
      return resType == String.class ? newTag(resType) : newGauge(resType);
    }
    if (metricType == Metric.Type.TAG) {
      return newTag(resType);
    }
    checkArg(metricType, false, "unsupported metric type");
    return null; // unreachable: checkArg above always throws
  }

  /** Build a counter strategy; only int/long return types are supported. */
  MutableMetric newCounter(final Class<?> type) {
    if (!isInt(type) && !isLong(type)) {
      throw new MetricsException("Unsupported counter type: "+ type.getName());
    }
    return new MutableMetric() {
      @Override public void snapshot(MetricsRecordBuilder rb, boolean all) {
        try {
          Object ret = method.invoke(obj, (Object[]) null);
          if (isInt(type)) {
            rb.addCounter(info, ((Integer) ret).intValue());
          } else {
            rb.addCounter(info, ((Long) ret).longValue());
          }
        } catch (Exception ex) {
          // Best effort: a failing accessor must not break the whole snapshot.
          LOG.error("Error invoking method "+ method.getName(), ex);
        }
      }
    };
  }

  static boolean isInt(Class<?> type) {
    return type == Integer.TYPE || type == Integer.class;
  }

  static boolean isLong(Class<?> type) {
    return type == Long.TYPE || type == Long.class;
  }

  static boolean isFloat(Class<?> type) {
    return type == Float.TYPE || type == Float.class;
  }

  static boolean isDouble(Class<?> type) {
    return type == Double.TYPE || type == Double.class;
  }

  /** Build a gauge strategy; int/long/float/double return types supported. */
  MutableMetric newGauge(final Class<?> t) {
    if (!isInt(t) && !isLong(t) && !isFloat(t) && !isDouble(t)) {
      throw new MetricsException("Unsupported gauge type: "+ t.getName());
    }
    return new MutableMetric() {
      @Override public void snapshot(MetricsRecordBuilder rb, boolean all) {
        try {
          Object ret = method.invoke(obj, (Object[]) null);
          if (isInt(t)) {
            rb.addGauge(info, ((Integer) ret).intValue());
          } else if (isLong(t)) {
            rb.addGauge(info, ((Long) ret).longValue());
          } else if (isFloat(t)) {
            rb.addGauge(info, ((Float) ret).floatValue());
          } else {
            rb.addGauge(info, ((Double) ret).doubleValue());
          }
        } catch (Exception ex) {
          // Best effort: a failing accessor must not break the whole snapshot.
          LOG.error("Error invoking method "+ method.getName(), ex);
        }
      }
    };
  }

  /** Build a tag strategy; only String return types are supported. */
  MutableMetric newTag(Class<?> resType) {
    if (resType != String.class) {
      throw new MetricsException("Unsupported tag type: "+ resType.getName());
    }
    return new MutableMetric() {
      @Override public void snapshot(MetricsRecordBuilder rb, boolean all) {
        try {
          Object ret = method.invoke(obj, (Object[]) null);
          rb.tag(info, (String) ret);
        } catch (Exception ex) {
          // Best effort: a failing accessor must not break the whole snapshot.
          LOG.error("Error invoking method "+ method.getName(), ex);
        }
      }
    };
  }

  @Override public void snapshot(MetricsRecordBuilder builder, boolean all) {
    impl.snapshot(builder, all);
  }

  /** Derive interned metric info from the method name. */
  static MetricsInfo metricInfo(Method method) {
    return Interns.info(nameFrom(method), "Metric for "+ method.getName());
  }

  /** Derive the metric name: strip a "get" prefix, then capitalize. */
  static String nameFrom(Method method) {
    String methodName = method.getName();
    return StringUtils.capitalize(methodName.startsWith("get")
                                  ? methodName.substring(3) : methodName);
  }
}

View File

@ -0,0 +1,45 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsSource;
/**
 * Metrics annotation helpers.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class MetricsAnnotations {
  /**
   * Make an metrics source from an annotated object.
   * @param source the annotated object.
   * @return a metrics source
   */
  public static MetricsSource makeSource(Object source) {
    // Delegate to the builder factory method so both entry points share a
    // single construction path.
    return newSourceBuilder(source).build();
  }

  /**
   * Create a metrics source builder for an annotated object.
   * @param source the annotated object.
   * @return a metrics source builder
   */
  public static MetricsSourceBuilder newSourceBuilder(Object source) {
    return new MetricsSourceBuilder(source,
        DefaultMetricsFactory.getAnnotatedMetricsFactory());
  }
}

View File

@ -0,0 +1,62 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import com.google.common.base.Objects;
import static com.google.common.base.Preconditions.*;
import org.apache.hadoop.metrics2.MetricsInfo;
/**
 * Making implementing metric info a little easier.
 * Immutable (name, description) pair implementing {@link MetricsInfo}.
 */
class MetricsInfoImpl implements MetricsInfo {
  private final String name, description;

  MetricsInfoImpl(String name, String description) {
    this.name = checkNotNull(name, "name");
    this.description = checkNotNull(description, "description");
  }

  @Override public String name() {
    return name;
  }

  @Override public String description() {
    return description;
  }

  // Equality is defined against the MetricsInfo interface, not this class,
  // so any implementation with equal name and description compares equal.
  @Override public boolean equals(Object obj) {
    if (!(obj instanceof MetricsInfo)) {
      return false;
    }
    MetricsInfo that = (MetricsInfo) obj;
    return Objects.equal(name, that.name())
        && Objects.equal(description, that.description());
  }

  @Override public int hashCode() {
    return Objects.hashCode(name, description);
  }

  @Override public String toString() {
    return Objects.toStringHelper(this)
        .add("name", name).add("description", description)
        .toString();
  }
}

View File

@ -0,0 +1,378 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.util.Collection;
import java.util.Map;
import com.google.common.collect.Maps;
import com.google.common.base.Objects;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.impl.MsInfo;
/**
 * An optional metrics registry class for creating and maintaining a
 * collection of MetricsMutables, making writing metrics source easier.
 *
 * All map mutations and lookups are synchronized on this registry instance.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MetricsRegistry {
  private final Map<String, MutableMetric> metricsMap = Maps.newLinkedHashMap();
  private final Map<String, MetricsTag> tagsMap = Maps.newLinkedHashMap();
  private final MetricsInfo metricsInfo;

  /**
   * Construct the registry with a record name
   * @param name of the record of the metrics
   */
  public MetricsRegistry(String name) {
    metricsInfo = Interns.info(name, name);
  }

  /**
   * Construct the registry with a metadata object
   * @param info the info object for the metrics record/group
   */
  public MetricsRegistry(MetricsInfo info) {
    metricsInfo = info;
  }

  /**
   * @return the info object of the metrics registry
   */
  public MetricsInfo info() {
    return metricsInfo;
  }

  /**
   * Get a metric by name
   * @param name of the metric
   * @return the metric object
   */
  public synchronized MutableMetric get(String name) {
    return metricsMap.get(name);
  }

  /**
   * Get a tag by name
   * @param name of the tag
   * @return the tag object
   */
  public synchronized MetricsTag getTag(String name) {
    return tagsMap.get(name);
  }

  /**
   * Create a mutable integer counter
   * @param name of the metric
   * @param desc metric description
   * @param iVal initial value
   * @return a new counter object
   */
  public MutableCounterInt newCounter(String name, String desc, int iVal) {
    return newCounter(Interns.info(name, desc), iVal);
  }

  /**
   * Create a mutable integer counter
   * @param info metadata of the metric
   * @param iVal initial value
   * @return a new counter object
   * @throws MetricsException if the metric name already exists
   */
  public synchronized MutableCounterInt newCounter(MetricsInfo info, int iVal) {
    checkMetricName(info.name());
    MutableCounterInt ret = new MutableCounterInt(info, iVal);
    metricsMap.put(info.name(), ret);
    return ret;
  }

  /**
   * Create a mutable long integer counter
   * @param name of the metric
   * @param desc metric description
   * @param iVal initial value
   * @return a new counter object
   */
  public MutableCounterLong newCounter(String name, String desc, long iVal) {
    return newCounter(Interns.info(name, desc), iVal);
  }

  /**
   * Create a mutable long integer counter
   * @param info metadata of the metric
   * @param iVal initial value
   * @return a new counter object
   * @throws MetricsException if the metric name already exists
   */
  public synchronized
  MutableCounterLong newCounter(MetricsInfo info, long iVal) {
    checkMetricName(info.name());
    MutableCounterLong ret = new MutableCounterLong(info, iVal);
    metricsMap.put(info.name(), ret);
    return ret;
  }

  /**
   * Create a mutable integer gauge
   * @param name of the metric
   * @param desc metric description
   * @param iVal initial value
   * @return a new gauge object
   */
  public MutableGaugeInt newGauge(String name, String desc, int iVal) {
    return newGauge(Interns.info(name, desc), iVal);
  }

  /**
   * Create a mutable integer gauge
   * @param info metadata of the metric
   * @param iVal initial value
   * @return a new gauge object
   * @throws MetricsException if the metric name already exists
   */
  public synchronized MutableGaugeInt newGauge(MetricsInfo info, int iVal) {
    checkMetricName(info.name());
    MutableGaugeInt ret = new MutableGaugeInt(info, iVal);
    metricsMap.put(info.name(), ret);
    return ret;
  }

  /**
   * Create a mutable long integer gauge
   * @param name of the metric
   * @param desc metric description
   * @param iVal initial value
   * @return a new gauge object
   */
  public MutableGaugeLong newGauge(String name, String desc, long iVal) {
    return newGauge(Interns.info(name, desc), iVal);
  }

  /**
   * Create a mutable long integer gauge
   * @param info metadata of the metric
   * @param iVal initial value
   * @return a new gauge object
   * @throws MetricsException if the metric name already exists
   */
  public synchronized MutableGaugeLong newGauge(MetricsInfo info, long iVal) {
    checkMetricName(info.name());
    MutableGaugeLong ret = new MutableGaugeLong(info, iVal);
    metricsMap.put(info.name(), ret);
    return ret;
  }

  /**
   * Create a mutable metric with stats
   * @param name of the metric
   * @param desc metric description
   * @param sampleName of the metric (e.g., "Ops")
   * @param valueName of the metric (e.g., "Time" or "Latency")
   * @param extended produce extended stat (stdev, min/max etc.) if true.
   * @return a new mutable stat metric object
   * @throws MetricsException if the metric name already exists
   */
  public synchronized MutableStat newStat(String name, String desc,
      String sampleName, String valueName, boolean extended) {
    checkMetricName(name);
    MutableStat ret =
        new MutableStat(name, desc, sampleName, valueName, extended);
    metricsMap.put(name, ret);
    return ret;
  }

  /**
   * Create a mutable metric with stats
   * @param name of the metric
   * @param desc metric description
   * @param sampleName of the metric (e.g., "Ops")
   * @param valueName of the metric (e.g., "Time" or "Latency")
   * @return a new mutable metric object
   */
  public MutableStat newStat(String name, String desc,
                             String sampleName, String valueName) {
    return newStat(name, desc, sampleName, valueName, false);
  }

  /**
   * Create a mutable rate metric
   * @param name of the metric
   * @return a new mutable metric object
   */
  public MutableRate newRate(String name) {
    return newRate(name, name, false);
  }

  /**
   * Create a mutable rate metric
   * @param name of the metric
   * @param description of the metric
   * @return a new mutable rate metric object
   */
  public MutableRate newRate(String name, String description) {
    return newRate(name, description, false);
  }

  /**
   * Create a mutable rate metric (for throughput measurement)
   * @param name of the metric
   * @param desc description
   * @param extended produce extended stat (stdev/min/max etc.) if true
   * @return a new mutable rate metric object
   */
  public MutableRate newRate(String name, String desc, boolean extended) {
    return newRate(name, desc, extended, true);
  }

  /**
   * Create (or return the existing) mutable rate metric and register it.
   * @param name of the metric
   * @param desc description
   * @param extended produce extended stat if true
   * @param returnExisting return the already-registered rate of the same
   *        name instead of failing, if true
   * @return the mutable rate metric object
   * @throws MetricsException if an existing metric of the same name is not
   *         a rate, or (when returnExisting is false) the name is taken
   */
  @InterfaceAudience.Private
  public synchronized MutableRate newRate(String name, String desc,
      boolean extended, boolean returnExisting) {
    if (returnExisting) {
      MutableMetric rate = metricsMap.get(name);
      if (rate != null) {
        if (rate instanceof MutableRate) return (MutableRate) rate;
        throw new MetricsException("Unexpected metrics type "+ rate.getClass()
                                   +" for "+ name);
      }
    }
    checkMetricName(name);
    MutableRate ret = new MutableRate(name, desc, extended);
    metricsMap.put(name, ret);
    return ret;
  }

  // Register an externally created mutable metric under the given name.
  synchronized void add(String name, MutableMetric metric) {
    checkMetricName(name);
    metricsMap.put(name, metric);
  }

  /**
   * Add sample to a stat metric by name.
   * A rate metric is created on the fly if the name is not yet registered.
   * @param name of the metric
   * @param value of the snapshot to add
   * @throws MetricsException if the existing metric is not a stat
   */
  public synchronized void add(String name, long value) {
    MutableMetric m = metricsMap.get(name);
    if (m != null) {
      if (m instanceof MutableStat) {
        ((MutableStat) m).add(value);
      }
      else {
        throw new MetricsException("Unsupported add(value) for metric "+ name);
      }
    }
    else {
      // Default to a rate metric. newRate() already registers it in
      // metricsMap, so no extra put is needed; recurse to record the sample.
      newRate(name);
      add(name, value);
    }
  }

  /**
   * Set the metrics context tag
   * @param name of the context
   * @return the registry itself as a convenience
   */
  public MetricsRegistry setContext(String name) {
    return tag(MsInfo.Context, name, true);
  }

  /**
   * Add a tag to the metrics
   * @param name of the tag
   * @param description of the tag
   * @param value of the tag
   * @return the registry (for keep adding tags)
   */
  public MetricsRegistry tag(String name, String description, String value) {
    return tag(name, description, value, false);
  }

  /**
   * Add a tag to the metrics
   * @param name of the tag
   * @param description of the tag
   * @param value of the tag
   * @param override existing tag if true
   * @return the registry (for keep adding tags)
   */
  public MetricsRegistry tag(String name, String description, String value,
                             boolean override) {
    return tag(Interns.info(name, description), value, override);
  }

  /**
   * Add a tag to the metrics
   * @param info metadata of the tag
   * @param value of the tag
   * @param override existing tag if true
   * @return the registry (for keep adding tags etc.)
   * @throws MetricsException if the tag exists and override is false
   */
  public synchronized
  MetricsRegistry tag(MetricsInfo info, String value, boolean override) {
    if (!override) checkTagName(info.name());
    tagsMap.put(info.name(), Interns.tag(info, value));
    return this;
  }

  public MetricsRegistry tag(MetricsInfo info, String value) {
    return tag(info, value, false);
  }

  Collection<MetricsTag> tags() {
    return tagsMap.values();
  }

  Collection<MutableMetric> metrics() {
    return metricsMap.values();
  }

  // Reject duplicate metric names; must be called under the instance lock.
  private void checkMetricName(String name) {
    if (metricsMap.containsKey(name)) {
      throw new MetricsException("Metric name "+ name +" already exists!");
    }
  }

  // Reject duplicate tag names; must be called under the instance lock.
  private void checkTagName(String name) {
    if (tagsMap.containsKey(name)) {
      throw new MetricsException("Tag "+ name +" already exists!");
    }
  }

  /**
   * Sample all the mutable metrics and put the snapshot in the builder
   * @param builder to contain the metrics snapshot
   * @param all get all the metrics even if the values are not changed.
   */
  public synchronized void snapshot(MetricsRecordBuilder builder, boolean all) {
    for (MetricsTag tag : tags()) {
      builder.add(tag);
    }
    for (MutableMetric metric : metrics()) {
      metric.snapshot(builder, all);
    }
  }

  @Override public String toString() {
    return Objects.toStringHelper(this)
        .add("info", metricsInfo).add("tags", tags()).add("metrics", metrics())
        .toString();
  }
}

View File

@ -0,0 +1,154 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import static com.google.common.base.Preconditions.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
/**
 * Helper class to build metrics source object from annotations.
 *
 * Scans the declared fields and methods of the source object for
 * {@link Metric} annotations, wires the generated mutable metrics into a
 * {@link MetricsRegistry}, and can wrap the whole thing as a
 * {@link MetricsSource}.
 */
@InterfaceAudience.Private
public class MetricsSourceBuilder {
  private static final Log LOG = LogFactory.getLog(MetricsSourceBuilder.class);

  private final Object source;                 // the annotated user object
  private final MutableMetricsFactory factory; // creates mutable metrics from annotations
  private final MetricsRegistry registry;      // registry backing the generated source
  private MetricsInfo info;                    // from the class-level @Metrics annotation, if any
  private boolean hasAtMetric = false;         // saw at least one usable @Metric member
  private boolean hasRegistry = false;         // source declares its own MetricsRegistry field

  MetricsSourceBuilder(Object source, MutableMetricsFactory factory) {
    this.source = checkNotNull(source, "source");
    this.factory = checkNotNull(factory, "mutable metrics factory");
    Class<?> cls = source.getClass();
    registry = initRegistry(source);
    // Process all declared (including private) fields, then methods.
    for (Field field : cls.getDeclaredFields()) {
      add(source, field);
    }
    for (Method method : cls.getDeclaredMethods()) {
      add(source, method);
    }
  }

  /**
   * Build the metrics source.
   * If the annotated object already implements MetricsSource it is returned
   * as is (it must carry a registry when it also uses @Metric annotations);
   * otherwise a source snapshotting the registry is synthesized.
   * @return the metrics source
   */
  public MetricsSource build() {
    if (source instanceof MetricsSource) {
      if (hasAtMetric && !hasRegistry) {
        throw new MetricsException("Hybrid metrics: registry required.");
      }
      return (MetricsSource) source;
    }
    else if (!hasAtMetric) {
      throw new MetricsException("No valid @Metric annotation found.");
    }
    // Synthesized source: snapshot the registry into a record per collection.
    return new MetricsSource() {
      @Override
      public void getMetrics(MetricsCollector builder, boolean all) {
        registry.snapshot(builder.addRecord(registry.info()), all);
      }
    };
  }

  /** @return the info from the class-level @Metrics annotation, or null */
  public MetricsInfo info() {
    return info;
  }

  // Find or create the registry: prefer a MetricsRegistry field declared by
  // the source; otherwise create one from the @Metrics annotation or the
  // class simple name.
  private MetricsRegistry initRegistry(Object source) {
    Class<?> cls = source.getClass();
    MetricsRegistry r = null;
    // Get the registry if it already exists.
    for (Field field : cls.getDeclaredFields()) {
      if (field.getType() != MetricsRegistry.class) continue;
      try {
        field.setAccessible(true);
        r = (MetricsRegistry) field.get(source);
        hasRegistry = r != null;
        break;
      }
      catch (Exception e) {
        // Best effort: an inaccessible field just means we keep looking.
        LOG.warn("Error accessing field "+ field, e);
        continue;
      }
    }
    // Create a new registry according to annotation
    for (Annotation annotation : cls.getAnnotations()) {
      if (annotation instanceof Metrics) {
        Metrics ma = (Metrics) annotation;
        info = factory.getInfo(cls, ma);
        if (r == null) {
          r = new MetricsRegistry(info);
        }
        r.setContext(ma.context());
      }
    }
    if (r == null) return new MetricsRegistry(cls.getSimpleName());
    return r;
  }

  // Inject a mutable metric into each null field annotated with @Metric.
  private void add(Object source, Field field) {
    for (Annotation annotation : field.getAnnotations()) {
      if (!(annotation instanceof Metric)) continue;
      try {
        // skip fields already set
        field.setAccessible(true);
        if (field.get(source) != null) continue;
      }
      catch (Exception e) {
        // Best effort: skip unreadable fields instead of failing the build.
        LOG.warn("Error accessing field "+ field +" annotated with"+
                 annotation, e);
        continue;
      }
      MutableMetric mutable = factory.newForField(field, (Metric) annotation,
                                                  registry);
      if (mutable != null) {
        try {
          field.set(source, mutable);
          hasAtMetric = true;
        }
        catch (Exception e) {
          // Injection failure here is fatal: the field was expected writable.
          throw new MetricsException("Error setting field "+ field +
                                     " annotated with "+ annotation, e);
        }
      }
    }
  }

  // Register a method-backed metric for each method annotated with @Metric.
  private void add(Object source, Method method) {
    for (Annotation annotation : method.getAnnotations()) {
      if (!(annotation instanceof Metric)) continue;
      factory.newForMethod(source, method, (Metric) annotation, registry);
      hasAtMetric = true;
    }
  }
}

View File

@ -0,0 +1,47 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import static com.google.common.base.Preconditions.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
/**
 * The mutable counter (monotonically increasing) metric interface.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class MutableCounter extends MutableMetric {
  private final MetricsInfo info;

  /**
   * Construct the counter with its metadata.
   * @param info metadata of the counter; must not be null
   */
  protected MutableCounter(MetricsInfo info) {
    this.info = checkNotNull(info, "counter info");
  }

  /**
   * Increment the metric value by 1.
   */
  public abstract void incr();

  /** @return the metadata of the counter */
  protected MetricsInfo info() {
    return info;
  }
}

View File

@ -0,0 +1,66 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
/**
 * A mutable int counter for implementing metrics sources.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MutableCounterInt extends MutableCounter {
  // volatile so unsynchronized readers (value(), snapshot()) see updates
  private volatile int value;

  MutableCounterInt(MetricsInfo info, int initValue) {
    super(info);
    this.value = initValue;
  }

  @Override
  public synchronized void incr() {
    incr(1); // reentrant: delegates to the delta form
  }

  /**
   * Increment the value by a delta
   * @param delta of the increment
   */
  public synchronized void incr(int delta) {
    value += delta;
    setChanged();
  }

  /** @return the current value of the counter */
  public int value() {
    return value;
  }

  @Override
  public void snapshot(MetricsRecordBuilder builder, boolean all) {
    if (!all && !changed()) {
      return; // nothing new to report
    }
    builder.addCounter(info(), value);
    clearChanged();
  }
}

View File

@ -0,0 +1,67 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
/**
 * A mutable long counter.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MutableCounterLong extends MutableCounter {
  // volatile so unsynchronized readers (value(), snapshot()) see updates
  private volatile long value;

  MutableCounterLong(MetricsInfo info, long initValue) {
    super(info);
    this.value = initValue;
  }

  @Override
  public synchronized void incr() {
    incr(1L); // reentrant: delegates to the delta form
  }

  /**
   * Increment the value by a delta
   * @param delta of the increment
   */
  public synchronized void incr(long delta) {
    value += delta;
    setChanged();
  }

  /** @return the current value of the counter */
  public long value() {
    return value;
  }

  @Override
  public void snapshot(MetricsRecordBuilder builder, boolean all) {
    if (!all && !changed()) {
      return; // nothing new to report
    }
    builder.addCounter(info(), value);
    clearChanged();
  }
}

View File

@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import static com.google.common.base.Preconditions.*;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
/**
 * The mutable gauge metric interface (a value that can go up and down).
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class MutableGauge extends MutableMetric {
  private final MetricsInfo info;

  /**
   * Construct the gauge with its metadata.
   * @param info metadata of the gauge; must not be null
   */
  protected MutableGauge(MetricsInfo info) {
    this.info = checkNotNull(info, "metric info");
  }

  /**
   * Increment the value of the metric by 1
   */
  public abstract void incr();

  /**
   * Decrement the value of the metric by 1
   */
  public abstract void decr();

  /** @return the metadata of the gauge */
  protected MetricsInfo info() {
    return info;
  }
}

View File

@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
/**
 * A mutable {@code int} gauge: a metric whose value may rise and fall.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MutableGaugeInt extends MutableGauge {

  // volatile: readers never lock; writers synchronize so that
  // read-modify-write updates (incr/decr) are atomic.
  private volatile int value;

  MutableGaugeInt(MetricsInfo info, int initValue) {
    super(info);
    this.value = initValue;
  }

  @Override
  public synchronized void incr() {
    value += 1;
    setChanged();
  }

  /**
   * Raise the gauge by an arbitrary amount.
   * @param delta amount to add
   */
  public synchronized void incr(int delta) {
    value += delta;
    setChanged();
  }

  @Override
  public synchronized void decr() {
    value -= 1;
    setChanged();
  }

  /**
   * Lower the gauge by an arbitrary amount.
   * @param delta amount to subtract
   */
  public synchronized void decr(int delta) {
    value -= delta;
    setChanged();
  }

  /**
   * Overwrite the gauge with a new value.
   * @param value the value to set
   */
  public void set(int value) {
    this.value = value;
    setChanged();
  }

  /**
   * @return the current value of the gauge
   */
  public int value() {
    return value;
  }

  @Override
  public void snapshot(MetricsRecordBuilder builder, boolean all) {
    // Emit only when forced (all) or when mutated since the last snapshot.
    if (all || changed()) {
      builder.addGauge(info(), value);
      clearChanged();
    }
  }
}

View File

@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
/**
 * A mutable long gauge: a metric whose value may rise and fall.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MutableGaugeLong extends MutableGauge {

  // volatile so unsynchronized readers (value(), snapshot()) see the latest
  // value; mutators synchronize to make read-modify-write atomic.
  private volatile long value;

  MutableGaugeLong(MetricsInfo info, long initValue) {
    super(info);
    this.value = initValue;
  }

  /**
   * @return the current value of the gauge
   */
  public long value() {
    return value;
  }

  @Override
  public synchronized void incr() {
    ++value;
    setChanged();
  }

  /**
   * Increment by delta
   * @param delta of the increment
   */
  public synchronized void incr(long delta) {
    value += delta;
    setChanged();
  }

  @Override
  public synchronized void decr() {
    --value;
    setChanged();
  }

  /**
   * Decrement by delta
   * @param delta of the decrement
   */
  public synchronized void decr(long delta) {
    value -= delta;
    setChanged();
  }

  /**
   * Set the value of the metric
   * @param value to set
   */
  public void set(long value) {
    this.value = value;
    setChanged();
  }

  // Fix: @Override was missing here (present on the int variant); it enables
  // compile-time checking that this actually overrides MutableMetric#snapshot.
  @Override
  public void snapshot(MetricsRecordBuilder builder, boolean all) {
    if (all || changed()) {
      builder.addGauge(info(), value);
      clearChanged();
    }
  }
}

View File

@ -0,0 +1,62 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
/**
 * Base class of all mutable metrics. Tracks a "changed" (dirty) flag so
 * that unchanged metrics can be skipped when snapshotting.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class MutableMetric {

  // Starts true so the very first snapshot always emits the metric.
  private volatile boolean changed = true;

  /**
   * Get a snapshot of the metric
   * @param builder the metrics record builder
   * @param all if true, snapshot unchanged metrics as well
   */
  public abstract void snapshot(MetricsRecordBuilder builder, boolean all);

  /**
   * Snapshot the metric only if it changed since the last snapshot.
   * @param builder the metrics record builder
   */
  public void snapshot(MetricsRecordBuilder builder) {
    snapshot(builder, false);
  }

  /**
   * Mark the metric as changed; called from mutator operations.
   */
  protected void setChanged() {
    changed = true;
  }

  /**
   * Clear the changed flag; called from snapshot operations.
   */
  protected void clearChanged() {
    changed = false;
  }

  /**
   * @return true if metric is changed since last snapshot
   */
  public boolean changed() {
    return changed;
  }
}

View File

@ -0,0 +1,151 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class MutableMetricsFactory {
  private static final Log LOG = LogFactory.getLog(MutableMetricsFactory.class);

  /**
   * Create and register a mutable metric for an annotated field.
   * A custom factory subclass gets first crack via the protected
   * {@link #newForField(Field, Metric)} hook; otherwise the field's declared
   * type selects one of the builtin mutable metric classes.
   * @param field the annotated field
   * @param annotation the Metric annotation on the field
   * @param registry to register the new metric in
   * @return the new mutable metric (already added to the registry)
   * @throws MetricsException if the field type is not a supported metric type
   */
  MutableMetric newForField(Field field, Metric annotation,
      MetricsRegistry registry) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("field "+ field +" with annotation "+ annotation);
    }
    MetricsInfo info = getInfo(annotation, field);
    // Custom hook first: a non-null result short-circuits the builtin mapping.
    MutableMetric metric = newForField(field, annotation);
    if (metric != null) {
      registry.add(info.name(), metric);
      return metric;
    }
    final Class<?> cls = field.getType();
    if (cls == MutableCounterInt.class) {
      return registry.newCounter(info, 0);
    }
    if (cls == MutableCounterLong.class) {
      return registry.newCounter(info, 0L);
    }
    if (cls == MutableGaugeInt.class) {
      return registry.newGauge(info, 0);
    }
    if (cls == MutableGaugeLong.class) {
      return registry.newGauge(info, 0L);
    }
    if (cls == MutableRate.class) {
      return registry.newRate(info.name(), info.description(),
          annotation.always());
    }
    if (cls == MutableRates.class) {
      return new MutableRates(registry);
    }
    if (cls == MutableStat.class) {
      return registry.newStat(info.name(), info.description(),
          annotation.sampleName(), annotation.valueName(),
          annotation.always());
    }
    throw new MetricsException("Unsupported metric field "+ field.getName() +
        " of type "+ field.getType().getName());
  }

  /**
   * Create and register a mutable metric for an annotated method.
   * Falls back to a MethodMetric wrapping the method when the custom
   * {@link #newForMethod(Object, Method, Metric)} hook returns null.
   * @param source the metrics source object owning the method
   * @param method the annotated method
   * @param annotation the Metric annotation on the method
   * @param registry to register the new metric in
   * @return the new mutable metric (already added to the registry)
   */
  MutableMetric newForMethod(Object source, Method method, Metric annotation,
      MetricsRegistry registry) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("method "+ method +" with annotation "+ annotation);
    }
    MetricsInfo info = getInfo(annotation, method);
    MutableMetric metric = newForMethod(source, method, annotation);
    metric = metric != null ? metric :
        new MethodMetric(source, method, info, annotation.type());
    registry.add(info.name(), metric);
    return metric;
  }

  /**
   * Override to handle custom mutable metrics for fields
   * @param field of the metric
   * @param annotation of the field
   * @return a new metric object or null
   */
  protected MutableMetric newForField(Field field, Metric annotation) {
    return null;
  }

  /**
   * Override to handle custom mutable metrics for methods
   * @param source the metrics source object
   * @param method to return the metric
   * @param annotation of the method
   * @return a new metric object or null
   */
  protected MutableMetric newForMethod(Object source, Method method,
      Metric annotation) {
    return null;
  }

  /**
   * Resolve metric metadata for a field, defaulting the name to the
   * capitalized field name.
   */
  protected MetricsInfo getInfo(Metric annotation, Field field) {
    return getInfo(annotation, getName(field));
  }

  /**
   * @return the default metric name for a field: its capitalized name.
   */
  protected String getName(Field field) {
    return StringUtils.capitalize(field.getName());
  }

  /**
   * Resolve metric metadata for a method, defaulting the name to the
   * capitalized method name (minus any "get" prefix).
   */
  protected MetricsInfo getInfo(Metric annotation, Method method) {
    return getInfo(annotation, getName(method));
  }

  /**
   * Resolve record metadata for a class-level Metrics annotation;
   * the simple class name is used when no name is given, and the name
   * doubles as the description when no "about" text is given.
   */
  protected MetricsInfo getInfo(Class<?> cls, Metrics annotation) {
    String name = annotation.name();
    String about = annotation.about();
    String name2 = name.isEmpty() ? cls.getSimpleName() : name;
    return Interns.info(name2, about.isEmpty() ? name2 : about);
  }

  /**
   * @return the default metric name for a method: a "get" prefix is
   * stripped, and the remainder is capitalized.
   */
  protected String getName(Method method) {
    String methodName = method.getName();
    if (methodName.startsWith("get")) {
      return StringUtils.capitalize(methodName.substring(3));
    }
    return StringUtils.capitalize(methodName);
  }

  /**
   * Interpret the Metric annotation's value array:
   * two elements = {name, description}, one element = {description} with the
   * default name, none = default name used for both name and description.
   */
  protected MetricsInfo getInfo(Metric annotation, String defaultName) {
    String[] value = annotation.value();
    if (value.length == 2) {
      return Interns.info(value[0], value[1]);
    }
    if (value.length == 1) {
      return Interns.info(defaultName, value[0]);
    }
    return Interns.info(defaultName, defaultName);
  }
}

View File

@ -0,0 +1,34 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
 * A convenient mutable metric for throughput measurement: a MutableStat
 * preconfigured with "Ops" as the sample name and "Time" as the value name,
 * so a rate named "Foo" emits metrics such as FooNumOps and FooAvgTime.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MutableRate extends MutableStat {

  // extended=true additionally publishes stdev plus interval and
  // all-time min/max (see MutableStat#snapshot).
  MutableRate(String name, String description, boolean extended) {
    super(name, description, "Ops", "Time", extended);
  }
}

View File

@ -0,0 +1,80 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.lang.reflect.Method;
import java.util.Set;
import static com.google.common.base.Preconditions.*;
import com.google.common.collect.Sets;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
/**
 * Helper class to manage a group of mutable rate metrics,
 * one per named operation, backed by a shared registry.
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MutableRates extends MutableMetric {
  static final Log LOG = LogFactory.getLog(MutableRates.class);

  private final MetricsRegistry registry;
  // Protocol classes already expanded into the registry; prevents
  // re-registering the same methods on repeated init() calls.
  private final Set<Class<?>> protocolCache = Sets.newHashSet();

  MutableRates(MetricsRegistry registry) {
    this.registry = checkNotNull(registry, "metrics registry");
  }

  /**
   * Initialize the registry with all the methods in a protocol
   * so they all show up in the first snapshot.
   * Convenient for JMX implementations.
   * @param protocol the protocol class
   */
  public void init(Class<?> protocol) {
    if (protocolCache.contains(protocol)) return;
    protocolCache.add(protocol);
    for (Method method : protocol.getDeclaredMethods()) {
      String name = method.getName();
      // Fix: guard the debug log (was unconditional), consistent with the
      // isDebugEnabled checks used elsewhere in this package.
      if (LOG.isDebugEnabled()) {
        LOG.debug(name);
      }
      // Log and continue so one bad/duplicate method name does not abort
      // registration of the remaining methods.
      try { registry.newRate(name, name, false, true); }
      catch (Exception e) {
        LOG.error("Error creating rate metrics for "+ method.getName(), e);
      }
    }
  }

  /**
   * Add a rate sample for a rate metric
   * @param name of the rate metric
   * @param elapsed time
   */
  public void add(String name, long elapsed) {
    registry.add(name, elapsed);
  }

  @Override
  public void snapshot(MetricsRecordBuilder rb, boolean all) {
    registry.snapshot(rb, all);
  }
}

View File

@ -0,0 +1,145 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.util.SampleStat;
import static org.apache.hadoop.metrics2.lib.Interns.*;
/**
* A mutable metric with stats.
*
* Useful for keeping throughput/latency stats.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MutableStat extends MutableMetric {
private final MetricsInfo numInfo;
private final MetricsInfo avgInfo;
private final MetricsInfo stdevInfo;
private final MetricsInfo iMinInfo;
private final MetricsInfo iMaxInfo;
private final MetricsInfo minInfo;
private final MetricsInfo maxInfo;
private final SampleStat intervalStat = new SampleStat();
private final SampleStat prevStat = new SampleStat();
private final SampleStat.MinMax minMax = new SampleStat.MinMax();
private long numSamples = 0;
private boolean extended = false;
/**
* Construct a sample statistics metric
* @param name of the metric
* @param description of the metric
* @param sampleName of the metric (e.g. "Ops")
* @param valueName of the metric (e.g. "Time", "Latency")
* @param extended create extended stats (stdev, min/max etc.) by default.
*/
public MutableStat(String name, String description,
String sampleName, String valueName, boolean extended) {
String ucName = StringUtils.capitalize(name);
String usName = StringUtils.capitalize(sampleName);
String uvName = StringUtils.capitalize(valueName);
String desc = StringUtils.uncapitalize(description);
String lsName = StringUtils.uncapitalize(sampleName);
String lvName = StringUtils.uncapitalize(valueName);
numInfo = info(ucName +"Num"+ usName, "Number of "+ lsName +" for "+ desc);
avgInfo = info(ucName +"Avg"+ uvName, "Average "+ lvName +" for "+ desc);
stdevInfo = info(ucName +"Stdev"+ uvName,
"Standard deviation of "+ lvName +" for "+ desc);
iMinInfo = info(ucName +"IMin"+ uvName,
"Interval min "+ lvName +" for "+ desc);
iMaxInfo = info(ucName + "IMax"+ uvName,
"Interval max "+ lvName +" for "+ desc);
minInfo = info(ucName +"Min"+ uvName, "Min "+ lvName +" for "+ desc);
maxInfo = info(ucName +"Max"+ uvName, "Max "+ lvName +" for "+ desc);
this.extended = extended;
}
/**
* Construct a snapshot stat metric with extended stat off by default
* @param name of the metric
* @param description of the metric
* @param sampleName of the metric (e.g. "Ops")
* @param valueName of the metric (e.g. "Time", "Latency")
*/
public MutableStat(String name, String description,
String sampleName, String valueName) {
this(name, description, sampleName, valueName, false);
}
/**
* Add a number of samples and their sum to the running stat
* @param numSamples number of samples
* @param sum of the samples
*/
public synchronized void add(long numSamples, long sum) {
intervalStat.add(numSamples, sum);
setChanged();
}
/**
* Add a snapshot to the metric
* @param value of the metric
*/
public synchronized void add(long value) {
intervalStat.add(value);
minMax.add(value);
setChanged();
}
public synchronized void snapshot(MetricsRecordBuilder builder, boolean all) {
if (all || changed()) {
numSamples += intervalStat.numSamples();
builder.addCounter(numInfo, numSamples)
.addGauge(avgInfo, lastStat().mean());
if (extended) {
builder.addGauge(stdevInfo, lastStat().stddev())
.addGauge(iMinInfo, lastStat().min())
.addGauge(iMaxInfo, lastStat().max())
.addGauge(minInfo, minMax.min())
.addGauge(maxInfo, minMax.max());
}
if (changed()) {
if (numSamples > 0) {
intervalStat.copyTo(prevStat);
intervalStat.reset();
}
clearChanged();
}
}
}
private SampleStat lastStat() {
return changed() ? intervalStat : prevStat;
}
/**
* Reset the all time min max of the metric
*/
public void resetMinMax() {
minMax.reset();
}
}

View File

@ -0,0 +1,66 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import java.util.Map;
import com.google.common.base.Joiner;
import com.google.common.collect.Maps;
import org.apache.hadoop.classification.InterfaceAudience;
/**
 * Generates predictable and user-friendly unique names
 */
@InterfaceAudience.Private
public class UniqueNames {

  /** A base name together with the numeric suffix last handed out for it. */
  static class Count {
    final String baseName;
    int value;

    Count(String name, int value) {
      baseName = name;
      this.value = value;
    }
  }

  static final Joiner joiner = Joiner.on('-');
  // Maps every name already returned (base or suffixed) to its Count so
  // later requests for the same base continue the numbering.
  final Map<String, Count> map = Maps.newHashMap();

  /**
   * Return a name unique within this object: the name itself on first use,
   * otherwise name-1, name-2, ... for subsequent uses of the same name.
   * @param name the requested (base) name
   * @return the possibly suffixed unique name
   */
  public synchronized String uniqueName(String name) {
    Count c = map.get(name);
    if (c == null) {
      // First time this name is seen: record it and return it unchanged.
      c = new Count(name, 0);
      map.put(name, c);
      return name;
    }
    // The entry under this key may belong to a different base (e.g. a
    // previously generated "foo-1" colliding with a requested "foo-1");
    // start a fresh counter for the requested base in that case.
    if (!c.baseName.equals(name)) c = new Count(name, 0);
    do {
      String newName = joiner.join(name, ++c.value);
      Count c2 = map.get(newName);
      if (c2 == null) {
        map.put(newName, c);
        return newName;
      }
      // handle collisions, assume to be rare cases,
      // eg: people explicitly passed in name-\d+ names.
    } while (true);
  }
}

View File

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A collection of library classes for implementing metrics sources
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
package org.apache.hadoop.metrics2.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -0,0 +1,349 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
<h1>Metrics 2.0</h1>
<ul id="toc">
<li><a href="#overview">Overview</a></li>
<li><a href="#gettingstarted">Getting Started</a></li>
<li><a href="#config">Configuration</a></li>
<li><a href="#filtering">Metrics Filtering</a></li>
<li><a href="#instrumentation">Metrics Instrumentation Strategy</a></li>
<li><a href="#migration">Migration from previous system</a></li>
</ul>
<h2><a name="overview">Overview</a></h2>
<p>This package provides a framework for metrics instrumentation
and publication.
</p>
<p>The framework provides a variety of ways to implement metrics
instrumentation easily via the simple
{@link org.apache.hadoop.metrics2.MetricsSource} interface
or the even simpler and more concise and declarative metrics annotations.
The consumers of metrics just need to implement the simple
{@link org.apache.hadoop.metrics2.MetricsSink} interface. Producers
register the metrics sources with a metrics system, while consumers
register the sinks. A default metrics system is provided to marshal
metrics from sources to sinks based on (per source/sink) configuration
options. All the metrics are also published and queryable via the
standard JMX MBean interface. This document targets the framework users.
Framework developers could also consult the
<a href="http://wiki.apache.org/hadoop/HADOOP-6728-MetricsV2">design
document</a> for architecture and implementation notes.
</p>
<h3>Sub-packages</h3>
<dl>
<dt><code>org.apache.hadoop.metrics2.annotation</code></dt>
<dd>Public annotation interfaces for simpler metrics instrumentation.
</dd>
<dt><code>org.apache.hadoop.metrics2.impl</code></dt>
<dd>Implementation classes of the framework for interface and/or
abstract classes defined in the top-level package. Sink plugin code
usually does not need to reference any class here.
</dd>
<dt> <code>org.apache.hadoop.metrics2.lib</code></dt>
<dd>Convenience classes for implementing metrics sources, including the
Mutable[{@link org.apache.hadoop.metrics2.lib.MutableGauge Gauge}*|
{@link org.apache.hadoop.metrics2.lib.MutableCounter Counter}*|
{@link org.apache.hadoop.metrics2.lib.MutableStat Stat}] and
{@link org.apache.hadoop.metrics2.lib.MetricsRegistry}.
</dd>
<dt> <code>org.apache.hadoop.metrics2.filter</code></dt>
<dd>Builtin metrics filter implementations include the
{@link org.apache.hadoop.metrics2.filter.GlobFilter} and
{@link org.apache.hadoop.metrics2.filter.RegexFilter}.
</dd>
<dt><code>org.apache.hadoop.metrics2.source</code></dt>
<dd>Builtin metrics source implementations including the
{@link org.apache.hadoop.metrics2.source.JvmMetrics}.
</dd>
<dt> <code>org.apache.hadoop.metrics2.sink</code></dt>
<dd>Builtin metrics sink implementations including the
{@link org.apache.hadoop.metrics2.sink.FileSink}.
</dd>
<dt> <code>org.apache.hadoop.metrics2.util</code></dt>
<dd>General utilities for implementing metrics sinks etc., including the
{@link org.apache.hadoop.metrics2.util.MetricsCache}.
</dd>
</dl>
<h2><a name="gettingstarted">Getting started</a></h2>
<h3>Implementing metrics sources</h3>
<table width="99%" border="1" cellspacing="0" cellpadding="4">
<tbody>
<tr>
<th>Using annotations</th><th>Using MetricsSource interface</th>
</tr>
<tr><td>
<pre>
&#064;Metrics(context="MyContext")
class MyStat {
&#064;Metric("My metric description")
public int getMyMetric() {
return 42;
}
}</pre></td><td>
<pre>
class MyStat implements MetricsSource {
&#064;Override
public void getMetrics(MetricsCollector collector, boolean all) {
collector.addRecord("MyStat")
.setContext("MyContext")
.addGauge(info("MyMetric", "My metric description"), 42);
}
}
</pre>
</td>
</tr>
</tbody>
</table>
<p>In this example we introduced the following:</p>
<dl>
<dt><em>&#064;Metrics</em></dt>
<dd>The {@link org.apache.hadoop.metrics2.annotation.Metrics} annotation is
used to indicate that the class is a metrics source.
</dd>
<dt><em>MyContext</em></dt>
<dd>The optional context name typically identifies either the
application, or a group of modules within an application or
library.
</dd>
<dt><em>MyStat</em></dt>
<dd>The class name is used (by default, or specified by name=value parameter
in the Metrics annotation) as the metrics record name for
which a set of metrics are to be reported. For example, you could have a
record named "CacheStat" for reporting a number of statistics relating to
the usage of some cache in your application.</dd>
<dt><em>&#064;Metric</em></dt>
<dd>The {@link org.apache.hadoop.metrics2.annotation.Metric} annotation
identifies a particular metric, which in this case, is the
result of the method call getMyMetric of the "gauge" (default) type,
which means it can vary in both directions, compared with a "counter"
type, which can only increase or stay the same. The name of the metric
is "MyMetric" (inferred from getMyMetric method name by default.) The 42
here is the value of the metric which can be substituted with any valid
java expressions.
</dd>
</dl>
<p>Note, the {@link org.apache.hadoop.metrics2.MetricsSource} interface is
more verbose but more flexible,
allowing generated metrics names and multiple records. In fact, the
annotation interface is implemented with the MetricsSource interface
internally.</p>
<h3>Implementing metrics sinks</h3>
<pre>
public class MySink implements MetricsSink {
public void putMetrics(MetricsRecord record) {
System.out.print(record);
}
public void init(SubsetConfiguration conf) {}
public void flush() {}
}</pre>
<p>In this example there are three additional concepts:</p>
<dl>
<dt><em>record</em></dt>
<dd>This object corresponds to the record created in metrics sources
e.g., the "MyStat" in previous example.
</dd>
<dt><em>conf</em></dt>
<dd>The configuration object for the sink instance with prefix removed.
So you can get any sink specific configuration using the usual
get* method.
</dd>
<dt><em>flush</em></dt>
<dd>This method is called for each update cycle, which may involve
more than one record. The sink should try to flush any buffered metrics
to its backend upon the call. But it's not required that the
implementation is synchronous.
</dd>
</dl>
<p>In order to make use of our <code>MyMetrics</code> and <code>MySink</code>,
they need to be hooked up to a metrics system. In this case (and most
cases), the <code>DefaultMetricsSystem</code> would suffice.
</p>
<pre>
DefaultMetricsSystem.initialize("test"); // called once per application
DefaultMetricsSystem.register(new MyStat());</pre>
<h2><a name="config">Metrics system configuration</a></h2>
<p>Sinks are usually specified in a configuration file, say,
"hadoop-metrics2-test.properties", as:
</p>
<pre>
test.sink.mysink0.class=com.example.hadoop.metrics.MySink</pre>
<p>The configuration syntax is:</p>
<pre>
[prefix].[source|sink|jmx|].[instance].[option]</pre>
<p>In the previous example, <code>test</code> is the prefix and
<code>mysink0</code> is an instance name.
<code>DefaultMetricsSystem</code> would try to load
<code>hadoop-metrics2-[prefix].properties</code> first, and if not found,
try the default <code>hadoop-metrics2.properties</code> in the class path.
Note, the <code>[instance]</code> is an arbitrary name to uniquely
identify a particular sink instance. The asterisk (<code>*</code>) can be
used to specify default options.
</p>
<p>Consult the metrics instrumentation in jvm, rpc, hdfs and mapred, etc.
for more examples.
</p>
<h2><a name="filtering">Metrics Filtering</a></h2>
<p>One of the features of the default metrics system is metrics filtering
configuration by source, context, record/tags and metrics. The least
expensive way to filter out metrics would be at the source level, e.g.,
filtering out source named "MyMetrics". The most expensive way would be
per metric filtering.
</p>
<p>Here are some examples:</p>
<pre>
test.sink.file0.class=org.apache.hadoop.metrics2.sink.FileSink
test.sink.file0.context=foo</pre>
<p>In this example, we configured one sink instance that would
accept metrics from context <code>foo</code> only.
</p>
<pre>
*.source.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
test.*.source.filter.include=foo
test.*.source.filter.exclude=bar</pre>
<p>In this example, we specify a source filter that includes source
<code>foo</code> and excludes <code>bar</code>. When only include
patterns are specified, the filter operates in the white listing mode,
where only matched sources are included. Likewise, when only exclude
patterns are specified, only matched sources are excluded. Sources that
are not matched by either pattern are included as well when both patterns
are present. Note, the include patterns have precedence over the exclude
patterns.
</p>
<p>Similarly, you can specify the <code>record.filter</code> and
<code>metrics.filter</code> options, which operate at record and metric
level, respectively. Filters can be combined to optimize
the filtering efficiency.</p>
<h2><a name="instrumentation">Metrics instrumentation strategy</a></h2>
In previous examples, we showed a minimal example to use the
metrics framework. In a larger system (like Hadoop) that allows
custom metrics instrumentation, we recommend the following strategy:
<pre>
&#064;Metrics(about="My metrics description", context="MyContext")
class MyMetrics extends MyInstrumentation {
&#064;Metric("My gauge description") MutableGaugeInt gauge0;
&#064;Metric("My counter description") MutableCounterLong counter0;
&#064;Metric("My rate description") MutableRate rate0;
&#064;Override public void setGauge0(int value) { gauge0.set(value); }
&#064;Override public void incrCounter0() { counter0.incr(); }
&#064;Override public void addRate0(long elapsed) { rate0.add(elapsed); }
}
</pre>
Note, in this example we introduced the following:
<dl>
<dt><em>MyInstrumentation</em></dt>
<dd>This is usually an abstract class (or interface) to define an
instrumentation interface (incrCounter0 etc.) that allows different
implementations. This could be a mechanism to allow different metrics
systems to be used at runtime via configuration.
</dd>
<dt><em>Mutable[Gauge*|Counter*|Rate]</em></dt>
<dd>These are library classes to manage mutable metrics for
implementations of metrics sources. They produce immutable gauge and
counters (Metric[Gauge*|Counter*]) for downstream consumption (sinks)
upon <code>snapshot</code>. The <code>MutableRate</code>
in particular, provides a way to measure latency and throughput of an
operation. In this particular case, it produces a long counter
"Rate0NumOps" and double gauge "Rate0AvgTime" when snapshotted.
</dd>
</dl>
<h2><a name="migration">Migration from previous system</a></h2>
<p>Users of the previous metrics system would notice the lack of
<code>context</code> prefix in the configuration examples. The new
metrics system decouples the concept for context (for grouping) with the
implementation where a particular context object does the updating and
publishing of metrics, which causes problems when you want to have a
single context to be consumed by multiple backends. You would also have to
configure an implementation instance per context, even if you have a
backend that can handle multiple contexts (file, ganglia etc.):
</p>
<table width="99%" border="1" cellspacing="0" cellpadding="4">
<tbody>
<tr>
<th width="40%">Before</th><th>After</th>
</tr>
<tr>
<td><pre>
context1.class=org.hadoop.metrics.file.FileContext
context2.class=org.hadoop.metrics.file.FileContext
...
contextn.class=org.hadoop.metrics.file.FileContext</pre>
</td>
<td><pre>
myprefix.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink</pre>
</td>
</tr>
</tbody>
</table>
<p>In the new metrics system, you can simulate the previous behavior by
using the context option in the sink options like the following:
</p>
<table width="99%" border="1" cellspacing="0" cellpadding="4">
<tbody>
<tr>
<th width="40%">Before</th><th>After</th>
</tr>
<tr>
<td><pre>
context0.class=org.hadoop.metrics.file.FileContext
context0.fileName=context0.out
context1.class=org.hadoop.metrics.file.FileContext
context1.fileName=context1.out
...
contextn.class=org.hadoop.metrics.file.FileContext
contextn.fileName=contextn.out</pre>
</td>
<td><pre>
myprefix.sink.*.class=org.apache.hadoop.metrics2.sink.FileSink
myprefix.sink.file0.context=context0
myprefix.sink.file0.filename=context0.out
myprefix.sink.file1.context=context1
myprefix.sink.file1.filename=context1.out
...
myprefix.sink.filen.context=contextn
myprefix.sink.filen.filename=contextn.out</pre>
</td>
</tr>
</tbody>
</table>
<p>to send metrics of a particular context to a particular backend. Note,
<code>myprefix</code> is an arbitrary prefix for configuration groupings,
typically they are the name of a particular process
(<code>namenode</code>, <code>jobtracker</code>, etc.)
</p>
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
package org.apache.hadoop.metrics2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -0,0 +1,86 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.sink;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.PrintWriter;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsSink;
import org.apache.hadoop.metrics2.MetricsTag;
/**
* A metrics sink that writes to a file
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class FileSink implements MetricsSink {
  private static final String FILENAME_KEY = "filename";
  // Destination stream: a file (append mode) when "filename" is configured,
  // otherwise standard output.
  private PrintWriter writer;

  @Override
  public void init(SubsetConfiguration conf) {
    String filename = conf.getString(FILENAME_KEY);
    try {
      if (filename == null) {
        writer = new PrintWriter(System.out);
      } else {
        // Append so that restarts do not clobber previously written metrics.
        writer = new PrintWriter(new FileWriter(new File(filename), true));
      }
    } catch (Exception e) {
      throw new MetricsException("Error creating "+ filename, e);
    }
  }

  @Override
  public void putMetrics(MetricsRecord record) {
    // Line format: "<timestamp> <context>.<name>: tag=value, ..., metric=value, ..."
    writer.print(record.timestamp());
    writer.print(" ");
    writer.print(record.context());
    writer.print(".");
    writer.print(record.name());
    // First key/value pair is preceded by ": ", later ones by ", ".
    String separator = ": ";
    for (MetricsTag tag : record.tags()) {
      writer.print(separator);
      writer.print(tag.name());
      writer.print("=");
      writer.print(tag.value());
      separator = ", ";
    }
    for (AbstractMetric metric : record.metrics()) {
      writer.print(separator);
      writer.print(metric.name());
      writer.print("=");
      writer.print(metric.value());
      separator = ", ";
    }
    writer.println();
  }

  @Override
  public void flush() {
    writer.flush();
  }
}

View File

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Builtin metrics sinks
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
package org.apache.hadoop.metrics2.sink;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.source;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* JVM related metrics. Mostly used by various servers as part of the metrics
* they export.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class JvmMetrics {
  // Intentionally empty placeholder for javadoc to prevent broken links,
  // until HADOOP-6920 (the metrics2 port of the JVM metrics source) lands.
}

View File

@ -0,0 +1,101 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.util;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Additional helpers (besides guava Preconditions) for programming by contract
*/
@InterfaceAudience.Private
public class Contracts {

  private Contracts() {} // static helpers only; no instances

  /**
   * Validate an argument against a caller-evaluated condition.
   * @param <T> type of the argument
   * @param arg the argument to check
   * @param expression the boolean expression for the condition
   * @param msg the error message if {@code expression} is false
   * @return the argument, for convenient inline use
   * @throws IllegalArgumentException if {@code expression} is false
   */
  public static <T> T checkArg(T arg, boolean expression, Object msg) {
    if (expression) {
      return arg;
    }
    throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
  }

  /**
   * Validate an int argument against a caller-evaluated condition.
   * @param arg the argument to check
   * @param expression the boolean expression for the condition
   * @param msg the error message if {@code expression} is false
   * @return the argument, for convenient inline use
   * @throws IllegalArgumentException if {@code expression} is false
   */
  public static int checkArg(int arg, boolean expression, Object msg) {
    if (expression) {
      return arg;
    }
    throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
  }

  /**
   * Validate a long argument against a caller-evaluated condition.
   * @param arg the argument to check
   * @param expression the boolean expression for the condition
   * @param msg the error message if {@code expression} is false
   * @return the argument, for convenient inline use
   * @throws IllegalArgumentException if {@code expression} is false
   */
  public static long checkArg(long arg, boolean expression, Object msg) {
    if (expression) {
      return arg;
    }
    throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
  }

  /**
   * Validate a float argument against a caller-evaluated condition.
   * @param arg the argument to check
   * @param expression the boolean expression for the condition
   * @param msg the error message if {@code expression} is false
   * @return the argument, for convenient inline use
   * @throws IllegalArgumentException if {@code expression} is false
   */
  public static float checkArg(float arg, boolean expression, Object msg) {
    if (expression) {
      return arg;
    }
    throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
  }

  /**
   * Validate a double argument against a caller-evaluated condition.
   * @param arg the argument to check
   * @param expression the boolean expression for the condition
   * @param msg the error message if {@code expression} is false
   * @return the argument, for convenient inline use
   * @throws IllegalArgumentException if {@code expression} is false
   */
  public static double checkArg(double arg, boolean expression, Object msg) {
    if (expression) {
      return arg;
    }
    throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
  }
}

View File

@ -0,0 +1,88 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.util;
import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
/**
* This util class provides a method to register an MBean using
* our standard naming convention as described in the doc
* for {@link #register(String, String, Object)}
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class MBeans {
  private static final Log LOG = LogFactory.getLog(MBeans.class);

  /**
   * Register the MBean using our standard MBeanName format
   * "Hadoop:service=&lt;serviceName&gt;,name=&lt;nameName&gt;"
   * where &lt;serviceName&gt; and &lt;nameName&gt; are the supplied parameters.
   *
   * @param serviceName the service part of the MBean name
   * @param nameName the name part of the MBean name
   * @param theMbean the MBean to register
   * @return the name used to register the MBean, or null if registration failed
   */
  public static ObjectName register(String serviceName, String nameName,
                                    Object theMbean) {
    final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    final ObjectName name = getMBeanName(serviceName, nameName);
    try {
      mbs.registerMBean(theMbean, name);
      LOG.debug("Registered "+ name);
      return name;
    } catch (Exception e) {
      // Log and fall through: a metrics registration failure should not
      // take down the service being instrumented.
      LOG.warn("Error registering "+ name, e);
    }
    return null;
  }

  /**
   * Unregister a previously registered MBean. A null name is logged
   * (with a stack trace at debug level to locate the caller) and ignored.
   *
   * @param mbeanName the name returned by {@link #register}
   */
  public static void unregister(ObjectName mbeanName) {
    LOG.debug("Unregistering "+ mbeanName);
    final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    if (mbeanName == null) {
      LOG.debug("Stacktrace: ", new Throwable());
      return;
    }
    try {
      mbs.unregisterMBean(mbeanName);
    } catch (Exception e) {
      LOG.warn("Error unregistering "+ mbeanName, e);
    }
  }

  // Build the standard "Hadoop:service=...,name=..." object name; returns
  // null (after logging) if the name cannot be created.
  private static ObjectName getMBeanName(String serviceName, String nameName) {
    final String nameStr = "Hadoop:service="+ serviceName +",name="+ nameName;
    try {
      return DefaultMetricsSystem.newMBeanName(nameStr);
    } catch (Exception e) {
      LOG.warn("Error creating MBean object name: "+ nameStr, e);
      return null;
    }
  }
}

View File

@ -0,0 +1,175 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.util;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import com.google.common.base.Objects;
import com.google.common.collect.Maps;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;
/**
* A metrics cache for sinks that don't support sparse updates.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MetricsCache {
  static final Log LOG = LogFactory.getLog(MetricsCache.class);
  static final int MAX_RECS_PER_NAME_DEFAULT = 1000;

  // Record name -> cache of records keyed by their tag list.
  private final Map<String, RecordCache> map = Maps.newHashMap();
  // Cap on the number of distinct tag combinations kept per record name.
  private final int maxRecsPerName;

  // Insertion-ordered map keyed by a record's tag collection; the oldest
  // entry is evicted once the per-name cap is exceeded.
  // NOTE(review): key equality relies on the equals/hashCode of the tag
  // collection returned by MetricsRecord#tags() -- assumes it is stable
  // (same type, same order) across updates of the same record; confirm.
  class RecordCache
      extends LinkedHashMap<Collection<MetricsTag>, Record> {
    private static final long serialVersionUID = 1L;
    // Warn only once per cache to avoid flooding the log on overflow.
    private boolean gotOverflow = false;

    // Called by LinkedHashMap after every put; returning true evicts the
    // eldest (first-inserted) entry.
    @Override
    protected boolean removeEldestEntry(Map.Entry<Collection<MetricsTag>,
                                                  Record> eldest) {
      boolean overflow = size() > maxRecsPerName;
      if (overflow && !gotOverflow) {
        LOG.warn("Metrics cache overflow at "+ size() +" for "+ eldest);
        gotOverflow = true;
      }
      return overflow;
    }
  }

  /**
   * Cached record: the tag values (when requested) and the last seen value
   * of every metric for one (record name, tag list) combination.
   */
  public static class Record {
    final Map<String, String> tags = Maps.newHashMap();
    final Map<String, Number> metrics = Maps.newHashMap();

    /**
     * Lookup a tag value
     * @param key name of the tag
     * @return the tag value, or null if not cached
     */
    public String getTag(String key) {
      return tags.get(key);
    }

    /**
     * Lookup a metric value
     * @param key name of the metric
     * @return the metric value, or null if not cached
     */
    public Number getMetric(String key) {
      return metrics.get(key);
    }

    /**
     * @return the entry set of the tags of the record
     */
    public Set<Map.Entry<String, String>> tags() {
      return tags.entrySet();
    }

    /**
     * @return entry set of the metrics of the record
     */
    public Set<Map.Entry<String, Number>> metrics() {
      return metrics.entrySet();
    }

    @Override public String toString() {
      return Objects.toStringHelper(this)
          .add("tags", tags).add("metrics", metrics)
          .toString();
    }
  }

  /**
   * Construct a metrics cache with the default per-name record limit
   */
  public MetricsCache() {
    this(MAX_RECS_PER_NAME_DEFAULT);
  }

  /**
   * Construct a metrics cache
   * @param maxRecsPerName limit of the number records per record name
   */
  public MetricsCache(int maxRecsPerName) {
    this.maxRecsPerName = maxRecsPerName;
  }

  /**
   * Update the cache and return the current cached record
   * @param mr the update record
   * @param includingTags cache tag values (for later lookup by name) if true
   * @return the updated cache record
   */
  public Record update(MetricsRecord mr, boolean includingTags) {
    String name = mr.name();
    RecordCache recordCache = map.get(name);
    if (recordCache == null) {
      recordCache = new RecordCache();
      map.put(name, recordCache);
    }
    Collection<MetricsTag> tags = mr.tags();
    Record record = recordCache.get(tags);
    if (record == null) {
      record = new Record();
      recordCache.put(tags, record);
    }
    // Merge the new metric values over previously cached ones, so sinks
    // that cannot handle sparse updates still see every known metric.
    for (AbstractMetric m : mr.metrics()) {
      record.metrics.put(m.name(), m.value());
    }
    if (includingTags) {
      // mostly for some sinks that include tags as part of a dense schema
      for (MetricsTag t : mr.tags()) {
        record.tags.put(t.name(), t.value());
      }
    }
    return record;
  }

  /**
   * Update the cache and return the current cache record
   * @param mr the update record
   * @return the updated cache record
   */
  public Record update(MetricsRecord mr) {
    return update(mr, false);
  }

  /**
   * Get the cached record
   * @param name of the record
   * @param tags of the record
   * @return the cached record or null
   */
  public Record get(String name, Collection<MetricsTag> tags) {
    RecordCache rc = map.get(name);
    if (rc == null) return null;
    return rc.get(tags);
  }
}

View File

@ -0,0 +1,167 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.util;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Helper to compute running sample stats
*/
@InterfaceAudience.Private
public class SampleStat {
  private final MinMax minmax = new MinMax();
  private long numSamples = 0;
  // Welford running accumulators: a0/a1 hold the previous/current running
  // mean, s0/s1 hold the previous/current running sum of squared deviations.
  private double a0, a1, s0, s1;

  /**
   * Construct a new running sample stat
   */
  public SampleStat() {
    a0 = s0 = 0.0;
  }

  /**
   * Reset the running stat to its initial (empty) state
   */
  public void reset() {
    numSamples = 0;
    a0 = s0 = 0.0;
    minmax.reset();
  }

  // We want to reuse the object, sometimes.
  void reset(long numSamples, double a0, double a1, double s0, double s1,
             MinMax minmax) {
    this.numSamples = numSamples;
    this.a0 = a0;
    this.a1 = a1;
    this.s0 = s0;
    this.s1 = s1;
    this.minmax.reset(minmax);
  }

  /**
   * Copy the values to other (saves object creation and gc.)
   * @param other the destination to hold our values
   */
  public void copyTo(SampleStat other) {
    other.reset(numSamples, a0, a1, s0, s1, minmax);
  }

  /**
   * Add a sample the running stat.
   * @param x the sample number
   * @return self
   */
  public SampleStat add(double x) {
    minmax.add(x);
    return add(1, x);
  }

  /**
   * Add some sample and a partial sum to the running stat.
   * Note, min/max is not evaluated using this method.
   * @param nSamples number of samples
   * @param x the partial sum
   * @return self
   */
  public SampleStat add(long nSamples, double x) {
    numSamples += nSamples;
    if (numSamples == 1) {
      a0 = a1 = x;
      s0 = 0.0;
    } else {
      // The Welford method for numerical stability
      a1 = a0 + (x - a0) / numSamples;
      s1 = s0 + (x - a0) * (x - a1);
      a0 = a1;
      s0 = s1;
    }
    return this;
  }

  /**
   * @return the total number of samples
   */
  public long numSamples() {
    return numSamples;
  }

  /**
   * @return the arithmetic mean of the samples, or 0 if there are none
   */
  public double mean() {
    return numSamples > 0 ? a1 : 0.0;
  }

  /**
   * @return the (sample) variance of the samples, or 0 with fewer than 2
   */
  public double variance() {
    return numSamples > 1 ? s1 / (numSamples - 1) : 0.0;
  }

  /**
   * @return the standard deviation of the samples
   */
  public double stddev() {
    return Math.sqrt(variance());
  }

  /**
   * @return the minimum value of the samples
   */
  public double min() {
    return minmax.min();
  }

  /**
   * @return the maximum value of the samples
   */
  public double max() {
    return minmax.max();
  }

  /**
   * Helper to keep running min/max
   */
  @SuppressWarnings("PublicInnerClass")
  public static class MinMax {
    // Sentinels chosen so that the FIRST sample always updates both fields.
    // Note: Double.MIN_VALUE would be wrong for the max sentinel -- it is
    // the smallest POSITIVE double, so a stream of all-negative samples
    // would never update the max. Use the most negative finite value.
    static final double DEFAULT_MIN_VALUE = Double.MAX_VALUE;
    static final double DEFAULT_MAX_VALUE = -Double.MAX_VALUE;

    private double min = DEFAULT_MIN_VALUE;
    private double max = DEFAULT_MAX_VALUE;

    public void add(double value) {
      if (value > max) max = value;
      if (value < min) min = value;
    }

    public double min() { return min; }
    public double max() { return max; }

    public void reset() {
      min = DEFAULT_MIN_VALUE;
      max = DEFAULT_MAX_VALUE;
    }

    public void reset(MinMax other) {
      min = other.min();
      max = other.max();
    }
  }
}

View File

@ -0,0 +1,72 @@
/*
* Util.java
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.util;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;

import com.google.common.collect.Lists;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Helpers to handle server addresses
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class Servers {
  /**
   * This class is not intended to be instantiated
   */
  private Servers() {}

  /**
   * Parses a space and/or comma separated sequence of server specifications
   * of the form <i>hostname</i> or <i>hostname:port</i>. If
   * the specs string is null, defaults to localhost:defaultPort.
   * Empty tokens (e.g., from a leading separator) are ignored.
   *
   * @param specs server specs (see description)
   * @param defaultPort the default port if not specified
   * @return a list of InetSocketAddress objects.
   */
  public static List<InetSocketAddress> parse(String specs, int defaultPort) {
    List<InetSocketAddress> result = new ArrayList<InetSocketAddress>();
    if (specs == null) {
      result.add(new InetSocketAddress("localhost", defaultPort));
      return result;
    }
    for (String specString : specs.split("[ ,]+")) {
      if (specString.length() == 0) {
        // split() yields an empty first token for leading separators;
        // skip it instead of creating a bogus empty-host address.
        continue;
      }
      int colon = specString.indexOf(':');
      if (colon < 0 || colon == specString.length() - 1) {
        // no port (or trailing colon): use the default port
        result.add(new InetSocketAddress(specString, defaultPort));
      } else {
        String hostname = specString.substring(0, colon);
        int port = Integer.parseInt(specString.substring(colon + 1));
        result.add(new InetSocketAddress(hostname, port));
      }
    }
    return result;
  }
}

View File

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* General helpers for implementing source and sinks
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
package org.apache.hadoop.metrics2.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -0,0 +1,144 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.filter;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.configuration.SubsetConfiguration;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.impl.ConfigBuilder;
import static org.apache.hadoop.metrics2.lib.Interns.*;
public class TestPatternFilter {

  /**
   * An empty filter configuration should accept everything
   */
  @Test public void emptyConfigShouldAccept() {
    SubsetConfiguration empty = new ConfigBuilder().subset("");
    shouldAccept(empty, "anything");
    shouldAccept(empty, Arrays.asList(tag("key", "desc", "value")));
  }

  /**
   * With only include patterns, the filters operate as a white list:
   * only matched names/tags pass
   */
  @Test public void includeOnlyShouldOnlyIncludeMatched() {
    SubsetConfiguration whitelist = new ConfigBuilder()
        .add("p.include", "foo")
        .add("p.include.tags", "foo:f").subset("p");
    shouldAccept(whitelist, "foo");
    shouldAccept(whitelist, Arrays.asList(tag("bar", "", ""),
                                          tag("foo", "", "f")));
    shouldReject(whitelist, "bar");
    shouldReject(whitelist, Arrays.asList(tag("bar", "", "")));
    shouldReject(whitelist, Arrays.asList(tag("foo", "", "boo")));
  }

  /**
   * With only exclude patterns, the filters operate as a black list:
   * only matched names/tags are dropped
   */
  @Test public void excludeOnlyShouldOnlyExcludeMatched() {
    SubsetConfiguration blacklist = new ConfigBuilder()
        .add("p.exclude", "foo")
        .add("p.exclude.tags", "foo:f").subset("p");
    shouldAccept(blacklist, "bar");
    shouldAccept(blacklist, Arrays.asList(tag("bar", "", "")));
    shouldReject(blacklist, "foo");
    shouldReject(blacklist, Arrays.asList(tag("bar", "", ""),
                                          tag("foo", "", "f")));
  }

  /**
   * When both include and exclude patterns are present, items matched by
   * neither pattern should still be accepted
   */
  @Test public void shouldAcceptUnmatchedWhenBothAreConfigured() {
    SubsetConfiguration both = new ConfigBuilder()
        .add("p.include", "foo")
        .add("p.include.tags", "foo:f")
        .add("p.exclude", "bar")
        .add("p.exclude.tags", "bar:b").subset("p");
    shouldAccept(both, "foo");
    shouldAccept(both, Arrays.asList(tag("foo", "", "f")));
    shouldReject(both, "bar");
    shouldReject(both, Arrays.asList(tag("bar", "", "b")));
    shouldAccept(both, "foobar");
    shouldAccept(both, Arrays.asList(tag("foobar", "", "")));
  }

  /**
   * Include patterns should win over exclude patterns when both match
   */
  @Test public void includeShouldOverrideExclude() {
    SubsetConfiguration conflicting = new ConfigBuilder()
        .add("p.include", "foo")
        .add("p.include.tags", "foo:f")
        .add("p.exclude", "foo")
        .add("p.exclude.tags", "foo:f").subset("p");
    shouldAccept(conflicting, "foo");
    shouldAccept(conflicting, Arrays.asList(tag("foo", "", "f")));
  }

  // Assert that both filter implementations accept the given name.
  static void shouldAccept(SubsetConfiguration conf, String s) {
    assertTrue("accepts "+ s, newGlobFilter(conf).accepts(s));
    assertTrue("accepts "+ s, newRegexFilter(conf).accepts(s));
  }

  // Assert that both filter implementations accept the given tag list.
  static void shouldAccept(SubsetConfiguration conf, List<MetricsTag> tags) {
    assertTrue("accepts "+ tags, newGlobFilter(conf).accepts(tags));
    assertTrue("accepts "+ tags, newRegexFilter(conf).accepts(tags));
  }

  // Assert that both filter implementations reject the given name.
  static void shouldReject(SubsetConfiguration conf, String s) {
    assertFalse("rejects "+ s, newGlobFilter(conf).accepts(s));
    assertFalse("rejects "+ s, newRegexFilter(conf).accepts(s));
  }

  // Assert that both filter implementations reject the given tag list.
  static void shouldReject(SubsetConfiguration conf, List<MetricsTag> tags) {
    assertFalse("rejects "+ tags, newGlobFilter(conf).accepts(tags));
    assertFalse("rejects "+ tags, newRegexFilter(conf).accepts(tags));
  }

  /**
   * Create a new glob filter initialized with the given config
   * @param conf the config object
   * @return the filter
   */
  public static GlobFilter newGlobFilter(SubsetConfiguration conf) {
    GlobFilter filter = new GlobFilter();
    filter.init(conf);
    return filter;
  }

  /**
   * Create a new regex filter initialized with the given config
   * @param conf the config object
   * @return the filter
   */
  public static RegexFilter newRegexFilter(SubsetConfiguration conf) {
    RegexFilter filter = new RegexFilter();
    filter.init(conf);
    return filter;
  }
}

View File

@ -0,0 +1,74 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration.SubsetConfiguration;
/**
* Helper class for building configs, mostly used in tests
*/
public class ConfigBuilder {
  /** The config under construction */
  public final PropertiesConfiguration config;

  /**
   * Start with an empty configuration
   */
  public ConfigBuilder() {
    config = new PropertiesConfiguration();
  }

  /**
   * Set a property on the config
   * @param key the property key
   * @param value the property value
   * @return self, for chaining
   */
  public ConfigBuilder add(String key, Object value) {
    config.addProperty(key, value);
    return this;
  }

  /**
   * Persist the config to a file
   * @param filename the destination file name
   * @return self, for chaining
   * @throws RuntimeException if saving fails
   */
  public ConfigBuilder save(String filename) {
    try {
      config.save(filename);
    } catch (Exception e) {
      throw new RuntimeException("Error saving config", e);
    }
    return this;
  }

  /**
   * Build a subset view of the config (so getParent() can be used.)
   * @param prefix the subset prefix
   * @return the subset config
   */
  public SubsetConfiguration subset(String prefix) {
    return new SubsetConfiguration(config, prefix, ".");
  }
}

View File

@ -0,0 +1,67 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.io.PrintStream;
import java.util.Iterator;
import static org.junit.Assert.*;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
/**
* Helpers for config tests and debugging
*/
/**
 * Helpers for config tests and debugging.
 */
class ConfigUtil {

  /** Dump a config to stdout with no header. */
  static void dump(Configuration c) {
    dump(null, c, System.out);
  }

  /** Dump a config to stdout, preceded by an optional header line. */
  static void dump(String header, Configuration c) {
    dump(header, c, System.out);
  }

  /**
   * Dump a config to the given stream, preceded by an optional header line.
   * @param header optional header line (skipped when null)
   * @param c the config to dump
   * @param out the destination stream
   */
  static void dump(String header, Configuration c, PrintStream out) {
    // Copy into a PropertiesConfiguration, which knows how to save itself
    PropertiesConfiguration props = new PropertiesConfiguration();
    props.copy(c);
    if (header != null) {
      out.println(header);
    }
    try {
      props.save(out);
    } catch (Exception cause) {
      throw new RuntimeException("Error saving config", cause);
    }
  }

  /**
   * Assert that two configurations hold exactly the same properties.
   * @param expected the reference config
   * @param actual the config under test
   */
  static void assertEq(Configuration expected, Configuration actual) {
    // Every expected key must be present in actual with the same value
    for (Iterator<?> keys = expected.getKeys(); keys.hasNext();) {
      String key = (String) keys.next();
      assertTrue("actual should contain "+ key, actual.containsKey(key));
      assertEquals("value of "+ key, expected.getProperty(key),
                   actual.getProperty(key));
    }
    // ... and actual must not carry any extra keys
    for (Iterator<?> keys = actual.getKeys(); keys.hasNext();) {
      String key = (String) keys.next();
      assertTrue("expected should contain "+ key, expected.containsKey(key));
    }
  }
}

View File

@ -0,0 +1,29 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
/**
* Helper to create metrics list for testing
*/
/**
 * Helper to create metrics list for testing
 */
class MetricsLists {

  /**
   * Start a record builder for a record with the given name.
   * @param name of the record
   * @return a fresh record builder backed by a new collector
   */
  static MetricsRecordBuilderImpl builder(String name) {
    return new MetricsCollectorImpl().addRecord(name);
  }
}

View File

@ -0,0 +1,56 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.configuration.SubsetConfiguration;
import static org.apache.hadoop.metrics2.filter.TestPatternFilter.*;
import static org.apache.hadoop.metrics2.lib.Interns.*;
/**
 * Test record- and metric-level filtering in MetricsCollectorImpl.
 */
public class TestMetricsCollectorImpl {

  /**
   * A record whose name is excluded by the record filter should turn the
   * builder into a no-op: tags and metrics are silently dropped and no
   * record is produced.
   */
  @Test public void recordBuilderShouldNoOpIfFiltered() {
    // Filter config that excludes anything named "foo"
    SubsetConfiguration fc = new ConfigBuilder()
        .add("p.exclude", "foo").subset("p");
    MetricsCollectorImpl mb = new MetricsCollectorImpl();
    mb.setRecordFilter(newGlobFilter(fc));
    MetricsRecordBuilderImpl rb = mb.addRecord("foo");
    rb.tag(info("foo", ""), "value").addGauge(info("g0", ""), 1);
    assertEquals("no tags", 0, rb.tags().size());
    assertEquals("no metrics", 0, rb.metrics().size());
    assertNull("null record", rb.getRecord());
    assertEquals("no records", 0, mb.getRecords().size());
  }

  /**
   * A metric filter should drop only the matching metric ("foo" gauge)
   * while leaving tags and non-matching metrics ("c0" counter) intact.
   */
  @Test public void testPerMetricFiltering() {
    SubsetConfiguration fc = new ConfigBuilder()
        .add("p.exclude", "foo").subset("p");
    MetricsCollectorImpl mb = new MetricsCollectorImpl();
    mb.setMetricFilter(newGlobFilter(fc));
    MetricsRecordBuilderImpl rb = mb.addRecord("foo");
    // The "foo" tag survives (metric filter only applies to metrics),
    // the "foo" gauge is dropped, the "c0" counter is kept
    rb.tag(info("foo", ""), "").addCounter(info("c0", ""), 0)
      .addGauge(info("foo", ""), 1);
    assertEquals("1 tag", 1, rb.tags().size());
    assertEquals("1 metric", 1, rb.metrics().size());
    assertEquals("expect foo tag", "foo", rb.tags().get(0).name());
    assertEquals("expect c0", "c0", rb.metrics().get(0).name());
  }
}

View File

@ -0,0 +1,150 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.Map;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import static org.apache.hadoop.metrics2.impl.ConfigUtil.*;
/**
* Test metrics configuration
*/
/**
 * Test metrics configuration: prefix stripping, per-instance subsets and
 * default-value fallback lookups.
 */
public class TestMetricsConfig {
  static final Log LOG = LogFactory.getLog(TestMetricsConfig.class);

  /**
   * Common use cases: loading a config for prefix "p1" should strip the
   * prefix, keep the "*" wildcard defaults visible, and drop keys that
   * belong to other prefixes (p2).
   * @throws Exception
   */
  @Test public void testCommon() throws Exception {
    String filename = getTestFilename("test-metrics2");
    new ConfigBuilder()
        .add("*.foo", "default foo")
        .add("p1.*.bar", "p1 default bar")
        .add("p1.t1.*.bar", "p1.t1 default bar")
        .add("p1.t1.i1.name", "p1.t1.i1.name")
        .add("p1.t1.42.bar", "p1.t1.42.bar")
        .add("p1.t2.i1.foo", "p1.t2.i1.foo")
        .add("p2.*.foo", "p2 default foo")
        .save(filename);
    MetricsConfig mc = MetricsConfig.create("p1", filename);
    LOG.debug("mc:"+ mc);
    // Expected view after the "p1." prefix is stripped; note "*.foo" is
    // not part of the subset itself (it is reachable via default lookup)
    Configuration expected = new ConfigBuilder()
        .add("*.bar", "p1 default bar")
        .add("t1.*.bar", "p1.t1 default bar")
        .add("t1.i1.name", "p1.t1.i1.name")
        .add("t1.42.bar", "p1.t1.42.bar")
        .add("t2.i1.foo", "p1.t2.i1.foo")
        .config;
    assertEq(expected, mc);
    testInstances(mc);
  }

  /**
   * Verify per-instance sub-configs (t1.i1, t1.42, t2.i1) and the
   * fallback chain: instance value, then type default, then "*" default.
   */
  private void testInstances(MetricsConfig c) throws Exception {
    Map<String, MetricsConfig> map = c.getInstanceConfigs("t1");
    Map<String, MetricsConfig> map2 = c.getInstanceConfigs("t2");
    assertEquals("number of t1 instances", 2, map.size());
    assertEquals("number of t2 instances", 1, map2.size());
    assertTrue("contains t1 instance i1", map.containsKey("i1"));
    assertTrue("contains t1 instance 42", map.containsKey("42"));
    assertTrue("contains t2 instance i1", map2.containsKey("i1"));
    MetricsConfig t1i1 = map.get("i1");
    MetricsConfig t1i42 = map.get("42");
    MetricsConfig t2i1 = map2.get("i1");
    LOG.debug("--- t1 instance i1:"+ t1i1);
    LOG.debug("--- t1 instance 42:"+ t1i42);
    LOG.debug("--- t2 instance i1:"+ t2i1);
    // Each instance config only contains its own explicitly set keys
    Configuration t1expected1 = new ConfigBuilder()
        .add("name", "p1.t1.i1.name").config;
    Configuration t1expected42 = new ConfigBuilder()
        .add("bar", "p1.t1.42.bar").config;
    Configuration t2expected1 = new ConfigBuilder()
        .add("foo", "p1.t2.i1.foo").config;
    assertEq(t1expected1, t1i1);
    assertEq(t1expected42, t1i42);
    assertEq(t2expected1, t2i1);
    LOG.debug("asserting foo == default foo");
    // Check default lookups: unset keys fall back to the most specific
    // wildcard default ("p1.t1 default bar" beats "p1 default bar" for t1)
    assertEquals("value of foo in t1 instance i1", "default foo",
                 t1i1.getString("foo"));
    assertEquals("value of bar in t1 instance i1", "p1.t1 default bar",
                 t1i1.getString("bar"));
    assertEquals("value of foo in t1 instance 42", "default foo",
                 t1i42.getString("foo"));
    assertEquals("value of foo in t2 instance i1", "p1.t2.i1.foo",
                 t2i1.getString("foo"));
    assertEquals("value of bar in t2 instance i1", "p1 default bar",
                 t2i1.getString("bar"));
  }

  /**
   * Should throw if missing config files
   */
  @Test public void testMissingFiles() {
    try {
      MetricsConfig.create("JobTracker", "non-existent.properties");
    }
    catch (MetricsConfigException e) {
      // The message prefix identifies the "no file found" failure mode
      assertTrue("expected the 'cannot locate configuration' exception",
                 e.getMessage().startsWith("Cannot locate configuration"));
      return;
    }
    fail("should've thrown");
  }

  /**
   * Test the config file load order: non-existent names in front of the
   * list are skipped and the first file that loads wins.
   * @throws Exception
   */
  @Test public void testLoadFirst() throws Exception {
    String filename = getTestFilename("hadoop-metrics2-p1");
    new ConfigBuilder().add("p1.foo", "p1foo").save(filename);
    MetricsConfig mc = MetricsConfig.create("p1");
    // "na1" and "na2" don't exist, so the named file is used
    MetricsConfig mc2 = MetricsConfig.create("p1", "na1", "na2", filename);
    Configuration expected = new ConfigBuilder().add("foo", "p1foo").config;
    assertEq(expected, mc);
    assertEq(expected, mc2);
  }

  /**
   * Return a test filename in the class path
   * @param basename
   * @return the filename
   */
  public static String getTestFilename(String basename) {
    // NOTE(review): hard-codes the ant build output dir so the file ends
    // up on the test classpath
    return "build/classes/"+ basename +".properties";
  }
}

View File

@ -0,0 +1,165 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.runners.MockitoJUnitRunner;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import com.google.common.collect.Iterables;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsException;
import static org.apache.hadoop.test.MoreAsserts.*;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsSink;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.annotation.*;
import static org.apache.hadoop.metrics2.lib.Interns.*;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableRate;
import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
/**
* Test the MetricsSystemImpl class
*/
@RunWith(MockitoJUnitRunner.class)
public class TestMetricsSystemImpl {
  private static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class);
  // Captors collect every MetricsRecord pushed to the mocked sinks
  @Captor private ArgumentCaptor<MetricsRecord> r1;
  @Captor private ArgumentCaptor<MetricsRecord> r2;
  private static String hostname = MetricsSystemImpl.getHostname();

  /** Trivial sink that just logs what it receives. */
  public static class TestSink implements MetricsSink {

    @Override public void putMetrics(MetricsRecord record) {
      LOG.debug(record);
    }

    @Override public void flush() {}

    @Override public void init(SubsetConfiguration conf) {
      LOG.debug(MetricsConfig.toString(conf));
    }
  }

  /**
   * End-to-end: configure filters, start the system, register sources and
   * sinks, trigger a publish and verify both sinks saw identical records.
   * @throws Exception
   */
  @Test public void testInitFirst() throws Exception {
    // The "test" prefix matches the MetricsSystemImpl("Test") below;
    // s0 is excluded entirely, X*/Y* metrics are filtered per source/sink
    ConfigBuilder cb = new ConfigBuilder().add("*.period", 8)
        //.add("test.sink.plugin.urls", getPluginUrlsAsString())
        .add("test.sink.test.class", TestSink.class.getName())
        .add("test.*.source.filter.exclude", "s0")
        .add("test.source.s1.metric.filter.exclude", "X*")
        .add("test.sink.sink1.metric.filter.exclude", "Y*")
        .add("test.sink.sink2.metric.filter.exclude", "Y*")
        .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl ms = new MetricsSystemImpl("Test");
    ms.start();
    ms.register("s0", "s0 desc", new TestSource("s0rec"));
    TestSource s1 = ms.register("s1", "s1 desc", new TestSource("s1rec"));
    s1.c1.incr();
    s1.xxx.incr();
    s1.g1.set(2);
    s1.yyy.incr(2);
    s1.s1.add(0);
    MetricsSink sink1 = mock(MetricsSink.class);
    MetricsSink sink2 = mock(MetricsSink.class);
    ms.registerSink("sink1", "sink1 desc", sink1);
    ms.registerSink("sink2", "sink2 desc", sink2);
    ms.onTimerEvent(); // trigger something interesting
    ms.stop();
    // Each mocked sink receives two records (see checkMetricsRecords);
    // presumably the s1 source record plus the metrics-system record —
    // TODO confirm against MetricsSystemImpl publish behavior
    verify(sink1, times(2)).putMetrics(r1.capture());
    List<MetricsRecord> mr1 = r1.getAllValues();
    verify(sink2, times(2)).putMetrics(r2.capture());
    List<MetricsRecord> mr2 = r2.getAllValues();
    checkMetricsRecords(mr1);
    assertEquals("output", mr1, mr2);
  }

  /**
   * Re-registering a source name while the system is not started should
   * quietly replace the previous source rather than throw.
   */
  @Test public void testRegisterDups() {
    MetricsSystem ms = new MetricsSystemImpl();
    TestSource ts1 = new TestSource("ts1");
    TestSource ts2 = new TestSource("ts2");
    ms.register("ts1", "", ts1);
    MetricsSource s1 = ms.getSource("ts1");
    assertNotNull(s1);
    // should work when metrics system is not started
    ms.register("ts1", "", ts2);
    MetricsSource s2 = ms.getSource("ts1");
    assertNotNull(s2);
    assertNotSame(s1, s2);
  }

  /** Registering the same annotated source object twice must throw. */
  @Test(expected=MetricsException.class) public void testRegisterDupError() {
    MetricsSystem ms = new MetricsSystemImpl("test");
    TestSource ts = new TestSource("ts");
    ms.register(ts);
    ms.register(ts);
  }

  /**
   * Check the first record is the filtered s1 output (no X*/Y* metrics)
   * and the second carries the metrics-system's own NumActiveSinks gauge.
   */
  private void checkMetricsRecords(List<MetricsRecord> recs) {
    LOG.debug(recs);
    MetricsRecord r = recs.get(0);
    assertEquals("name", "s1rec", r.name());
    // Array-vs-iterable comparison — relies on the statically imported
    // MoreAsserts.assertEquals overload, not JUnit's
    assertEquals("tags", new MetricsTag[] {
      tag(MsInfo.Context, "test"),
      tag(MsInfo.Hostname, hostname)}, r.tags());
    assertEquals("metrics", MetricsLists.builder("")
      .addCounter(info("C1", "C1 desc"), 1L)
      .addGauge(info("G1", "G1 desc"), 2L)
      .addCounter(info("S1NumOps", "Number of ops for s1"), 1L)
      .addGauge(info("S1AvgTime", "Average time for s1"), 0.0)
      .metrics(), r.metrics());
    r = recs.get(1);
    // 3 sinks = sink1 + sink2 + the TestSink configured via the file
    assertTrue("NumActiveSinks should be 3", Iterables.contains(r.metrics(),
               new MetricGaugeInt(MsInfo.NumActiveSinks, 3)));
  }

  /** Annotated source with one metric of each mutable flavor under test. */
  @Metrics(context="test")
  private static class TestSource {
    @Metric("C1 desc") MutableCounterLong c1;
    @Metric("XXX desc") MutableCounterLong xxx;
    @Metric("G1 desc") MutableGaugeLong g1;
    @Metric("YYY desc") MutableGaugeLong yyy;
    @Metric MutableRate s1;
    final MetricsRegistry registry;

    TestSource(String recName) {
      registry = new MetricsRegistry(recName);
    }
  }

  // NOTE(review): only referenced from the commented-out plugin-urls line
  // in testInitFirst above
  private static String getPluginUrlsAsString() {
    return "file:metrics2-test-plugin.jar";
  }
}

View File

@ -0,0 +1,89 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.List;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.runner.RunWith;
import static org.mockito.Mockito.*;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.runners.MockitoJUnitRunner;
import org.apache.hadoop.metrics2.MetricsVisitor;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsInfo;
import static org.apache.hadoop.metrics2.lib.Interns.*;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
/**
* Test the metric visitor interface
*/
@RunWith(MockitoJUnitRunner.class)
public class TestMetricsVisitor {
  // One captor per metric so each visitor callback's info can be inspected
  @Captor private ArgumentCaptor<MetricsInfo> c1;
  @Captor private ArgumentCaptor<MetricsInfo> c2;
  @Captor private ArgumentCaptor<MetricsInfo> g1;
  @Captor private ArgumentCaptor<MetricsInfo> g2;
  @Captor private ArgumentCaptor<MetricsInfo> g3;
  @Captor private ArgumentCaptor<MetricsInfo> g4;

  /**
   * Test the common use cases: visiting a list with int/long counters and
   * int/long/float/double gauges dispatches to the correctly-typed visitor
   * method with the right info and value.
   */
  @Test public void testCommon() {
    MetricsVisitor visitor = mock(MetricsVisitor.class);
    MetricsRegistry registry = new MetricsRegistry("test");
    List<AbstractMetric> metrics = MetricsLists.builder("test")
        .addCounter(info("c1", "int counter"), 1)
        .addCounter(info("c2", "long counter"), 2L)
        .addGauge(info("g1", "int gauge"), 5)
        .addGauge(info("g2", "long gauge"), 6L)
        .addGauge(info("g3", "float gauge"), 7f)
        .addGauge(info("g4", "double gauge"), 8d)
        .metrics();
    for (AbstractMetric metric : metrics) {
      metric.visit(visitor);
    }
    verify(visitor).counter(c1.capture(), eq(1));
    assertEquals("c1 name", "c1", c1.getValue().name());
    assertEquals("c1 description", "int counter", c1.getValue().description());
    verify(visitor).counter(c2.capture(), eq(2L));
    assertEquals("c2 name", "c2", c2.getValue().name());
    assertEquals("c2 description", "long counter", c2.getValue().description());
    verify(visitor).gauge(g1.capture(), eq(5));
    assertEquals("g1 name", "g1", g1.getValue().name());
    assertEquals("g1 description", "int gauge", g1.getValue().description());
    verify(visitor).gauge(g2.capture(), eq(6L));
    assertEquals("g2 name", "g2", g2.getValue().name());
    assertEquals("g2 description", "long gauge", g2.getValue().description());
    verify(visitor).gauge(g3.capture(), eq(7f));
    assertEquals("g3 name", "g3", g3.getValue().name());
    assertEquals("g3 description", "float gauge", g3.getValue().description());
    verify(visitor).gauge(g4.capture(), eq(8d));
    assertEquals("g4 name", "g4", g4.getValue().name());
    assertEquals("g4 description", "double gauge", g4.getValue().description());
  }
}

View File

@ -0,0 +1,267 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;
import java.util.ConcurrentModificationException;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import static org.apache.hadoop.metrics2.impl.SinkQueue.*;
/**
* Test the half-blocking metrics sink queue
*/
public class TestSinkQueue {
  private final Log LOG = LogFactory.getLog(TestSinkQueue.class);

  /**
   * Test common use case: enqueue/dequeue round trips, the consume
   * callback, and the empty-queue front/back sentinels.
   * @throws Exception
   */
  @Test public void testCommon() throws Exception {
    final SinkQueue<Integer> q = new SinkQueue<Integer>(2);
    q.enqueue(1);
    assertEquals("queue front", 1, (int) q.front());
    assertEquals("queue back", 1, (int) q.back());
    assertEquals("element", 1, (int) q.dequeue());
    assertTrue("should enqueue", q.enqueue(2));
    q.consume(new Consumer<Integer>() {
      @Override public void consume(Integer e) {
        assertEquals("element", 2, (int) e);
      }
    });
    assertTrue("should enqueue", q.enqueue(3));
    assertEquals("element", 3, (int) q.dequeue());
    assertEquals("queue size", 0, q.size());
    // front()/back() return null (not throw) on an empty queue
    assertEquals("queue front", null, q.front());
    assertEquals("queue back", null, q.back());
  }

  /**
   * Test blocking when queue is empty: a consumer thread started against
   * an empty queue must block until the producer enqueues.
   * @throws Exception
   */
  @Test public void testEmptyBlocking() throws Exception {
    final SinkQueue<Integer> q = new SinkQueue<Integer>(2);
    final Runnable trigger = mock(Runnable.class);
    // try consuming empty queue and blocking
    Thread t = new Thread() {
      @Override public void run() {
        try {
          assertEquals("element", 1, (int) q.dequeue());
          q.consume(new Consumer<Integer>() {
            @Override public void consume(Integer e) {
              assertEquals("element", 2, (int) e);
              trigger.run(); // mock records that the consumer really ran
            }
          });
        }
        catch (InterruptedException e) {
          LOG.warn("Interrupted", e);
        }
      }
    };
    t.start();
    Thread.yield(); // Let the other block
    q.enqueue(1);
    q.enqueue(2);
    t.join();
    verify(trigger).run();
  }

  /**
   * Test nonblocking enqueue when queue is full: enqueue must return
   * false (drop) instead of blocking.
   * @throws Exception
   */
  @Test public void testFull() throws Exception {
    final SinkQueue<Integer> q = new SinkQueue<Integer>(1);
    q.enqueue(1);
    assertTrue("should drop", !q.enqueue(2));
    assertEquals("element", 1, (int) q.dequeue());
    q.enqueue(3);
    q.consume(new Consumer<Integer>() {
      @Override public void consume(Integer e) {
        assertEquals("element", 3, (int) e);
      }
    });
    assertEquals("queue size", 0, q.size());
  }

  /**
   * Test the consumeAll method: drains every element in FIFO order,
   * invoking the consumer once per element.
   * @throws Exception
   */
  @Test public void testConsumeAll() throws Exception {
    final int capacity = 64; // arbitrary
    final SinkQueue<Integer> q = new SinkQueue<Integer>(capacity);
    for (int i = 0; i < capacity; ++i) {
      assertTrue("should enqueue", q.enqueue(i));
    }
    assertTrue("should not enqueue", !q.enqueue(capacity));
    final Runnable trigger = mock(Runnable.class);
    q.consumeAll(new Consumer<Integer>() {
      private int expected = 0;
      @Override public void consume(Integer e) {
        assertEquals("element", expected++, (int) e);
        trigger.run();
      }
    });
    verify(trigger, times(capacity)).run();
  }

  /**
   * Test the consumer throwing exceptions: the exception propagates and
   * the element stays in the queue (consistent state).
   * @throws Exception
   */
  @Test public void testConsumerException() throws Exception {
    final SinkQueue<Integer> q = new SinkQueue<Integer>(1);
    final RuntimeException ex = new RuntimeException("expected");
    q.enqueue(1);
    try {
      q.consume(new Consumer<Integer>() {
        @Override public void consume(Integer e) {
          throw ex;
        }
      });
    }
    catch (Exception expected) {
      assertSame("consumer exception", ex, expected);
    }
    // The queue should be in consistent state after exception
    assertEquals("queue size", 1, q.size());
    assertEquals("element", 1, (int) q.dequeue());
  }

  /**
   * Test the clear method: overfilled queue (extra enqueues dropped)
   * empties completely.
   */
  @Test public void testClear() {
    final SinkQueue<Integer> q = new SinkQueue<Integer>(128);
    for (int i = 0; i < q.capacity() + 97; ++i) {
      q.enqueue(i);
    }
    assertEquals("queue size", q.capacity(), q.size());
    q.clear();
    assertEquals("queue size", 0, q.size());
  }

  /**
   * Test consumers that take their time: while a consumer is stuck on the
   * front element, enqueue beyond capacity drops instead of blocking and
   * the queue contents are unchanged.
   * @throws Exception
   */
  @Test public void testHangingConsumer() throws Exception {
    SinkQueue<Integer> q = newSleepingConsumerQueue(2, 1, 2);
    assertEquals("queue back", 2, (int) q.back());
    assertTrue("should drop", !q.enqueue(3)); // should not block
    assertEquals("queue size", 2, q.size());
    assertEquals("queue head", 1, (int) q.front());
    assertEquals("queue back", 2, (int) q.back());
  }

  /**
   * Test concurrent consumer access, which is illegal: every consuming
   * operation must throw ConcurrentModificationException while another
   * consumer is active, leaving the queue untouched.
   * @throws Exception
   */
  @Test public void testConcurrentConsumers() throws Exception {
    final SinkQueue<Integer> q = newSleepingConsumerQueue(2, 1);
    assertTrue("should enqueue", q.enqueue(2));
    assertEquals("queue back", 2, (int) q.back());
    assertTrue("should drop", !q.enqueue(3)); // should not block
    shouldThrowCME(new Fun() {
      @Override public void run() {
        q.clear();
      }
    });
    shouldThrowCME(new Fun() {
      @Override public void run() throws Exception {
        q.consume(null);
      }
    });
    shouldThrowCME(new Fun() {
      @Override public void run() throws Exception {
        q.consumeAll(null);
      }
    });
    shouldThrowCME(new Fun() {
      @Override public void run() throws Exception {
        q.dequeue();
      }
    });
    // The queue should still be in consistent state after all the exceptions
    assertEquals("queue size", 2, q.size());
    assertEquals("queue front", 1, (int) q.front());
    assertEquals("queue back", 2, (int) q.back());
  }

  /**
   * Run the callback and assert it throws ConcurrentModificationException.
   * @param callback the operation expected to fail
   */
  private void shouldThrowCME(Fun callback) throws Exception {
    try {
      callback.run();
    }
    catch (ConcurrentModificationException e) {
      LOG.info(e);
      return;
    }
    fail("should've thrown");
  }

  /**
   * Build a queue preloaded with the given values whose first element is
   * held by a daemon consumer thread sleeping "forever", so the queue
   * appears permanently mid-consumption to the caller.
   * @param capacity of the queue
   * @param values initial elements to enqueue
   * @return the queue with a hung consumer attached
   */
  private SinkQueue<Integer> newSleepingConsumerQueue(int capacity,
                                                      int... values) {
    final SinkQueue<Integer> q = new SinkQueue<Integer>(capacity);
    for (int i : values) {
      q.enqueue(i);
    }
    Thread t = new Thread() {
      @Override public void run() {
        try {
          q.consume(new Consumer<Integer>() {
            @Override
            public void consume(Integer e) throws InterruptedException {
              LOG.info("sleeping");
              Thread.sleep(1000 * 86400); // a long time
            }
          });
        }
        catch (InterruptedException ex) {
          LOG.warn("Interrupted", ex);
        }
      }
    };
    t.setName("Sleeping consumer");
    t.setDaemon(true); // so jvm can exit
    t.start();
    Thread.yield(); // Let the consumer consume
    LOG.debug("Returning new sleeping consumer queue");
    return q;
  }

  /** A checked-exception-friendly runnable for shouldThrowCME. */
  static interface Fun {
    void run() throws Exception;
  }
}

View File

@ -0,0 +1,79 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsTag;
import static org.apache.hadoop.metrics2.lib.Interns.*;
/**
 * Test the interning caches for metric info and tag objects: identical
 * inputs return the same instance until a cache overflows and evicts.
 */
public class TestInterns {

  /** Interning the same (name, description) pair yields the same object. */
  @Test public void testInfo() {
    MetricsInfo info = info("m", "m desc");
    assertSame("same info", info, info("m", "m desc"));
  }

  /** Interning the same (name, desc, value) tag yields the same object. */
  @Test public void testTag() {
    MetricsTag tag = tag("t", "t desc", "t value");
    assertSame("same tag", tag, tag("t", "t desc", "t value"));
  }

  /**
   * Overflow the info caches: inserting more than MAX_INFO_NAMES distinct
   * names (resp. MAX_INFO_DESCS descriptions per name) evicts the oldest
   * entry, so a later intern returns a fresh instance.
   */
  @Test public void testInfoOverflow() {
    MetricsInfo i0 = info("m0", "m desc");
    // i starts at 0 (re-interning m0 itself), hence the +1 bound to
    // actually exceed the name-cache capacity
    for (int i = 0; i < MAX_INFO_NAMES + 1; ++i) {
      info("m"+ i, "m desc");
      if (i < MAX_INFO_NAMES) {
        assertSame("m0 is still there", i0, info("m0", "m desc"));
      }
    }
    assertNotSame("m0 is gone", i0, info("m0", "m desc"));

    MetricsInfo i1 = info("m1", "m desc");
    for (int i = 0; i < MAX_INFO_DESCS; ++i) {
      info("m1", "m desc"+ i);
      if (i < MAX_INFO_DESCS - 1) {
        assertSame("i1 is still there", i1, info("m1", "m desc"));
      }
    }
    assertNotSame("i1 is gone", i1, info("m1", "m desc"));
  }

  /**
   * Overflow the tag caches: same eviction behavior as testInfoOverflow,
   * but for MAX_TAG_NAMES distinct names and MAX_TAG_VALUES values.
   */
  @Test public void testTagOverflow() {
    MetricsTag t0 = tag("t0", "t desc", "t value");
    for (int i = 0; i < MAX_TAG_NAMES + 1; ++i) {
      tag("t"+ i, "t desc", "t value");
      if (i < MAX_TAG_NAMES) {
        assertSame("t0 still there", t0, tag("t0", "t desc", "t value"));
      }
    }
    assertNotSame("t0 is gone", t0, tag("t0", "t desc", "t value"));

    MetricsTag t1 = tag("t1", "t desc", "t value");
    for (int i = 0; i < MAX_TAG_VALUES; ++i) {
      tag("t1", "t desc", "t value"+ i);
      if (i < MAX_TAG_VALUES -1) {
        assertSame("t1 is still there", t1, tag("t1", "t desc", "t value"));
      }
    }
    assertNotSame("t1 is gone", t1, tag("t1", "t desc", "t value"));
  }
}

View File

@ -0,0 +1,202 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.apache.hadoop.test.MockitoMaker.*;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metric.*;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.impl.MsInfo;
import static org.apache.hadoop.metrics2.lib.Interns.*;
import static org.apache.hadoop.test.MetricsAsserts.*;
public class TestMetricsAnnotations {
  /** Source with one annotated field of each mutable metric flavor. */
  static class MyMetrics {
    @Metric MutableCounterInt c1;
    @Metric({"Counter2", "Counter2 desc"}) MutableCounterLong c2;
    @Metric MutableGaugeInt g1, g2;
    @Metric("g3 desc") MutableGaugeLong g3;
    @Metric MutableRate r1;
    @Metric MutableStat s1;
    @Metric MutableRates rs1;
  }
  /**
   * Annotated fields are instantiated by makeSource, and a snapshot
   * reports each with the expected capitalized name and description
   * (field name doubles as description when none is given).
   */
  @Test public void testFields() {
    MyMetrics metrics = new MyMetrics();
    MetricsSource source = MetricsAnnotations.makeSource(metrics);
    // Fields were null before makeSource; it injects the instances
    metrics.c1.incr();
    metrics.c2.incr();
    metrics.g1.incr();
    metrics.g2.incr();
    metrics.g3.incr();
    metrics.r1.add(1);
    metrics.s1.add(1);
    metrics.rs1.add("rs1", 1);
    MetricsRecordBuilder rb = getMetrics(source);
    verify(rb).addCounter(info("C1", "C1"), 1);
    verify(rb).addCounter(info("Counter2", "Counter2 desc"), 1L);
    verify(rb).addGauge(info("G1", "G1"), 1);
    verify(rb).addGauge(info("G2", "G2"), 1);
    verify(rb).addGauge(info("G3", "g3 desc"), 1L);
    // Rate/stat metrics expand into NumOps counter + AvgTime gauge
    verify(rb).addCounter(info("R1NumOps", "Number of ops for r1"), 1L);
    verify(rb).addGauge(info("R1AvgTime", "Average time for r1"), 1.0);
    verify(rb).addCounter(info("S1NumOps", "Number of ops for s1"), 1L);
    verify(rb).addGauge(info("S1AvgTime", "Average time for s1"), 1.0);
    verify(rb).addCounter(info("Rs1NumOps", "Number of ops for rs1"), 1L);
    verify(rb).addGauge(info("Rs1AvgTime", "Average time for rs1"), 1.0);
  }
  /** Invalid source: Integer is not a supported mutable metric type. */
  static class BadMetrics {
    @Metric Integer i0;
  }
@Test(expected=MetricsException.class) public void testBadFields() {
MetricsAnnotations.makeSource(new BadMetrics());
}
static class MyMetrics2 {
@Metric int getG1() { return 1; }
@Metric long getG2() { return 2; }
@Metric float getG3() { return 3; }
@Metric double getG4() { return 4; }
@Metric(type=Type.COUNTER) int getC1() { return 1; }
@Metric(type=Type.COUNTER) long getC2() { return 2; }
@Metric(type=Type.TAG) String getT1() { return "t1"; }
}
@Test public void testMethods() {
MyMetrics2 metrics = new MyMetrics2();
MetricsSource source = MetricsAnnotations.makeSource(metrics);
MetricsRecordBuilder rb = getMetrics(source);
verify(rb).addGauge(info("G1", "G1"), 1);
verify(rb).addGauge(info("G2", "G2"), 2L);
verify(rb).addGauge(info("G3", "G3"), 3.0f);
verify(rb).addGauge(info("G4", "G4"), 4.0);
verify(rb).addCounter(info("C1", "C1"), 1);
verify(rb).addCounter(info("C2", "C2"), 2L);
verify(rb).tag(info("T1", "T1"), "t1");
}
static class BadMetrics2 {
@Metric int foo(int i) { return i; }
}
@Test(expected=IllegalArgumentException.class)
public void testBadMethodWithArgs() {
MetricsAnnotations.makeSource(new BadMetrics2());
}
static class BadMetrics3 {
@Metric boolean foo() { return true; }
}
@Test(expected=MetricsException.class)
public void testBadMethodReturnType() {
MetricsAnnotations.makeSource(new BadMetrics3());
}
@Metrics(about="My metrics", context="foo")
static class MyMetrics3 {
@Metric int getG1() { return 1; }
}
@Test public void testClasses() {
MetricsRecordBuilder rb = getMetrics(
MetricsAnnotations.makeSource(new MyMetrics3()));
MetricsCollector collector = rb.parent();
verify(collector).addRecord(info("MyMetrics3", "My metrics"));
verify(rb).add(tag(MsInfo.Context, "foo"));
}
static class HybridMetrics implements MetricsSource {
final MetricsRegistry registry = new MetricsRegistry("HybridMetrics")
.setContext("hybrid");
@Metric("C0 desc") MutableCounterInt C0;
@Metric int getG0() { return 0; }
public void getMetrics(MetricsCollector collector, boolean all) {
collector.addRecord("foo")
.setContext("foocontext")
.addCounter(info("C1", "C1 desc"), 1)
.endRecord()
.addRecord("bar")
.setContext("barcontext")
.addGauge(info("G1", "G1 desc"), 1);
registry.snapshot(collector.addRecord(registry.info()), all);
}
}
@Test public void testHybrid() {
HybridMetrics metrics = new HybridMetrics();
MetricsSource source = MetricsAnnotations.makeSource(metrics);
assertSame(metrics, source);
metrics.C0.incr();
MetricsRecordBuilder rb = getMetrics(source);
MetricsCollector collector = rb.parent();
verify(collector).addRecord("foo");
verify(collector).addRecord("bar");
verify(collector).addRecord(info("HybridMetrics", "HybridMetrics"));
verify(rb).setContext("foocontext");
verify(rb).addCounter(info("C1", "C1 desc"), 1);
verify(rb).setContext("barcontext");
verify(rb).addGauge(info("G1", "G1 desc"), 1);
verify(rb).add(tag(MsInfo.Context, "hybrid"));
verify(rb).addCounter(info("C0", "C0 desc"), 1);
verify(rb).addGauge(info("G0", "G0"), 0);
}
@Metrics(context="hybrid")
static class BadHybridMetrics implements MetricsSource {
@Metric MutableCounterInt c1;
public void getMetrics(MetricsCollector collector, boolean all) {
collector.addRecord("foo");
}
}
@Test(expected=MetricsException.class) public void testBadHybrid() {
MetricsAnnotations.makeSource(new BadHybridMetrics());
}
static class EmptyMetrics {
int foo;
}
@Test(expected=MetricsException.class) public void testEmptyMetrics() {
MetricsAnnotations.makeSource(new EmptyMetrics());
}
}

View File

@ -0,0 +1,91 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import static org.apache.hadoop.metrics2.lib.Interns.*;
import static org.apache.hadoop.test.MetricsAsserts.*;
/**
* Test the metric registry class
*/
/**
 * Test the metric registry class
 */
public class TestMetricsRegistry {

  /**
   * Test various factory methods
   */
  @Test public void testNewMetrics() {
    final MetricsRegistry registry = new MetricsRegistry("test");
    registry.newCounter("c1", "c1 desc", 1);
    registry.newCounter("c2", "c2 desc", 2L);
    registry.newGauge("g1", "g1 desc", 3);
    registry.newGauge("g2", "g2 desc", 4L);
    registry.newStat("s1", "s1 desc", "ops", "time");

    assertEquals("num metrics in registry", 5, registry.metrics().size());
    assertTrue("c1 found", registry.get("c1") instanceof MutableCounterInt);
    assertTrue("c2 found", registry.get("c2") instanceof MutableCounterLong);
    assertTrue("g1 found", registry.get("g1") instanceof MutableGaugeInt);
    assertTrue("g2 found", registry.get("g2") instanceof MutableGaugeLong);
    assertTrue("s1 found", registry.get("s1") instanceof MutableStat);

    // Re-registering an existing name must be rejected.
    expectMetricsException("Metric name c1 already exists", new Runnable() {
      public void run() { registry.newCounter("c1", "test dup", 0); }
    });
  }

  /**
   * Test the add by name method
   */
  @Test public void testAddByName() {
    MetricsRecordBuilder rb = mockMetricsRecordBuilder();
    final MetricsRegistry registry = new MetricsRegistry("test");

    // add() on an unknown name implicitly creates a stat.
    registry.add("s1", 42);
    registry.get("s1").snapshot(rb);
    verify(rb).addCounter(info("S1NumOps", "Number of ops for s1"), 1L);
    verify(rb).addGauge(info("S1AvgTime", "Average time for s1"), 42.0);

    registry.newCounter("c1", "test add", 1);
    registry.newGauge("g1", "test add", 1);

    // add() is not supported for plain counters or gauges.
    expectMetricsException("Unsupported add", new Runnable() {
      public void run() { registry.add("c1", 42); }
    });
    expectMetricsException("Unsupported add", new Runnable() {
      public void run() { registry.add("g1", 42); }
    });
  }

  /** Run fun and assert it throws a MetricsException starting with prefix. */
  private void expectMetricsException(String prefix, Runnable fun) {
    boolean thrown = false;
    try {
      fun.run();
    } catch (MetricsException e) {
      assertTrue("expected exception", e.getMessage().startsWith(prefix));
      thrown = true;
    }
    if (!thrown) {
      fail("should've thrown '"+ prefix +"...'");
    }
  }
}

View File

@ -0,0 +1,106 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.junit.Test;
import static org.mockito.Mockito.*;
import static org.mockito.AdditionalMatchers.*;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import static org.apache.hadoop.metrics2.lib.Interns.*;
import static org.apache.hadoop.test.MetricsAsserts.*;
/**
* Test metrics record builder interface and mutable metrics
*/
/**
 * Test metrics record builder interface and mutable metrics
 */
public class TestMutableMetrics {

  private static final double EPSILON = 1e-42;

  /**
   * Test the snapshot method
   */
  @Test public void testSnapshot() {
    MetricsRecordBuilder builder = mockMetricsRecordBuilder();
    MetricsRegistry reg = new MetricsRegistry("test");
    reg.newCounter("c1", "int counter", 1);
    reg.newCounter("c2", "long counter", 2L);
    reg.newGauge("g1", "int gauge", 3);
    reg.newGauge("g2", "long gauge", 4L);
    reg.newStat("s1", "stat", "Ops", "Time", true).add(0);
    reg.newRate("s2", "stat", false).add(0);
    reg.snapshot(builder, true);

    MutableStat s2 = (MutableStat) reg.get("s2");
    s2.snapshot(builder, true); // should get the same back.
    s2.add(1);
    s2.snapshot(builder, true); // should get new interval values back

    verify(builder).addCounter(info("c1", "int counter"), 1);
    verify(builder).addCounter(info("c2", "long counter"), 2L);
    verify(builder).addGauge(info("g1", "int gauge"), 3);
    verify(builder).addGauge(info("g2", "long gauge"), 4L);
    verify(builder).addCounter(info("S1NumOps", "Number of ops for stat"), 1L);
    verify(builder).addGauge(
        eq(info("S1AvgTime", "Average time for stat")), eq(0.0, EPSILON));
    verify(builder).addGauge(
        eq(info("S1StdevTime", "Standard deviation of time for stat")),
        eq(0.0, EPSILON));
    verify(builder).addGauge(
        eq(info("S1IMinTime", "Interval min time for stat")),
        eq(0.0, EPSILON));
    verify(builder).addGauge(
        eq(info("S1IMaxTime", "Interval max time for stat")),
        eq(0.0, EPSILON));
    verify(builder).addGauge(
        eq(info("S1MinTime", "Min time for stat")), eq(0.0, EPSILON));
    verify(builder).addGauge(
        eq(info("S1MaxTime", "Max time for stat")), eq(0.0, EPSILON));

    // s2 was snapshotted twice before the extra sample was added...
    verify(builder, times(2))
        .addCounter(info("S2NumOps", "Number of ops for stat"), 1L);
    verify(builder, times(2)).addGauge(
        eq(info("S2AvgTime", "Average time for stat")), eq(0.0, EPSILON));
    // ...and once after, with the new interval values.
    verify(builder).addCounter(info("S2NumOps", "Number of ops for stat"), 2L);
    verify(builder).addGauge(
        eq(info("S2AvgTime", "Average time for stat")), eq(1.0, EPSILON));
  }

  /** Method names here become the rate metric name prefixes (Foo*, Bar*). */
  interface TestProtocol {
    void foo();
    void bar();
  }

  @Test public void testMutableRates() {
    MetricsRecordBuilder builder = mockMetricsRecordBuilder();
    MetricsRegistry reg = new MetricsRegistry("test");
    MutableRates rates = new MutableRates(reg);

    rates.init(TestProtocol.class);
    reg.snapshot(builder, false);

    assertCounter("FooNumOps", 0L, builder);
    assertGauge("FooAvgTime", 0.0, builder);
    assertCounter("BarNumOps", 0L, builder);
    assertGauge("BarAvgTime", 0.0, builder);
  }
}

View File

@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.lib;
import org.junit.Test;
import static org.junit.Assert.*;
/** Tests for the UniqueNames suffix-generating de-duplicator. */
public class TestUniqNames {

  /** A repeated name gets a numeric suffix appended. */
  @Test public void testCommonCases() {
    UniqueNames names = new UniqueNames();
    assertEquals("foo", names.uniqueName("foo"));
    assertEquals("foo-1", names.uniqueName("foo"));
  }

  /** Generated names colliding with requested names get further suffixes. */
  @Test public void testCollisions() {
    UniqueNames names = new UniqueNames();
    names.uniqueName("foo");
    assertEquals("foo-1", names.uniqueName("foo-1"));
    assertEquals("foo-2", names.uniqueName("foo"));
    assertEquals("foo-1-1", names.uniqueName("foo-1"));
    assertEquals("foo-2-1", names.uniqueName("foo-2"));
  }
}

View File

@ -0,0 +1,139 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.util;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;
import static org.apache.hadoop.metrics2.lib.Interns.*;
/** Tests for the MetricsCache record/tag/metric caching behavior. */
public class TestMetricsCache {
  private static final Log LOG = LogFactory.getLog(TestMetricsCache.class);

  /** Records with the same name+tags merge; different tags start fresh. */
  @Test public void testUpdate() {
    MetricsCache cache = new MetricsCache();
    MetricsRecord record = makeRecord("r",
        Arrays.asList(makeTag("t", "tv")),
        Arrays.asList(makeMetric("m", 0), makeMetric("m1", 1)));

    MetricsCache.Record cached = cache.update(record);
    verify(record).name();
    verify(record).tags();
    verify(record).metrics();
    assertEquals("same record size", cached.metrics().size(),
        ((Collection<AbstractMetric>) record.metrics()).size());
    assertEquals("same metric value", 0, cached.getMetric("m"));

    MetricsRecord record2 = makeRecord("r",
        Arrays.asList(makeTag("t", "tv")),
        Arrays.asList(makeMetric("m", 2), makeMetric("m2", 42)));
    cached = cache.update(record2);
    assertEquals("contains 3 metric", 3, cached.metrics().size());
    assertEquals("updated metric value", 2, cached.getMetric("m"));
    assertEquals("old metric value", 1, cached.getMetric("m1"));
    assertEquals("new metric value", 42, cached.getMetric("m2"));

    MetricsRecord record3 = makeRecord("r",
        Arrays.asList(makeTag("t", "tv3")), // different tag value
        Arrays.asList(makeMetric("m3", 3)));
    cached = cache.update(record3); // should get a new record
    assertEquals("contains 1 metric", 1, cached.metrics().size());
    assertEquals("updated metric value", 3, cached.getMetric("m3"));
    // tags cache should be empty so far
    assertEquals("no tags", 0, cached.tags().size());
    // until now
    cached = cache.update(record3, true);
    assertEquals("Got 1 tag", 1, cached.tags().size());
    assertEquals("Tag value", "tv3", cached.getTag("t"));
    assertEquals("Metric value", 3, cached.getMetric("m3"));
  }

  /** Lookup by name and tag list after an update. */
  @Test public void testGet() {
    MetricsCache cache = new MetricsCache();
    assertNull("empty", cache.get("r", Arrays.asList(makeTag("t", "t"))));

    MetricsRecord record = makeRecord("r",
        Arrays.asList(makeTag("t", "t")),
        Arrays.asList(makeMetric("m", 1)));
    cache.update(record);
    MetricsCache.Record cached = cache.get("r", record.tags());
    LOG.debug("tags="+ record.tags() +" cr="+ cached);

    assertNotNull("Got record", cached);
    assertEquals("contains 1 metric", 1, cached.metrics().size());
    assertEquals("new metric value", 1, cached.getMetric("m"));
  }

  /**
   * Make sure metrics tag has a sane hashCode impl
   */
  @Test public void testNullTag() {
    MetricsCache cache = new MetricsCache();
    MetricsRecord record = makeRecord("r",
        Arrays.asList(makeTag("t", null)),
        Arrays.asList(makeMetric("m", 0), makeMetric("m1", 1)));

    MetricsCache.Record cached = cache.update(record);
    assertTrue("t value should be null", null == cached.getTag("t"));
  }

  /** Oldest entry is evicted once the per-name record limit is exceeded. */
  @Test public void testOverflow() {
    MetricsCache cache = new MetricsCache();
    MetricsCache.Record cached;
    Collection<MetricsTag> t0 = Arrays.asList(makeTag("t0", "0"));

    for (int i = 0; i < MetricsCache.MAX_RECS_PER_NAME_DEFAULT + 1; ++i) {
      cached = cache.update(makeRecord("r",
          Arrays.asList(makeTag("t"+ i, ""+ i)),
          Arrays.asList(makeMetric("m", i))));
      assertEquals("new metrics value", i, cached.getMetric("m"));
      if (i < MetricsCache.MAX_RECS_PER_NAME_DEFAULT) {
        assertNotNull("t0 is still there", cache.get("r", t0));
      }
    }
    assertNull("t0 is gone", cache.get("r", t0));
  }

  /** Build a mock MetricsRecord returning the given name, tags and metrics. */
  private MetricsRecord makeRecord(String name, Collection<MetricsTag> tags,
                                   Collection<AbstractMetric> metrics) {
    MetricsRecord record = mock(MetricsRecord.class);
    when(record.name()).thenReturn(name);
    when(record.tags()).thenReturn(tags);
    when(record.metrics()).thenReturn(metrics);
    return record;
  }

  /** A real tag with an empty description. */
  private MetricsTag makeTag(String name, String value) {
    return new MetricsTag(info(name, ""), value);
  }

  /** A mock metric returning the given name and value. */
  private AbstractMetric makeMetric(String name, Number value) {
    AbstractMetric metric = mock(AbstractMetric.class);
    when(metric.name()).thenReturn(name);
    when(metric.value()).thenReturn(value);
    return metric;
  }
}

View File

@ -0,0 +1,67 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.util;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Test the running sample stat computation
*/
/**
 * Test the running sample stat computation
 */
public class TestSampleStat {
  private static final double EPSILON = 1e-42;

  /**
   * Some simple use cases: empty stat, one sample, several samples, reset.
   */
  @Test public void testSimple() {
    SampleStat stat = new SampleStat();
    assertEmptyState(stat);  // freshly constructed

    stat.add(3);
    assertEquals("num samples", 1L, stat.numSamples());
    assertEquals("mean", 3.0, stat.mean(), EPSILON);
    assertEquals("variance", 0.0, stat.variance(), EPSILON);
    assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
    assertEquals("min", 3.0, stat.min(), EPSILON);
    assertEquals("max", 3.0, stat.max(), EPSILON);

    stat.add(2).add(1);
    assertEquals("num samples", 3L, stat.numSamples());
    assertEquals("mean", 2.0, stat.mean(), EPSILON);
    assertEquals("variance", 1.0, stat.variance(), EPSILON);
    assertEquals("stddev", 1.0, stat.stddev(), EPSILON);
    assertEquals("min", 1.0, stat.min(), EPSILON);
    assertEquals("max", 3.0, stat.max(), EPSILON);

    stat.reset();
    assertEmptyState(stat);  // reset must return to the initial state
  }

  /**
   * Assert the initial/empty state: zero samples, zeroed moments, and
   * sentinel min/max (MAX_VALUE/MIN_VALUE, so any sample replaces them).
   * Extracted because the identical block was duplicated for the
   * post-construction and post-reset checks.
   */
  private static void assertEmptyState(SampleStat stat) {
    assertEquals("num samples", 0, stat.numSamples());
    assertEquals("mean", 0.0, stat.mean(), EPSILON);
    assertEquals("variance", 0.0, stat.variance(), EPSILON);
    assertEquals("stddev", 0.0, stat.stddev(), EPSILON);
    assertEquals("min", Double.MAX_VALUE, stat.min(), EPSILON);
    assertEquals("max", Double.MIN_VALUE, stat.max(), EPSILON);
  }
}

View File

@ -0,0 +1,276 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.test;
import static com.google.common.base.Preconditions.*;
import org.hamcrest.Description;
import static org.mockito.Mockito.*;
import org.mockito.stubbing.Answer;
import org.mockito.invocation.InvocationOnMock;
import static org.mockito.AdditionalMatchers.*;
import org.mockito.ArgumentMatcher;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import static org.apache.hadoop.metrics2.lib.Interns.*;
/**
* Helpers for metrics source tests
*/
/**
 * Helpers for metrics source tests
 */
public class MetricsAsserts {

  final static Log LOG = LogFactory.getLog(MetricsAsserts.class);

  /**
   * Create a mock MetricsSystem and install it as the default instance.
   * @return the mock, for stubbing/verification
   */
  public static MetricsSystem mockMetricsSystem() {
    MetricsSystem ms = mock(MetricsSystem.class);
    DefaultMetricsSystem.setInstance(ms);
    return ms;
  }

  /**
   * Create a mock MetricsRecordBuilder whose methods all return the mock
   * itself (so fluent chains keep hitting the same mock), except
   * parent()/endRecord(), which return an associated mock collector.
   * Each invocation is logged at debug level with its arguments.
   * @return the mock record builder
   */
  public static MetricsRecordBuilder mockMetricsRecordBuilder() {
    final MetricsCollector mc = mock(MetricsCollector.class);
    MetricsRecordBuilder rb = mock(MetricsRecordBuilder.class,
        new Answer<Object>() {
      @Override
      public Object answer(InvocationOnMock invocation) {
        Object[] args = invocation.getArguments();
        StringBuilder sb = new StringBuilder();
        for (Object o : args) {
          if (sb.length() > 0) sb.append(", ");
          sb.append(String.valueOf(o));
        }
        String methodName = invocation.getMethod().getName();
        LOG.debug(methodName +": "+ sb);
        // parent()/endRecord() pop back up to the collector; every other
        // builder method returns the mock to keep the fluent chain going.
        return methodName.equals("parent") || methodName.equals("endRecord") ?
               mc : invocation.getMock();
      }
    });
    // Any record added to the collector resolves to this same builder mock.
    when(mc.addRecord(anyString())).thenReturn(rb);
    when(mc.addRecord(anyInfo())).thenReturn(rb);
    return rb;
  }

  /**
   * Call getMetrics on source and get a record builder mock to verify
   * @param source  the metrics source
   * @param all  if true, return all metrics even if not changed
   * @return the record builder mock to verify
   */
  public static MetricsRecordBuilder getMetrics(MetricsSource source,
                                                boolean all) {
    MetricsRecordBuilder rb = mockMetricsRecordBuilder();
    MetricsCollector mc = rb.parent();
    source.getMetrics(mc, all);
    return rb;
  }

  /**
   * Call getMetrics on the named source registered with the default
   * metrics system, and get a record builder mock to verify.
   * @param name of the registered source
   * @return the record builder mock to verify
   */
  public static MetricsRecordBuilder getMetrics(String name) {
    return getMetrics(DefaultMetricsSystem.instance().getSource(name));
  }

  /**
   * Call getMetrics(source, all=true) and get a record builder mock.
   * @param source the metrics source
   * @return the record builder mock to verify
   */
  public static MetricsRecordBuilder getMetrics(MetricsSource source) {
    return getMetrics(source, true);
  }

  /** Matches any MetricsInfo whose name equals the expected one. */
  private static class InfoWithSameName extends ArgumentMatcher<MetricsInfo> {
    private final String expected;

    InfoWithSameName(MetricsInfo info) {
      expected = checkNotNull(info.name(), "info name");
    }

    @Override public boolean matches(Object info) {
      return expected.equals(((MetricsInfo)info).name());
    }

    @Override public void describeTo(Description desc) {
      desc.appendText("Info with name="+ expected);
    }
  }

  /**
   * MetricInfo with the same name
   * @param info to match
   * @return <code>null</code>
   */
  public static MetricsInfo eqName(MetricsInfo info) {
    return argThat(new InfoWithSameName(info));
  }

  /** Matches any non-null MetricsInfo argument. */
  private static class AnyInfo extends ArgumentMatcher<MetricsInfo> {
    @Override public boolean matches(Object info) {
      return info instanceof MetricsInfo; // not null as well
    }
  }

  /**
   * Argument matcher for any (non-null) MetricsInfo.
   * @return <code>null</code> (matcher is registered via argThat)
   */
  public static MetricsInfo anyInfo() {
    return argThat(new AnyInfo());
  }

  /**
   * Assert an int gauge metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param rb  the record builder mock used to getMetrics
   */
  public static void assertGauge(String name, int expected,
                                 MetricsRecordBuilder rb) {
    verify(rb).addGauge(eqName(info(name, "")), eq(expected));
  }

  /**
   * Assert an int counter metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param rb  the record builder mock used to getMetrics
   */
  public static void assertCounter(String name, int expected,
                                   MetricsRecordBuilder rb) {
    verify(rb).addCounter(eqName(info(name, "")), eq(expected));
  }

  /**
   * Assert a long gauge metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param rb  the record builder mock used to getMetrics
   */
  public static void assertGauge(String name, long expected,
                                 MetricsRecordBuilder rb) {
    verify(rb).addGauge(eqName(info(name, "")), eq(expected));
  }

  /**
   * Assert a double gauge metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param rb  the record builder mock used to getMetrics
   */
  public static void assertGauge(String name, double expected,
                                 MetricsRecordBuilder rb) {
    verify(rb).addGauge(eqName(info(name, "")), eq(expected));
  }

  /**
   * Assert a long counter metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param rb  the record builder mock used to getMetrics
   */
  public static void assertCounter(String name, long expected,
                                   MetricsRecordBuilder rb) {
    verify(rb).addCounter(eqName(info(name, "")), eq(expected));
  }

  /**
   * Assert an int gauge metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param source  to get metrics from
   */
  public static void assertGauge(String name, int expected,
                                 MetricsSource source) {
    assertGauge(name, expected, getMetrics(source));
  }

  /**
   * Assert an int counter metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param source  to get metrics from
   */
  public static void assertCounter(String name, int expected,
                                   MetricsSource source) {
    assertCounter(name, expected, getMetrics(source));
  }

  /**
   * Assert a long gauge metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param source  to get metrics from
   */
  public static void assertGauge(String name, long expected,
                                 MetricsSource source) {
    assertGauge(name, expected, getMetrics(source));
  }

  /**
   * Assert a long counter metric as expected
   * @param name  of the metric
   * @param expected  value of the metric
   * @param source  to get metrics from
   */
  public static void assertCounter(String name, long expected,
                                   MetricsSource source) {
    assertCounter(name, expected, getMetrics(source));
  }

  /**
   * Assert that a long counter metric is greater than a value
   * @param name  of the metric
   * @param greater  value of the metric should be greater than this
   * @param rb  the record builder mock used to getMetrics
   */
  public static void assertCounterGt(String name, long greater,
                                     MetricsRecordBuilder rb) {
    verify(rb).addCounter(eqName(info(name, "")), gt(greater));
  }

  /**
   * Assert that a long counter metric is greater than a value
   * @param name  of the metric
   * @param greater  value of the metric should be greater than this
   * @param source  the metrics source
   */
  public static void assertCounterGt(String name, long greater,
                                     MetricsSource source) {
    assertCounterGt(name, greater, getMetrics(source));
  }

  /**
   * Assert that a double gauge metric is greater than a value
   * @param name  of the metric
   * @param greater  value of the metric should be greater than this
   * @param rb  the record builder mock used to getMetrics
   */
  public static void assertGaugeGt(String name, double greater,
                                   MetricsRecordBuilder rb) {
    verify(rb).addGauge(eqName(info(name, "")), gt(greater));
  }

  /**
   * Assert that a double gauge metric is greater than a value
   * @param name  of the metric
   * @param greater  value of the metric should be greater than this
   * @param source  the metrics source
   */
  public static void assertGaugeGt(String name, double greater,
                                   MetricsSource source) {
    assertGaugeGt(name, greater, getMetrics(source));
  }
}

Some files were not shown because too many files have changed in this diff Show More