MAPREDUCE-2930. Added the ability to generate graphs from the state-machine definitions. Contributed by Binglin Chang.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1173524 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2011-09-21 08:37:00 +00:00
parent 7d8e9d6960
commit 01fbb0fb45
8 changed files with 445 additions and 1 deletion

View File

@@ -70,6 +70,9 @@ Release 0.23.0 - Unreleased
MAPREDUCE-2037. Capture intermediate progress, CPU and memory usage for
tasks. (Dick King via acmurthy)
MAPREDUCE-2930. Added the ability to generate graphs from the
state-machine definitions. (Binglin Chang via vinodkv)
IMPROVEMENTS
MAPREDUCE-2187. Reporter sends progress during sort/merge. (Anupam Seth via

View File

@@ -113,4 +113,41 @@
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>visualize</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.2</version>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>java</goal>
</goals>
<configuration>
<classpathScope>test</classpathScope>
<mainClass>org.apache.hadoop.yarn.util.VisualizeStateMachine</mainClass>
<arguments>
<argument>MapReduce</argument>
<argument>org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl,
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl,
org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl</argument>
<argument>MapReduce.gv</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@@ -30,7 +30,6 @@ clean and test: mvn clean install
run selected test after compile: mvn test -Dtest=TestClassName (combined: mvn clean install -Dtest=TestClassName)
create runnable binaries after install: mvn assembly:assembly (combined: mvn clean install assembly:assembly)
Eclipse Projects
----------------
http://maven.apache.org/guides/mini/guide-ide-eclipse.html
@@ -71,3 +70,16 @@ hadoop-yarn-server - Implementation of the hadoop-yarn-api
hadoop-yarn-server-common - APIs shared between resourcemanager and nodemanager
hadoop-yarn-server-nodemanager (TaskTracker replacement)
hadoop-yarn-server-resourcemanager (JobTracker replacement)
Utilities for understanding the code
------------------------------------
Almost all of the yarn components, as well as the mapreduce framework, use
state-machines for all of their data objects. To understand those central pieces of
the code, a visual representation of the state-machines helps a lot. You can first
convert the state-machines into graphviz (.gv) format by running:
mvn compile -Pvisualize
Then you can use the dot program to render the above .gv files as directed graphs and
convert them to images. The graphviz package provides the needed dot program and related
utilities. For example, to generate png files you can run:
dot -Tpng NodeManager.gv > NodeManager.png
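
Under the hood the visualize profile just runs the new org.apache.hadoop.yarn.util.VisualizeStateMachine class through the exec-maven-plugin. A minimal sketch of the equivalent direct invocation is below; the wrapper class name is made up, while the graph name, class list and output file mirror the arguments wired into the nodemanager pom in this change, and those classes must be on the classpath:

import org.apache.hadoop.yarn.util.VisualizeStateMachine;

// Sketch: what `mvn compile -Pvisualize` effectively executes for the nodemanager module.
// The three arguments mirror the exec-maven-plugin configuration added in this change.
class VisualizeNodeManagerDemo {
  public static void main(String[] args) throws Exception {
    VisualizeStateMachine.main(new String[] {
        "NodeManager",
        "org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationImpl,"
            + "org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerImpl,"
            + "org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalizedResource",
        "NodeManager.gv"
    });
  }
}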

View File

@@ -20,10 +20,14 @@ package org.apache.hadoop.yarn.state;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
import org.apache.hadoop.yarn.util.Graph;
/**
* State machine topology.
* This object is semantically immutable. If you have a
@@ -441,4 +445,39 @@ final public class StateMachineFactory
return currentState;
}
}
/**
* Generate a graph that represents the state graph of this StateMachine
* @param name graph name
* @return the generated Graph object
*/
public Graph generateStateGraph(String name) {
maybeMakeStateMachineTable();
Graph g = new Graph(name);
for (STATE startState : stateMachineTable.keySet()) {
Map<EVENTTYPE, Transition<OPERAND, STATE, EVENTTYPE, EVENT>> transitions
= stateMachineTable.get(startState);
for (Entry<EVENTTYPE, Transition<OPERAND, STATE, EVENTTYPE, EVENT>> entry :
transitions.entrySet()) {
Transition<OPERAND, STATE, EVENTTYPE, EVENT> transition = entry.getValue();
if (transition instanceof StateMachineFactory.SingleInternalArc) {
StateMachineFactory.SingleInternalArc sa
= (StateMachineFactory.SingleInternalArc) transition;
Graph.Node fromNode = g.getNode(startState.toString());
Graph.Node toNode = g.getNode(sa.postState.toString());
fromNode.addEdge(toNode, entry.getKey().toString());
} else if (transition instanceof StateMachineFactory.MultipleInternalArc) {
StateMachineFactory.MultipleInternalArc ma
= (StateMachineFactory.MultipleInternalArc) transition;
Iterator<STATE> iter = ma.validPostStates.iterator();
while (iter.hasNext()) {
Graph.Node fromNode = g.getNode(startState.toString());
Graph.Node toNode = g.getNode(iter.next().toString());
fromNode.addEdge(toNode, entry.getKey().toString());
}
}
}
}
return g;
}
}
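
Each SingleInternalArc in the factory's transition table becomes one labeled edge, and each MultipleInternalArc becomes one edge per valid post-state. A minimal sketch of driving generateStateGraph directly is below; the Task operand, the TaskState/TaskEventType enums and the wrapper class are made up for illustration, and the builder calls assume the factory's existing addTransition/installTopology methods:

import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.Graph;

// Sketch only: a toy factory whose generated graph has two states joined by one labeled edge.
enum TaskState { NEW, RUNNING }
enum TaskEventType { T_START }
class Task { }

class TaskStateGraphDemo {
  private static final StateMachineFactory<Task, TaskState, TaskEventType, Object>
      stateMachineFactory =
          new StateMachineFactory<Task, TaskState, TaskEventType, Object>(TaskState.NEW)
              .addTransition(TaskState.NEW, TaskState.RUNNING, TaskEventType.T_START)
              .installTopology();

  public static void main(String[] args) throws Exception {
    Graph g = stateMachineFactory.generateStateGraph("Task");
    System.out.println(g.generateGraphViz());  // digraph Task { ... "Task.NEW" -> "Task.RUNNING" ... }
    g.save("Task.gv");                         // the same GraphViz source, written to a file
  }
}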

View File

@@ -0,0 +1,210 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.util;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.StringEscapeUtils;
public class Graph {
public class Edge {
Node from;
Node to;
String label;
public Edge(Node from, Node to, String info) {
this.from = from;
this.to = to;
this.label = info;
}
public boolean sameAs(Edge rhs) {
if (this.from == rhs.from &&
this.to == rhs.to) {
return true;
}
return false;
}
public Edge combine(Edge rhs) {
String newlabel = this.label + "," + rhs.label;
return new Edge(this.from, this.to, newlabel);
}
}
public class Node {
Graph parent;
String id;
List<Edge> ins;
List<Edge> outs;
public Node(String id) {
this.id = id;
this.parent = Graph.this;
this.ins = new ArrayList<Graph.Edge>();
this.outs = new ArrayList<Graph.Edge>();
}
public Graph getParent() {
return parent;
}
public Node addEdge(Node to, String info) {
Edge e = new Edge(this, to, info);
outs.add(e);
to.ins.add(e);
return this;
}
public String getUniqueId() {
return Graph.this.name + "." + id;
}
}
private String name;
private Graph parent;
private Set<Graph.Node> nodes = new HashSet<Graph.Node>();
private Set<Graph> subgraphs = new HashSet<Graph>();
public Graph(String name, Graph parent) {
this.name = name;
this.parent = parent;
}
public Graph(String name) {
this(name, null);
}
public Graph() {
this("graph", null);
}
public String getName() {
return name;
}
public Graph getParent() {
return parent;
}
private Node newNode(String id) {
Node ret = new Node(id);
nodes.add(ret);
return ret;
}
public Node getNode(String id) {
for (Node node : nodes) {
if (node.id.equals(id)) {
return node;
}
}
return newNode(id);
}
public Graph newSubGraph(String name) {
Graph ret = new Graph(name, this);
subgraphs.add(ret);
return ret;
}
public void addSubGraph(Graph graph) {
subgraphs.add(graph);
graph.parent = this;
}
private static String wrapSafeString(String label) {
if (label.indexOf(',') >= 0) {
if (label.length()>14) {
label = label.replaceAll(",", ",\n");
}
}
label = "\"" + StringEscapeUtils.escapeJava(label) + "\"";
return label;
}
public String generateGraphViz(String indent) {
StringBuilder sb = new StringBuilder();
if (this.parent == null) {
sb.append("digraph " + name + " {\n");
sb.append(String.format("graph [ label=%s, fontsize=24, fontname=Helvetica];\n",
wrapSafeString(name)));
sb.append("node [fontsize=12, fontname=Helvetica];\n");
sb.append("edge [fontsize=9, fontcolor=blue, fontname=Arial];\n");
} else {
sb.append("subgraph cluster_" + name + " {\nlabel=\"" + name + "\"\n");
}
for (Graph g : subgraphs) {
String ginfo = g.generateGraphViz(indent+" ");
sb.append(ginfo);
sb.append("\n");
}
for (Node n : nodes) {
sb.append(String.format(
"%s%s [ label = %s ];\n",
indent,
wrapSafeString(n.getUniqueId()),
n.id));
List<Edge> combinedOuts = combineEdges(n.outs);
for (Edge e : combinedOuts) {
sb.append(String.format(
"%s%s -> %s [ label = %s ];\n",
indent,
wrapSafeString(e.from.getUniqueId()),
wrapSafeString(e.to.getUniqueId()),
wrapSafeString(e.label)));
}
}
sb.append("}\n");
return sb.toString();
}
public String generateGraphViz() {
return generateGraphViz("");
}
public void save(String filepath) throws IOException {
FileWriter fout = new FileWriter(filepath);
fout.write(generateGraphViz());
fout.close();
}
public static List<Edge> combineEdges(List<Edge> edges) {
List<Edge> ret = new ArrayList<Edge>();
for (Edge edge : edges) {
boolean found = false;
for (int i = 0; i < ret.size(); i++) {
Edge current = ret.get(i);
if (edge.sameAs(current)) {
ret.set(i, current.combine(edge));
found = true;
break;
}
}
if (!found) {
ret.add(edge);
}
}
return ret;
}
}
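
Graph itself is a small, general-purpose helper: getNode creates nodes lazily, parallel edges between the same pair of nodes are merged by combineEdges (their labels joined with commas), and nested graphs render as GraphViz clusters. A minimal sketch of using it directly, independent of any state machine (all node and edge names below are made up):

import org.apache.hadoop.yarn.util.Graph;

// Sketch: build a two-node graph by hand and emit its GraphViz form.
class GraphDemo {
  public static void main(String[] args) throws Exception {
    Graph g = new Graph("Demo");               // rendered as "digraph Demo { ... }"
    Graph.Node start = g.getNode("START");     // created lazily on first lookup
    Graph.Node done = g.getNode("DONE");
    start.addEdge(done, "FINISH");             // labeled, directed edge
    start.addEdge(done, "CANCEL");             // merged with the edge above into label "FINISH,CANCEL"
    System.out.println(g.generateGraphViz());  // GraphViz source on stdout
    g.save("Demo.gv");                         // the same source written to a file
  }
}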

View File

@@ -0,0 +1,73 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.util;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.yarn.state.StateMachineFactory;
public class VisualizeStateMachine {
/**
* @param graphName name to give the generated graph
* @param classes list of classes which have a static field
* stateMachineFactory of type StateMachineFactory
* @return a Graph representing the state machines of the given classes
*/
public static Graph getGraphFromClasses(String graphName, List<String> classes)
throws Exception {
Graph ret = null;
if (classes.size() != 1) {
ret = new Graph(graphName);
}
for (String className : classes) {
Class clz = Class.forName(className);
Field factoryField = clz.getDeclaredField("stateMachineFactory");
factoryField.setAccessible(true);
StateMachineFactory factory = (StateMachineFactory) factoryField.get(null);
if (classes.size() == 1) {
return factory.generateStateGraph(graphName);
}
String gname = clz.getSimpleName();
if (gname.endsWith("Impl")) {
gname = gname.substring(0, gname.length()-4);
}
ret.addSubGraph(factory.generateStateGraph(gname));
}
return ret;
}
public static void main(String [] args) throws Exception {
if (args.length < 3) {
System.err.printf("Usage: %s <GraphName> <class[,class[,...]]> <OutputFile>\n",
VisualizeStateMachine.class.getName());
System.exit(1);
}
String [] classes = args[1].split(",");
ArrayList<String> validClasses = new ArrayList<String>();
for (String c : classes) {
String vc = c.trim();
if (vc.length()>0) {
validClasses.add(vc);
}
}
Graph g = getGraphFromClasses(args[0], validClasses);
g.save(args[2]);
}
}
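
getGraphFromClasses can also be used programmatically, for example from a test. With a single class it returns a flat graph named graphName; with several classes it returns a parent graph containing one cluster per class, with any trailing "Impl" stripped from the cluster names. A minimal sketch is below; the wrapper class is made up, and the two class names are taken from the mapreduce pom in this change and must be on the classpath:

import java.util.Arrays;
import org.apache.hadoop.yarn.util.Graph;
import org.apache.hadoop.yarn.util.VisualizeStateMachine;

// Sketch: generate a combined MapReduce state graph without going through Maven.
class MapReduceStateGraphDemo {
  public static void main(String[] args) throws Exception {
    Graph g = VisualizeStateMachine.getGraphFromClasses("MapReduce",
        Arrays.asList(
            "org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl",
            "org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl"));
    g.save("MapReduce.gv");  // render afterwards with: dot -Tpng MapReduce.gv > MapReduce.png
  }
}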

View File

@@ -103,6 +103,39 @@
<activeByDefault>true</activeByDefault>
</activation>
</profile>
<profile>
<id>visualize</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.2</version>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>java</goal>
</goals>
<configuration>
<mainClass>org.apache.hadoop.yarn.util.VisualizeStateMachine</mainClass>
<arguments>
<argument>NodeManager</argument>
<argument>org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationImpl,
org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerImpl,
org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalizedResource</argument>
<argument>NodeManager.gv</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<build>

View File

@@ -98,4 +98,41 @@
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>visualize</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.2</version>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>java</goal>
</goals>
<configuration>
<mainClass>org.apache.hadoop.yarn.util.VisualizeStateMachine</mainClass>
<arguments>
<argument>ResourceManager</argument>
<argument>org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl,
org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl,
org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerImpl,
org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl</argument>
<argument>ResourceManager.gv</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>