MAPREDUCE-4375. Show Configuration Tracability in MR UI (bobby via tgraves)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1367539 13f79535-47bb-0310-9956-ffa450edef68
parent 556be2af92
commit a3e695ab2c
@@ -505,6 +505,9 @@ Release 0.23.3 - UNRELEASED
 
     MAPREDUCE-4267. mavenize pipes (tgraves via bobby)
 
+    MAPREDUCE-4375. Show Configuration Tracability in MR UI (bobby
+    via tgraves)
+
   OPTIMIZATIONS
 
     MAPREDUCE-3850. Avoid redundant calls for tokens in TokenCache (Daryn
@@ -1561,7 +1561,7 @@ public class JobImpl implements org.apache.hadoop.mapreduce.v2.app.job.Job,
     Path confPath = getConfFile();
     FileContext fc = FileContext.getFileContext(confPath.toUri(), conf);
     Configuration jobConf = new Configuration(false);
-    jobConf.addResource(fc.open(confPath));
+    jobConf.addResource(fc.open(confPath), confPath.toString());
     return jobConf;
   }
 }
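The one-line change above is the heart of the patch: the two-argument Configuration.addResource(InputStream, String) overload tags every property parsed from the stream with the given name, which Configuration.getPropertySources(String) can later report. A minimal sketch of that round trip — the class name, XML payload, and property key are invented for illustration:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;

    public class SourceTrackingDemo {
      public static void main(String[] args) {
        String xml = "<configuration><property>"
            + "<name>demo.key</name><value>demo.value</value>"
            + "</property></configuration>";
        InputStream in =
            new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8));

        Configuration conf = new Configuration(false); // no default resources
        // The second argument names the resource; Configuration records it
        // as the source of every property parsed from the stream.
        conf.addResource(in, "demo-job.xml");

        // getPropertySources returns the chain of resources that set the
        // key, oldest first, most recent last.
        for (String source : conf.getPropertySources("demo.key")) {
          System.out.println(source); // prints: demo-job.xml
        }
      }
    }

Note that `new Configuration(false)` skips the default resources, so the loaded object — and therefore the UI — reflects only the job's own conf file.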
@@ -78,14 +78,29 @@ public class ConfBlock extends HtmlBlock {
         tr().
           th(_TH, "key").
           th(_TH, "value").
+          th(_TH, "source chain").
         _().
       _().
       tbody();
     for (ConfEntryInfo entry : info.getProperties()) {
+      StringBuffer buffer = new StringBuffer();
+      String[] sources = entry.getSource();
+      //Skip the last entry, because it is always the same HDFS file, and
+      // output them in reverse order so most recent is output first
+      boolean first = true;
+      for(int i = (sources.length - 2); i >= 0; i--) {
+        if(!first) {
+          // \u2B05 is an arrow <--
+          buffer.append(" \u2B05 ");
+        }
+        first = false;
+        buffer.append(sources[i]);
+      }
       tbody.
         tr().
           td(entry.getName()).
           td(entry.getValue()).
+          td(buffer.toString()).
         _();
     }
     tbody._().
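A worked example of the loop above, under an assumed source chain (the file names are invented): getPropertySources lists sources oldest-first, and the final element is always the HDFS job.xml the Configuration was loaded from, so the loop starts at length - 2 and walks backwards.

    public class SourceChainDemo {
      public static void main(String[] args) {
        // What ConfEntryInfo.getSource() might return for a property set in
        // mapred-default.xml and overridden in mapred-site.xml (illustrative).
        String[] sources = {
            "mapred-default.xml",                     // oldest
            "mapred-site.xml",                        // override
            "hdfs://nn:8020/user/x/.staging/job.xml"  // always last: the file itself
        };
        StringBuffer buffer = new StringBuffer();
        boolean first = true;
        for (int i = sources.length - 2; i >= 0; i--) { // skip trailing job.xml
          if (!first) {
            buffer.append(" \u2B05 "); // \u2B05 is a leftwards arrow
          }
          first = false;
          buffer.append(sources[i]);
        }
        System.out.println(buffer); // mapred-site.xml ⬅ mapred-default.xml
      }
    }

With a single-element chain — a property whose only source is job.xml itself — the loop body never runs and the "source chain" cell stays empty.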
@@ -93,6 +108,7 @@ public class ConfBlock extends HtmlBlock {
         tr().
           th().input("search_init").$type(InputType.text).$name("key").$value("key")._()._().
           th().input("search_init").$type(InputType.text).$name("value").$value("value")._()._().
+          th().input("search_init").$type(InputType.text).$name("source chain").$value("source chain")._()._().
         _().
       _().
     _();
@@ -27,13 +27,19 @@ public class ConfEntryInfo {
 
   protected String name;
   protected String value;
+  protected String[] source;
 
   public ConfEntryInfo() {
   }
 
   public ConfEntryInfo(String key, String value) {
+    this(key, value, null);
+  }
+
+  public ConfEntryInfo(String key, String value, String[] source) {
     this.name = key;
     this.value = value;
+    this.source = source;
   }
 
   public String getName() {
@@ -43,4 +49,8 @@ public class ConfEntryInfo {
   public String getValue() {
     return this.value;
   }
+
+  public String[] getSource() {
+    return source;
+  }
 }
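For context (an assumption, not shown in this hunk): ConfEntryInfo is one of the webapp's JAXB dao beans, so the new String[] field marshals as a JSON array or as repeated XML elements — exactly the shape of the REST examples further down. A hypothetical construction mirroring those examples:

    // Values copied from the documentation examples below; the import path
    // is assumed from the webapp dao package.
    import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;

    ConfEntryInfo entry = new ConfEntryInfo(
        "dfs.datanode.data.dir",
        "/home/hadoop/hdfs/data",
        new String[] {"hdfs-site.xml", "job.xml"});
    // JAXB renders the source field as
    //   JSON: "source" : ["hdfs-site.xml", "job.xml"]
    //   XML:  <source>hdfs-site.xml</source><source>job.xml</source>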
@@ -46,7 +46,8 @@ public class ConfInfo {
     Configuration jobConf = job.loadConfFile();
     this.path = job.getConfFile().toString();
     for (Map.Entry<String, String> entry : jobConf) {
-      this.property.add(new ConfEntryInfo(entry.getKey(), entry.getValue()));
+      this.property.add(new ConfEntryInfo(entry.getKey(), entry.getValue(),
+          jobConf.getPropertySources(entry.getKey())));
     }
 
   }
@@ -603,7 +603,7 @@ public class MockJobs extends MockApps {
       public Configuration loadConfFile() throws IOException {
         FileContext fc = FileContext.getFileContext(configFile.toUri(), conf);
         Configuration jobConf = new Configuration(false);
-        jobConf.addResource(fc.open(configFile));
+        jobConf.addResource(fc.open(configFile), configFile.toString());
         return jobConf;
       }
     };
@@ -336,7 +336,7 @@ public class HistoryFileManager extends AbstractService {
     public synchronized Configuration loadConfFile() throws IOException {
       FileContext fc = FileContext.getFileContext(confFile.toUri(), conf);
       Configuration jobConf = new Configuration(false);
-      jobConf.addResource(fc.open(confFile));
+      jobConf.addResource(fc.open(confFile), confFile.toString());
       return jobConf;
     }
   }
@@ -1261,6 +1261,9 @@ History Server REST API's.
 *---------------+--------------+-------------------------------+
 | value | string | The value of the configuration property |
 *---------------+--------------+-------------------------------+
+| source | string | The location this configuration object came from. If there is more than one of these it shows the history, with the latest source at the end of the list. |
+*---------------+--------------+-------------------------------+
+
 
 *** Response Examples
 
@@ -1293,14 +1296,17 @@ History Server REST API's.
       {
          "value" : "/home/hadoop/hdfs/data",
-         "name" : "dfs.datanode.data.dir"
+         "name" : "dfs.datanode.data.dir",
+         "source" : ["hdfs-site.xml", "job.xml"]
       },
       {
          "value" : "org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer",
-         "name" : "hadoop.http.filter.initializers"
+         "name" : "hadoop.http.filter.initializers",
+         "source" : ["programatically", "job.xml"]
       },
       {
          "value" : "/home/hadoop/tmp",
-         "name" : "mapreduce.cluster.temp.dir"
+         "name" : "mapreduce.cluster.temp.dir",
+         "source" : ["mapred-site.xml"]
       },
       ...
    ]
@@ -1335,14 +1341,19 @@ History Server REST API's.
 <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop/hdfs/data</value>
+   <source>hdfs-site.xml</source>
+   <source>job.xml</source>
 </property>
 <property>
    <name>hadoop.http.filter.initializers</name>
    <value>org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer</value>
+   <source>programatically</source>
+   <source>job.xml</source>
 </property>
 <property>
    <name>mapreduce.cluster.temp.dir</name>
    <value>/home/hadoop/tmp</value>
+   <source>mapred-site.xml</source>
 </property>
 ...
 </conf>
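A hedged client sketch for the conf resource documented above; the host, port, and job id are placeholders, and the Accept header selects JSON or XML:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ConfFetch {
      public static void main(String[] args) throws Exception {
        // Placeholder history server address and job id.
        URL url = new URL("http://historyserver.example.com:19888"
            + "/ws/v1/history/mapreduce/jobs/job_1326232085508_0004/conf");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json"); // or application/xml
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);
          }
        }
      }
    }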
@@ -1296,6 +1296,8 @@ MapReduce Application Master REST API's.
 *---------------+--------------+-------------------------------+
 | value | string | The value of the configuration property |
 *---------------+--------------+-------------------------------+
+| source | string | The location this configuration object came from. If there is more than one of these it shows the history, with the latest source at the end of the list. |
+*---------------+--------------+-------------------------------+
 
 ** Response Examples
 
@@ -1327,15 +1329,18 @@ MapReduce Application Master REST API's.
    "property" : [
       {
          "value" : "/home/hadoop/hdfs/data",
-         "name" : "dfs.datanode.data.dir"
+         "name" : "dfs.datanode.data.dir",
+         "source" : ["hdfs-site.xml", "job.xml"]
       },
       {
          "value" : "org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer",
-         "name" : "hadoop.http.filter.initializers"
+         "name" : "hadoop.http.filter.initializers",
+         "source" : ["programatically", "job.xml"]
       },
       {
          "value" : "/home/hadoop/tmp",
-         "name" : "mapreduce.cluster.temp.dir"
+         "name" : "mapreduce.cluster.temp.dir",
+         "source" : ["mapred-site.xml"]
       },
       ...
    ]
@@ -1370,14 +1375,19 @@ MapReduce Application Master REST API's.
 <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop/hdfs/data</value>
+   <source>hdfs-site.xml</source>
+   <source>job.xml</source>
 </property>
 <property>
    <name>hadoop.http.filter.initializers</name>
    <value>org.apache.hadoop.yarn.server.webproxy.amfilter.AmFilterInitializer</value>
+   <source>programatically</source>
+   <source>job.xml</source>
 </property>
 <property>
    <name>mapreduce.cluster.temp.dir</name>
    <value>/home/hadoop/tmp</value>
+   <source>mapred-site.xml</source>
 </property>
 ...
 </conf>