HDFS-10440. Improve DataNode web UI (Contributed by Weiwei Yang)
(cherry picked from commit 2a0082c51d)
(cherry picked from commit 1e34763181)

parent 47c13c4f7d
commit f44d7782f6
BPServiceActor.java

@@ -26,6 +26,7 @@ import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -58,6 +59,7 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.util.VersionUtil;
@@ -138,6 +140,10 @@ class BPServiceActor implements Runnable {
         || runningState == BPServiceActor.RunningState.CONNECTING;
   }
 
+  String getRunningState() {
+    return runningState.toString();
+  }
+
   @Override
   public String toString() {
     return bpos.toString() + " service to " + nnAddr;
@@ -147,6 +153,22 @@ class BPServiceActor implements Runnable {
     return nnAddr;
   }
 
+  private String getNameNodeAddress() {
+    return NetUtils.getHostPortString(getNNSocketAddress());
+  }
+
+  Map<String, String> getActorInfoMap() {
+    final Map<String, String> info = new HashMap<String, String>();
+    info.put("NamenodeAddress", getNameNodeAddress());
+    info.put("BlockPoolID", bpos.getBlockPoolId());
+    info.put("ActorState", getRunningState());
+    info.put("LastHeartbeat",
+        String.valueOf(getScheduler().getLastHearbeatTime()));
+    info.put("LastBlockReport",
+        String.valueOf(getScheduler().getLastBlockReportTime()));
+    return info;
+  }
+
   private final CountDownLatch initialRegistrationComplete;
   private final LifelineSender lifelineSender;
 
@@ -379,6 +401,7 @@ class BPServiceActor implements Runnable {
                   (nCmds + " commands: " + Joiner.on("; ").join(cmds)))) +
           ".");
     }
+    scheduler.updateLastBlockReportTime(monotonicNow());
     scheduler.scheduleNextBlockReport();
     return cmds.size() == 0 ? null : cmds;
   }
@@ -425,6 +448,7 @@ class BPServiceActor implements Runnable {
           " storage reports from service actor: " + this);
     }
 
+    scheduler.updateLastHeartbeatTime(monotonicNow());
    VolumeFailureSummary volumeFailureSummary = dn.getFSDataset()
        .getVolumeFailureSummary();
    int numFailedVolumes = volumeFailureSummary != null ?
@@ -995,6 +1019,12 @@ class BPServiceActor implements Runnable {
     @VisibleForTesting
     volatile long nextLifelineTime = monotonicNow();
 
+    @VisibleForTesting
+    volatile long lastBlockReportTime = monotonicNow();
+
+    @VisibleForTesting
+    volatile long lastHeartbeatTime = monotonicNow();
+
     @VisibleForTesting
     boolean resetBlockReportTime = true;
 
@@ -1033,6 +1063,22 @@ class BPServiceActor implements Runnable {
       return nextHeartbeatTime;
     }
 
+    void updateLastHeartbeatTime(long heartbeatTime) {
+      lastHeartbeatTime = heartbeatTime;
+    }
+
+    void updateLastBlockReportTime(long blockReportTime) {
+      lastBlockReportTime = blockReportTime;
+    }
+
+    long getLastHearbeatTime() {
+      return (monotonicNow() - lastHeartbeatTime)/1000;
+    }
+
+    long getLastBlockReportTime() {
+      return (monotonicNow() - lastBlockReportTime)/1000;
+    }
+
     long scheduleNextLifeline(long baseTime) {
       // Numerical overflow is possible here and is okay.
       nextLifelineTime = baseTime + lifelineIntervalMs;
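A note on the Scheduler additions above: despite the getter names, getLastHearbeatTime() and getLastBlockReportTime() return the elapsed time in whole seconds since the last successful heartbeat or block report, not timestamps; the raw monotonic timestamps are refreshed by the updateLast*Time() calls added in the hunks at -379 and -425. A minimal standalone sketch of that arithmetic, not part of the patch, with a simplified stand-in for Hadoop's Time.monotonicNow():

public class ElapsedSecondsSketch {
  // Simplified stand-in for org.apache.hadoop.util.Time.monotonicNow() (milliseconds).
  static long monotonicNow() {
    return System.nanoTime() / 1_000_000L;
  }

  static volatile long lastHeartbeatTime = monotonicNow();

  // Mirrors Scheduler#updateLastHeartbeatTime: called with monotonicNow() on success.
  static void updateLastHeartbeatTime(long heartbeatTime) {
    lastHeartbeatTime = heartbeatTime;
  }

  // Mirrors Scheduler#getLastHearbeatTime: whole seconds since the last success,
  // which is the value the web UI shows in the "Last Heartbeat" column.
  static long getLastHearbeatTime() {
    return (monotonicNow() - lastHeartbeatTime) / 1000;
  }

  public static void main(String[] args) throws InterruptedException {
    updateLastHeartbeatTime(monotonicNow());
    Thread.sleep(2500);
    System.out.println(getLastHearbeatTime() + "s"); // prints "2s"
  }
}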
DataNode.java

@@ -2831,6 +2831,13 @@ public class DataNode extends ReconfigurableBase
     return Integer.toString(ipcAddr.getPort());
   }
 
+  @Override // DataNodeMXBean
+  public String getDataPort(){
+    InetSocketAddress dataAddr = NetUtils.createSocketAddr(
+        this.getConf().get(DFS_DATANODE_ADDRESS_KEY));
+    return Integer.toString(dataAddr.getPort());
+  }
+
   @Override // DataNodeMXBean
   public String getHttpPort(){
     return this.getConf().get("dfs.datanode.info.port");
@@ -2870,6 +2877,25 @@ public class DataNode extends ReconfigurableBase
     return JSON.toString(info);
   }
 
+  /**
+   * Returned information is a JSON representation of an array,
+   * each element of the array is a map contains the information
+   * about a block pool service actor.
+   */
+  @Override // DataNodeMXBean
+  public String getBPServiceActorInfo() {
+    final ArrayList<Map<String, String>> infoArray =
+        new ArrayList<Map<String, String>>();
+    for (BPOfferService bpos : blockPoolManager.getAllNamenodeThreads()) {
+      if (bpos != null) {
+        for (BPServiceActor actor : bpos.getBPServiceActors()) {
+          infoArray.add(actor.getActorInfoMap());
+        }
+      }
+    }
+    return JSON.toString(infoArray);
+  }
+
   /**
    * Returned information is a JSON representation of a map with
    * volume name as the key and value is a map of volume attribute
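The javadoc added in the hunk above describes the MXBean value as a JSON array with one map per block pool service actor. Purely as an illustration, and with every value below made up rather than taken from the patch, the string returned by getBPServiceActorInfo() for a DataNode with a single active actor would look roughly like this; it is the same string that dn.js later parses with JSON.parse:

public class BpActorInfoSample {
  // Hypothetical sample of the serialized attribute. Keys match
  // BPServiceActor#getActorInfoMap(); all values are strings, and the two
  // "Last*" fields are elapsed seconds, not timestamps.
  public static final String SAMPLE =
      "[{\"NamenodeAddress\":\"nn1.example.com:8020\","
      + "\"BlockPoolID\":\"BP-1234567-192.0.2.10-1465000000000\","
      + "\"ActorState\":\"RUNNING\","
      + "\"LastHeartbeat\":\"2\","
      + "\"LastBlockReport\":\"3600\"}]";

  public static void main(String[] args) {
    System.out.println(SAMPLE);
  }
}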
DataNodeMXBean.java

@@ -52,12 +52,26 @@ public interface DataNodeMXBean {
   public String getHttpPort();
 
   /**
-   * Gets the namenode IP addresses
+   * Gets the data port.
+   *
+   * @return the data port
+   */
+  String getDataPort();
+
+  /**
+   * Gets the namenode IP addresses.
    *
    * @return the namenode IP addresses that the datanode is talking to
    */
   public String getNamenodeAddresses();
 
+  /**
+   * Gets information of the block pool service actors.
+   *
+   * @return block pool service actors info
+   */
+  String getBPServiceActorInfo();
+
   /**
    * Gets the information of each volume on the Datanode. Please
    * see the implementation for the format of returned information.
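The two new interface methods surface as the JMX attributes DataPort and BPServiceActorInfo on the Hadoop:service=DataNode,name=DataNodeInfo bean, the same bean that dn.js queries through the /jmx servlet further below. A small in-process sketch of reading them, assuming it runs inside a JVM that hosts a DataNode (for example a MiniDFSCluster test); this is illustrative and not part of the patch:

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class DataNodeInfoProbe {
  public static void main(String[] args) throws Exception {
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    ObjectName name =
        new ObjectName("Hadoop:service=DataNode,name=DataNodeInfo");

    // "DataPort" backs DataNodeMXBean#getDataPort().
    String dataPort = (String) mbs.getAttribute(name, "DataPort");
    // "BPServiceActorInfo" backs DataNodeMXBean#getBPServiceActorInfo().
    String actors = (String) mbs.getAttribute(name, "BPServiceActorInfo");

    System.out.println("DataPort = " + dataPort);
    System.out.println("BPServiceActorInfo = " + actors);
  }
}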
datanode.html (new file)

@@ -0,0 +1,129 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head>
+<meta http-equiv="X-UA-Compatible" content="IE=9" />
+<link rel="stylesheet" type="text/css" href="/static/bootstrap-3.0.2/css/bootstrap.min.css" />
+<link rel="stylesheet" type="text/css" href="/static/hadoop.css" />
+<title>DataNode Information</title>
+</head>
+<body>
+
+<header class="navbar navbar-inverse bs-docs-nav" role="banner">
+<div class="container">
+  <div class="navbar-header">
+    <div class="navbar-brand">Hadoop</div>
+  </div>
+
+  <ul class="nav navbar-nav" id="ui-tabs">
+    <li><a href="#tab-overview">Overview</a></li>
+    <li class="dropdown">
+      <a href="#" class="dropdown-toggle" data-toggle="dropdown">Utilities <b class="caret"></b></a>
+      <ul class="dropdown-menu">
+        <li><a href="logs">Logs</a></li>
+      </ul>
+    </li>
+  </ul>
+</div>
+</header>
+
+<div class="container">
+
+<div id="alert-panel">
+  <div class="alert alert-danger">
+    <button type="button" class="close" onclick="$('#alert-panel').hide();">&times;</button>
+    <div class="alert-body" id="alert-panel-body"></div>
+  </div>
+</div>
+
+<div class="tab-content">
+  <div class="tab-pane" id="tab-overview"></div>
+</div>
+
+<div class="row">
+  <hr />
+  <div class="col-xs-2"><p>Hadoop, {release-year-token}.</p></div>
+</div>
+</div>
+
+<script type="text/x-dust-template" id="tmpl-dn">
+{#dn}
+<div class="page-header"><h1>DataNode on <small>{HostName}:{DataPort}</small></h1></div>
+<table class="table table-bordered table-striped">
+  <tr><th>Cluster ID:</th><td>{ClusterId}</td></tr>
+  <tr><th>Version:</th><td>{Version}</td></tr>
+</table>
+{/dn}
+
+<div class="page-header"><h1>Block Pools</h1></div>
+<table class="table">
+  <thead>
+    <tr>
+      <th>Namenode Address</th>
+      <th>Block Pool ID</th>
+      <th>Actor State</th>
+      <th>Last Heartbeat</th>
+      <th>Last Block Report</th>
+    </tr>
+  </thead>
+  {#dn.BPServiceActorInfo}
+  <tr>
+    <td>{NamenodeAddress}</td>
+    <td>{BlockPoolID}</td>
+    <td>{ActorState}</td>
+    <td>{LastHeartbeat}s</td>
+    <td>{#helper_relative_time value="{LastBlockReport}"/}</td>
+  </tr>
+  {/dn.BPServiceActorInfo}
+</table>
+
+<div class="page-header"><h1>Volume Information</h1></div>
+<table class="table">
+  <thead>
+    <tr>
+      <th>Directory</th>
+      <th>Capacity Used</th>
+      <th>Capacity Left</th>
+      <th>Capacity Reserved</th>
+      <th>Reserved Space for Replicas</th>
+      <th>Blocks</th>
+    </tr>
+  </thead>
+  {#dn.VolumeInfo}
+  <tr>
+    <td>{name}</td>
+    <td>{usedSpace|fmt_bytes}</td>
+    <td>{freeSpace|fmt_bytes}</td>
+    <td>{reservedSpace|fmt_bytes}</td>
+    <td>{reservedSpaceForReplicas|fmt_bytes}</td>
+    <td>{numBlocks}</td>
+  </tr>
+  {/dn.VolumeInfo}
+</script>
+
+<script type="text/javascript" src="/static/jquery-1.10.2.min.js"></script>
+<script type="text/javascript" src="/static/bootstrap-3.0.2/js/bootstrap.min.js"></script>
+<script type="text/javascript" src="/static/moment.min.js"></script>
+<script type="text/javascript" src="/static/dust-full-2.0.0.min.js"></script>
+<script type="text/javascript" src="/static/dust-helpers-1.1.1.min.js"></script>
+<script type="text/javascript" src="/static/dfs-dust.js"></script>
+<script type="text/javascript" src="dn.js"></script>
+
+</body>
+</html>
dn.js (new file)

@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function () {
+  "use strict";
+
+  var data = {};
+
+  dust.loadSource(dust.compile($('#tmpl-dn').html(), 'dn'));
+
+  function load() {
+    $.get('/jmx?qry=Hadoop:service=DataNode,name=DataNodeInfo', function(resp) {
+      data.dn = workaround(resp.beans[0]);
+      data.dn.HostName=window.location.hostname;
+      render();
+    }).fail(show_err_msg);
+  }
+
+  function workaround(dn) {
+    function node_map_to_array(nodes) {
+      var res = [];
+      for (var n in nodes) {
+        var p = nodes[n];
+        p.name = n;
+        res.push(p);
+      }
+      return res;
+    }
+
+    dn.VolumeInfo = node_map_to_array(JSON.parse(dn.VolumeInfo));
+    dn.BPServiceActorInfo = JSON.parse(dn.BPServiceActorInfo);
+
+    return dn;
+  }
+
+  function render() {
+    var base = dust.makeBase({
+      'helper_relative_time' : function (chunk, ctx, bodies, params) {
+        var value = dust.helpers.tap(params.value, chunk, ctx);
+        return chunk.write(moment().subtract(Number(value), 'seconds').fromNow(true));
+      }
+    });
+    dust.render('dn', base.push(data), function(err, out) {
+      $('#tab-overview').html(out);
+      $('#tab-overview').addClass('active');
+    });
+  }
+
+  function show_err_msg() {
+    $('#alert-panel-body').html("Failed to load datanode information");
+    $('#alert-panel').show();
+  }
+
+  load();
+
+})();
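dn.js above pulls its data from the DataNode's /jmx servlet rather than from a dedicated endpoint, so the same JSON is reachable with any HTTP client. A rough Java sketch of that fetch; the hostname, port, and the absence of security settings are assumptions for illustration only:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class JmxFetchSketch {
  public static void main(String[] args) throws Exception {
    // Assumed DataNode HTTP address; the query string matches the one used in dn.js.
    URL url = new URL(
        "http://datanode.example.com:50075/jmx?qry=Hadoop:service=DataNode,name=DataNodeInfo");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
      StringBuilder body = new StringBuilder();
      String line;
      while ((line = in.readLine()) != null) {
        body.append(line).append('\n');
      }
      // The response has the shape {"beans":[{...}]}; dn.js takes resp.beans[0]
      // and then JSON.parse()s its VolumeInfo and BPServiceActorInfo string fields.
      System.out.println(body);
    } finally {
      conn.disconnect();
    }
  }
}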
index.html

@@ -1,5 +1,3 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
-  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
@@ -16,47 +14,11 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 -->
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
 <html xmlns="http://www.w3.org/1999/xhtml">
 <head>
-<meta http-equiv="X-UA-Compatible" content="IE=9" />
-<link rel="stylesheet" type="text/css" href="/static/bootstrap-3.0.2/css/bootstrap.min.css" />
-<link rel="stylesheet" type="text/css" href="/static/hadoop.css" />
-<title>DataNode Information</title>
+<meta http-equiv="REFRESH" content="0;url=datanode.html" />
+<title>Hadoop Administration</title>
 </head>
-<body>
-
-<header class="navbar navbar-inverse bs-docs-nav" role="banner">
-<div class="container">
-  <div class="navbar-header">
-    <div class="navbar-brand">Hadoop</div>
-  </div>
-
-  <ul class="nav navbar-nav" id="ui-tabs">
-    <li><a>Overview</a></li>
-  </ul>
-</div>
-</header>
-
-<div class="container">
-
-<div class="tab-content">
-  <div class="tab-pane" id="tab-overview">
-    <div class="page-header"><h1>DataNode on <small><div id="authority" style="display: inline-block"></div></small></h1></div>
-  </div>
-</div>
-
-<div class="row">
-  <hr />
-  <div class="col-xs-2"><p>Hadoop, {release-year-token}.</p></div>
-</div>
-</div>
-
-<script type="text/javascript" src="/static/jquery-1.10.2.min.js">
-</script><script type="text/javascript" src="/static/bootstrap-3.0.2/js/bootstrap.min.js">
-</script>
-<script type="text/javascript">
-  $('#authority').html(window.location.host);
-  $('#tab-overview').addClass('active');
-</script>
-</body>
 </html>
TestDataNodeMXBean.java

@@ -78,6 +78,10 @@ public class TestDataNodeMXBean {
       int xceiverCount = (Integer)mbs.getAttribute(mxbeanName,
           "XceiverCount");
       Assert.assertEquals(datanode.getXceiverCount(), xceiverCount);
+
+      String bpActorInfo = (String)mbs.getAttribute(mxbeanName,
+          "BPServiceActorInfo");
+      Assert.assertEquals(datanode.getBPServiceActorInfo(), bpActorInfo);
     } finally {
       if (cluster != null) {cluster.shutdown();}
     }