MAPREDUCE-2843. svn merge -c r1176730 --ignore-ancestry ../../trunk/

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1176731 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2011-09-28 05:37:30 +00:00
parent f67abbdbf6
commit c0bcd07330
3 changed files with 60 additions and 2 deletions


@@ -1432,6 +1432,9 @@ Release 0.23.0 - Unreleased
     capabilities along-with new ApplicationId for application submission.
     (Hitesh Shah via acmurthy)
 
+    MAPREDUCE-2843. Fixed the node-table to be completely displayed and making
+    node entries on RM UI to be sortable. (Abhijit Suresh Shingate via vinodkv)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES


@@ -76,7 +76,7 @@ class NodesPage extends RmView {
           // TODO: FIXME Vinodkv
           // td(String.valueOf(ni.getUsedResource().getMemory())).
           // td(String.valueOf(ni.getAvailableResource().getMemory())).
-          _();
+          td("n/a")._();
       }
       tbody._()._();
     }
@@ -100,7 +100,7 @@ class NodesPage extends RmView {
        // rack, nodeid, host, healthStatus, health update ts, health report,
        // containers, memused, memavail
        append(", aoColumns:[null, null, null, null, null, null, ").
-       append("{bSearchable:false},{bSearchable:false},{bSearchable:false}]}").
+       append("{sType:'title-numeric', bSearchable:false}]}").
        toString();
  }
}
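Context for the change above: the patched column options use the jQuery DataTables 'title-numeric' sort type, a sorting plug-in that orders a column by a numeric value carried in a title attribute inside the cell rather than by the rendered text. The sketch below is illustrative only; the memoryCell helper and the markup shape are assumptions, not the actual NodesPage rendering code.

// Illustrative sketch, not NodesPage code: how a cell sortable with the
// DataTables 'title-numeric' plug-in and the patched aoColumns fragment
// fit together.
public class TitleNumericSketch {

  // Hypothetical helper: the visible text stays human-readable while the
  // title attribute inside the cell carries the raw number that the
  // 'title-numeric' sorter parses and compares.
  static String memoryCell(long megabytes) {
    return "<td><span title=\"" + megabytes + "\">"
        + (megabytes / 1024) + " GB</span></td>";
  }

  // The column options the patch appends: six untyped columns followed by
  // one non-searchable column declared with sType 'title-numeric'.
  static String columnOptions() {
    return ", aoColumns:[null, null, null, null, null, null, "
        + "{sType:'title-numeric', bSearchable:false}]}";
  }

  public static void main(String[] args) {
    System.out.println(memoryCell(8 * 1024)); // <td><span title="8192">8 GB</span></td>
    System.out.println(columnOptions());
  }
}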


@@ -0,0 +1,55 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp;

import java.io.PrintWriter;

import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.NodesPage.NodesBlock;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Test;
import org.mockito.Mockito;

/**
 * This tests the NodesPage block table that it should contain the table body
 * data for all the columns in the table as specified in the header.
 */
public class TestNodesPage {

  @Test
  public void testNodesBlockRender() throws Exception {
    int numberOfRacks = 2;
    int numberOfNodesPerRack = 2;
    // Number of Actual Table Headers for NodesPage.NodesBlock might change in
    // future. In that case this value should be adjusted to the new value.
    int numberOfActualTableHeaders = 7;

    PrintWriter writer = WebAppTests.testBlock(
        NodesBlock.class,
        RMContext.class,
        TestRMWebApp.mockRMContext(3, numberOfRacks, numberOfNodesPerRack,
            8 * TestRMWebApp.GiB)).getInstance(PrintWriter.class);

    Mockito.verify(writer, Mockito.times(numberOfActualTableHeaders)).print(
        "<th");
    Mockito.verify(
        writer,
        Mockito.times(numberOfRacks * numberOfNodesPerRack
            * numberOfActualTableHeaders)).print("<td");
  }
}