diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index c0ebb1bf508..77d82e0fd80 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -218,6 +218,9 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3527. Fix minor API incompatibilities between 1.0 and 0.23.
     (tomwhite)
 
+    MAPREDUCE-3328. mapred queue -list output inconsistent and missing child
+    queues. (Ravi Prakash via mahadev)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
index a9382130735..f20fbf934af 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
@@ -450,9 +450,19 @@ public class TypeConverter {
 
   public static QueueInfo fromYarn(org.apache.hadoop.yarn.api.records.QueueInfo
       queueInfo, Configuration conf) {
-    return new QueueInfo(queueInfo.getQueueName(),queueInfo.toString(),
-        fromYarn(queueInfo.getQueueState()), TypeConverter.fromYarnApps(
-        queueInfo.getApplications(), conf));
+    QueueInfo toReturn = new QueueInfo(queueInfo.getQueueName(), "Capacity: " +
+        queueInfo.getCapacity() * 100 + ", MaximumCapacity: " +
+        (queueInfo.getMaximumCapacity() < 0 ? "UNDEFINED" :
+        queueInfo.getMaximumCapacity()) + ", CurrentCapacity: " +
+        queueInfo.getCurrentCapacity() * 100, fromYarn(queueInfo.getQueueState()),
+        TypeConverter.fromYarnApps(queueInfo.getApplications(), conf));
+    List<QueueInfo> childQueues = new ArrayList<QueueInfo>();
+    for(org.apache.hadoop.yarn.api.records.QueueInfo childQueue :
+        queueInfo.getChildQueues()) {
+      childQueues.add(fromYarn(childQueue, conf));
+    }
+    toReturn.setQueueChildren(childQueues);
+    return toReturn;
   }
 
   public static QueueInfo[] fromYarnQueueInfo(
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
index 9bbd070768e..a9a2c0a9b92 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.mapreduce;
 
+import java.util.ArrayList;
+import java.util.List;
+
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
@@ -36,6 +39,7 @@ import org.apache.hadoop.yarn.api.records.QueueState;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 import org.junit.Test;
+import org.mockito.Mockito;
 
 public class TestTypeConverter {
   @Test
@@ -134,4 +138,33 @@ public class TestTypeConverter {
     Assert.assertEquals("queueInfo translation didn't work.",
         returned.getState().toString(),
         queueInfo.getQueueState().toString().toLowerCase());
   }
+
+  /**
+   * Test that child queues are converted too during conversion of the parent
+   * queue
+   */
+  @Test
+  public void testFromYarnQueue() {
+    //Define child queue
+    org.apache.hadoop.yarn.api.records.QueueInfo child =
+      Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
+    Mockito.when(child.getQueueState()).thenReturn(QueueState.RUNNING);
+
+    //Define parent queue
+    org.apache.hadoop.yarn.api.records.QueueInfo queueInfo =
+      Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
+    List<org.apache.hadoop.yarn.api.records.QueueInfo> children =
+      new ArrayList<org.apache.hadoop.yarn.api.records.QueueInfo>();
+    children.add(child); //Add one child
+    Mockito.when(queueInfo.getChildQueues()).thenReturn(children);
+    Mockito.when(queueInfo.getQueueState()).thenReturn(QueueState.RUNNING);
+
+    //Call the function we're testing
+    org.apache.hadoop.mapreduce.QueueInfo returned =
+      TypeConverter.fromYarn(queueInfo, new Configuration());
+
+    //Verify that the converted queue has the 1 child we had added
+    Assert.assertEquals("QueueInfo children weren't properly converted",
+      returned.getQueueChildren().size(), 1);
+  }
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
index 47ebb9284a2..c7834adb7e5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
@@ -110,40 +110,33 @@ class JobQueueClient extends Configured implements Tool {
   }
 
   // format and print information about the passed in job queue.
-  void printJobQueueInfo(JobQueueInfo jobQueueInfo, Writer writer)
-      throws IOException {
+  void printJobQueueInfo(JobQueueInfo jobQueueInfo, Writer writer,
+      String prefix) throws IOException {
     if (jobQueueInfo == null) {
       writer.write("No queue found.\n");
       writer.flush();
       return;
     }
-    writer.write(String.format("Queue Name : %s \n",
+    writer.write(String.format(prefix + "======================\n"));
+    writer.write(String.format(prefix + "Queue Name : %s \n",
         jobQueueInfo.getQueueName()));
-    writer.write(String.format("Queue State : %s \n",
+    writer.write(String.format(prefix + "Queue State : %s \n",
         jobQueueInfo.getQueueState()));
-    writer.write(String.format("Scheduling Info : %s \n",
+    writer.write(String.format(prefix + "Scheduling Info : %s \n",
         jobQueueInfo.getSchedulingInfo()));
     List<JobQueueInfo> childQueues = jobQueueInfo.getChildren();
     if (childQueues != null && childQueues.size() > 0) {
-      writer.write(String.format("Child Queues : "));
       for (int i = 0; i < childQueues.size(); i++) {
-        JobQueueInfo childQueue = childQueues.get(i);
-        writer.write(String.format("%s", childQueue.getQueueName()));
-        if (i != childQueues.size() - 1) {
-          writer.write(String.format(", "));
-        }
+        printJobQueueInfo(childQueues.get(i), writer, " " + prefix);
       }
-      writer.write("\n");
     }
-    writer.write(String.format("======================\n"));
     writer.flush();
   }
 
   private void displayQueueList() throws IOException {
     JobQueueInfo[] rootQueues = jc.getRootQueues();
-    List<JobQueueInfo> allQueues = expandQueueList(rootQueues);
-    for (JobQueueInfo queue : allQueues) {
-      printJobQueueInfo(queue, new PrintWriter(System.out));
+    for (JobQueueInfo queue : rootQueues) {
+      printJobQueueInfo(queue, new PrintWriter(System.out), "");
     }
   }
 
@@ -181,7 +174,7 @@ class JobQueueClient extends Configured implements Tool {
       System.out.println("Queue \"" + queue + "\" does not exist.");
       return;
     }
-    printJobQueueInfo(jobQueueInfo, new PrintWriter(System.out));
+    printJobQueueInfo(jobQueueInfo, new PrintWriter(System.out), "");
     if (showJobs && (jobQueueInfo.getChildren() == null ||
         jobQueueInfo.getChildren().size() == 0)) {
       JobStatus[] jobs = jc.getJobsFromQueue(queue);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java
index 41e6d09ebdd..5c9f9f0f56f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java
@@ -105,7 +105,7 @@ public class JobQueueInfo extends QueueInfo {
   public List<JobQueueInfo> getChildren() {
     List<JobQueueInfo> list = new ArrayList<JobQueueInfo>();
     for (QueueInfo q : super.getQueueChildren()) {
-      list.add((JobQueueInfo)q);
+      list.add(new JobQueueInfo(q));
     }
     return list;
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobQueueClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobQueueClient.java
new file mode 100644
index 00000000000..636b12a89dc
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobQueueClient.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.mapred;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+
+public class TestJobQueueClient {
+  /**
+   * Test that print job queue recursively prints child queues
+   */
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testPrintJobQueueInfo() throws IOException {
+    JobQueueClient queueClient = new JobQueueClient();
+    JobQueueInfo parent = new JobQueueInfo();
+    JobQueueInfo child = new JobQueueInfo();
+    JobQueueInfo grandChild = new JobQueueInfo();
+    child.addChild(grandChild);
+    parent.addChild(child);
+    grandChild.setQueueName("GrandChildQueue");
+
+    ByteArrayOutputStream bbos = new ByteArrayOutputStream();
+    PrintWriter writer = new PrintWriter(bbos);
+    queueClient.printJobQueueInfo(parent, writer, "");
+
+    Assert.assertTrue("printJobQueueInfo did not print grandchild's name",
+      bbos.toString().contains("GrandChildQueue"));
+  }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
index 1645ae88ebc..09bae2d5ecd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
@@ -198,13 +198,16 @@ public class ResourceMgrDelegate {
   }
 
   private void getChildQueues(org.apache.hadoop.yarn.api.records.QueueInfo parent,
-      List<org.apache.hadoop.yarn.api.records.QueueInfo> queues) {
+      List<org.apache.hadoop.yarn.api.records.QueueInfo> queues,
+      boolean recursive) {
     List<org.apache.hadoop.yarn.api.records.QueueInfo> childQueues =
         parent.getChildQueues();
 
     for (org.apache.hadoop.yarn.api.records.QueueInfo child : childQueues) {
       queues.add(child);
-      getChildQueues(child, queues);
+      if(recursive) {
+        getChildQueues(child, queues, recursive);
+      }
     }
   }
 
@@ -226,7 +229,7 @@ public class ResourceMgrDelegate {
 
     org.apache.hadoop.yarn.api.records.QueueInfo rootQueue =
       applicationsManager.getQueueInfo(
        getQueueInfoRequest(ROOT, false, true, true)).getQueueInfo();
-    getChildQueues(rootQueue, queues);
+    getChildQueues(rootQueue, queues, true);
     return TypeConverter.fromYarnQueueInfo(queues, this.conf);
   }
 
@@ -238,8 +241,8 @@ public class ResourceMgrDelegate {
 
     org.apache.hadoop.yarn.api.records.QueueInfo rootQueue =
       applicationsManager.getQueueInfo(
-        getQueueInfoRequest(ROOT, false, true, false)).getQueueInfo();
-    getChildQueues(rootQueue, queues);
+        getQueueInfoRequest(ROOT, false, true, true)).getQueueInfo();
+    getChildQueues(rootQueue, queues, false);
     return TypeConverter.fromYarnQueueInfo(queues, this.conf);
   }
 
@@ -252,7 +255,7 @@ public class ResourceMgrDelegate {
 
     org.apache.hadoop.yarn.api.records.QueueInfo parentQueue =
       applicationsManager.getQueueInfo(
        getQueueInfoRequest(parent, false, true, false)).getQueueInfo();
-    getChildQueues(parentQueue, queues);
+    getChildQueues(parentQueue, queues, true);
     return TypeConverter.fromYarnQueueInfo(queues, this.conf);
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
new file mode 100644
index 00000000000..5334f29c9b0
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
@@ -0,0 +1,63 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.yarn.api.ClientRMProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
+
+public class TestResourceMgrDelegate {
+
+  /**
+   * Tests that getRootQueues makes a request for the (recursive) child queues
+   */
+  @Test
+  public void testGetRootQueues() throws IOException, InterruptedException {
+    ClientRMProtocol applicationsManager = Mockito.mock(ClientRMProtocol.class);
+    GetQueueInfoResponse response = Mockito.mock(GetQueueInfoResponse.class);
+    org.apache.hadoop.yarn.api.records.QueueInfo queueInfo =
+      Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
+    Mockito.when(response.getQueueInfo()).thenReturn(queueInfo);
+    Mockito.when(applicationsManager.getQueueInfo(Mockito.any(
+      GetQueueInfoRequest.class))).thenReturn(response);
+
+    ResourceMgrDelegate delegate = new ResourceMgrDelegate(
+      new YarnConfiguration(), applicationsManager);
+    delegate.getRootQueues();
+
+    ArgumentCaptor<GetQueueInfoRequest> argument =
+      ArgumentCaptor.forClass(GetQueueInfoRequest.class);
+    Mockito.verify(delegate.applicationsManager).getQueueInfo(
+      argument.capture());
+
+    Assert.assertTrue("Children of root queue not requested",
+      argument.getValue().getIncludeChildQueues());
+    Assert.assertTrue("Request wasn't to recurse through children",
+      argument.getValue().getRecursive());
+  }
+
+}
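
Not part of the patch: a minimal sketch of how client code might walk the queue tree that TypeConverter.fromYarn now populates, assuming only the existing JobClient.getRootQueues() and JobQueueInfo.getChildren() APIs shown above; the class name and the four-space indentation width are illustrative, not taken from the patch.

package org.apache.hadoop.mapred;

import java.io.IOException;

public class QueueTreeWalkerSketch {
  // Recursively print each queue name, indenting children one level deeper,
  // in the same spirit as JobQueueClient.printJobQueueInfo's prefix argument.
  private static void printQueue(JobQueueInfo queue, String prefix) {
    System.out.println(prefix + "Queue Name : " + queue.getQueueName());
    for (JobQueueInfo child : queue.getChildren()) {
      printQueue(child, "    " + prefix);
    }
  }

  public static void main(String[] args) throws IOException {
    // getRootQueues() returns the top-level queues; with this patch their
    // child queues are attached as well instead of being dropped.
    JobClient client = new JobClient(new JobConf());
    for (JobQueueInfo root : client.getRootQueues()) {
      printQueue(root, "");
    }
  }
}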