diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ecdda61890d..4a29b77abd9 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -78,6 +78,9 @@ Release 2.8.0 - UNRELEASED
     YARN-3318. Create Initial OrderingPolicy Framework and FifoOrderingPolicy.
     (Craig Welch via wangda)
 
+    YARN-3326. Support RESTful API for getLabelsToNodes. (Naganarasimha G R
+    via ozawa)
+
   IMPROVEMENTS
 
     YARN-1880. Cleanup TestApplicationClientProtocolOnHA
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodeIDsInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodeIDsInfo.java
new file mode 100644
index 00000000000..39d636d1613
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodeIDsInfo.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.webapp;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "labelsToNodesInfo")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class NodeIDsInfo {
+
+  /**
+   * Set doesn't provide a default no-arg constructor, which JAXB requires.
+   */
+  protected ArrayList<String> nodeIDsList = new ArrayList<String>();
+
+  public NodeIDsInfo() {
+  } // JAXB needs this
+
+  public NodeIDsInfo(List<String> nodeIdsList) {
+    this.nodeIDsList.addAll(nodeIdsList);
+  }
+
+  public ArrayList<String> getNodeIDs() {
+    return nodeIDsList;
+  }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
index 967fbf19763..fae0806d02f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
@@ -20,10 +20,11 @@
 import java.io.IOException;
 import java.lang.reflect.UndeclaredThrowableException;
-import java.security.AccessControlException;
 import java.nio.ByteBuffer;
+import java.security.AccessControlException;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
@@ -31,6 +32,7 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
@@ -64,25 +66,25 @@
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
-import org.apache.hadoop.security.token.SecretManager.InvalidToken;
-import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -116,12 +118,11 @@
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppAttemptInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppAttemptsInfo;
-import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NewApplication;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
-import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppState;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppQueue;
-import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ApplicationSubmissionContextInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppState;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ApplicationStatisticsInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ApplicationSubmissionContextInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppsInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterInfo;
@@ -130,15 +131,17 @@
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.DelegationToken;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FairSchedulerInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FifoSchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.LabelsToNodesInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.LocalResourceInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NewApplication;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeLabelsInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeToLabelsInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodesInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ResourceInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedulerInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedulerTypeInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.StatisticsItemInfo;
-import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeLabelsInfo;
-import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeToLabelsInfo;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.util.AdHocLogDumper;
 import org.apache.hadoop.yarn.util.ConverterUtils;
@@ -781,7 +784,35 @@ public NodeToLabelsInfo getNodeToLabels(@Context HttpServletRequest hsr)
 
     return ntl;
   }
-  
+
+  @GET
+  @Path("/label-mappings")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public LabelsToNodesInfo getLabelsToNodes(
+      @QueryParam("labels") Set<String> labels) throws IOException {
+    init();
+
+    LabelsToNodesInfo lts = new LabelsToNodesInfo();
+    Map<String, NodeIDsInfo> ltsMap = lts.getLabelsToNodes();
+    Map<String, Set<NodeId>> labelsToNodeId = null;
+    if (labels == null || labels.size() == 0) {
+      labelsToNodeId =
+          rm.getRMContext().getNodeLabelManager().getLabelsToNodes();
+    } else {
+      labelsToNodeId =
+          rm.getRMContext().getNodeLabelManager().getLabelsToNodes(labels);
+    }
+
+    for (Entry<String, Set<NodeId>> entry : labelsToNodeId.entrySet()) {
+      List<String> nodeIdStrList = new ArrayList<String>();
+      for (NodeId nodeId : entry.getValue()) {
+        nodeIdStrList.add(nodeId.toString());
+      }
+      ltsMap.put(entry.getKey(), new NodeIDsInfo(nodeIdStrList));
+    }
+    return lts;
+  }
+
   @POST
   @Path("/replace-node-to-labels")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/LabelsToNodesInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/LabelsToNodesInfo.java
new file mode 100644
index 00000000000..625feddba79
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/LabelsToNodesInfo.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.NodeIDsInfo;
+
+@XmlRootElement(name = "labelsToNodesInfo")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class LabelsToNodesInfo {
+
+  protected Map<String, NodeIDsInfo> labelsToNodes =
+      new HashMap<String, NodeIDsInfo>();
+
+  public LabelsToNodesInfo() {
+  } // JAXB needs this
+
+  public Map<String, NodeIDsInfo> getLabelsToNodes() {
+    return labelsToNodes;
+  }
+}
\ No newline at end of file
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java
index 0cc576cb12e..40c54a30a6a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesNodeLabels.java
@@ -33,6 +33,7 @@
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.LabelsToNodesInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeLabelsInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeToLabelsInfo;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
@@ -51,6 +52,7 @@
 import com.sun.jersey.api.json.JSONJAXBContext;
 import com.sun.jersey.api.json.JSONMarshaller;
 import com.sun.jersey.api.json.JSONUnmarshaller;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
 import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
 import com.sun.jersey.test.framework.WebAppDescriptor;
@@ -160,6 +162,59 @@ public void testNodeLabels() throws JSONException, Exception {
             .post(ClientResponse.class);
     LOG.info("posted node nodelabel");
 
+    // Add labels to another node
+    response =
+        r.path("ws").path("v1").path("cluster")
+            .path("nodes").path("nid1:0")
+            .path("replace-labels")
+            .queryParam("user.name", userName)
+            .accept(MediaType.APPLICATION_JSON)
+            .entity("{\"nodeLabels\": [\"b\"]}",
+                MediaType.APPLICATION_JSON)
+            .post(ClientResponse.class);
+    LOG.info("posted node nodelabel");
+
+    // Add labels to another node
+    response =
+        r.path("ws").path("v1").path("cluster")
+            .path("nodes").path("nid2:0")
+            .path("replace-labels")
+            .queryParam("user.name", userName)
+            .accept(MediaType.APPLICATION_JSON)
+            .entity("{\"nodeLabels\": [\"b\"]}",
+                MediaType.APPLICATION_JSON)
+            .post(ClientResponse.class);
+    LOG.info("posted node nodelabel");
+
+    // Verify, using get-labels-to-Nodes
+    response =
+        r.path("ws").path("v1").path("cluster")
+            .path("label-mappings").queryParam("user.name", userName)
+            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    LabelsToNodesInfo ltni = response.getEntity(LabelsToNodesInfo.class);
+    assertEquals(2, ltni.getLabelsToNodes().size());
+    NodeIDsInfo nodes = ltni.getLabelsToNodes().get("b");
+    assertTrue(nodes.getNodeIDs().contains("nid2:0"));
+    assertTrue(nodes.getNodeIDs().contains("nid1:0"));
+    nodes = ltni.getLabelsToNodes().get("a");
+    assertTrue(nodes.getNodeIDs().contains("nid:0"));
+
+    // Verify, using get-labels-to-Nodes for a specified set of labels
+    MultivaluedMapImpl params = new MultivaluedMapImpl();
+    params.add("labels", "a");
+    response =
+        r.path("ws").path("v1").path("cluster")
+            .path("label-mappings").queryParam("user.name", userName)
+            .queryParams(params)
+            .accept(MediaType.APPLICATION_JSON)
+            .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    ltni = response.getEntity(LabelsToNodesInfo.class);
+    assertEquals(1, ltni.getLabelsToNodes().size());
+    nodes = ltni.getLabelsToNodes().get("a");
+    assertTrue(nodes.getNodeIDs().contains("nid:0"));
+
     // Verify
     response =
         r.path("ws").path("v1").path("cluster")
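
For readers who want to exercise the new endpoint outside the JUnit/Jersey harness above, here is a minimal client sketch. It assumes a ResourceManager reachable at rm-host:8088 and a label named "a"; those values, and the LabelMappingsClientSketch class itself, are illustrative and not part of the patch. Only the request path /ws/v1/cluster/label-mappings and the optional labels query parameter come from the getLabelsToNodes() method added to RMWebServices.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class LabelMappingsClientSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical RM address; adjust host/port for your cluster.
    // "?labels=a" restricts the result to label "a", matching the
    // @QueryParam("labels") parameter of the new REST method.
    URL url = new URL(
        "http://rm-host:8088/ws/v1/cluster/label-mappings?labels=a");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    conn.setRequestProperty("Accept", "application/json");
    int status = conn.getResponseCode();

    StringBuilder body = new StringBuilder();
    BufferedReader reader = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8));
    try {
      String line;
      while ((line = reader.readLine()) != null) {
        body.append(line);
      }
    } finally {
      reader.close();
      conn.disconnect();
    }
    // The JSON body is expected to be keyed by label name, with each entry
    // carrying the node IDs serialized from NodeIDsInfo (shape illustrative).
    System.out.println(status + " " + body);
  }
}

Omitting the labels parameter should return the mapping for every known label, mirroring the labels == null || labels.size() == 0 branch in the servlet method.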