YARN-3413. Changed Nodelabel attributes (like exclusivity) to be settable only via addToClusterNodeLabels but not changeable at runtime. (Wangda Tan via vinodkv)
(cherry picked from commit f5fe35e297)
parent 48d8d88245
commit 0ec6e7e7f5
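Note (not part of the commit): after this change a label's exclusivity is fixed when the label is created through addToClusterNodeLabels; the separate updateNodeLabels RPC that could change it at runtime is removed. A minimal sketch of the new admin-side flow, assuming an already-obtained ResourceManagerAdministrationProtocol proxy named adminProtocol and illustrative label names:

import java.util.Arrays;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest;

// Exclusivity is a creation-time attribute of each NodeLabel.
NodeLabel exclusive = NodeLabel.newInstance("x");        // defaults to exclusive=true
NodeLabel shared = NodeLabel.newInstance("y", false);    // explicitly non-exclusive

// The request now carries full NodeLabel objects instead of plain strings.
AddToClusterNodeLabelsRequest request =
    AddToClusterNodeLabelsRequest.newInstance(Arrays.asList(exclusive, shared));
adminProtocol.addToClusterNodeLabels(request);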
@@ -58,6 +58,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerReport;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
@@ -461,7 +462,7 @@ public class ResourceMgrDelegate extends YarnClient {
   }
 
   @Override
-  public Set<String> getClusterNodeLabels()
+  public List<NodeLabel> getClusterNodeLabels()
       throws YarnException, IOException {
     return client.getClusterNodeLabels();
   }
@@ -18,16 +18,17 @@
 
 package org.apache.hadoop.yarn.api.protocolrecords;
 
-import java.util.Set;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.util.Records;
 
 @Public
 @Evolving
 public abstract class GetClusterNodeLabelsResponse {
-  public static GetClusterNodeLabelsResponse newInstance(Set<String> labels) {
+  public static GetClusterNodeLabelsResponse newInstance(List<NodeLabel> labels) {
     GetClusterNodeLabelsResponse request =
         Records.newRecord(GetClusterNodeLabelsResponse.class);
     request.setNodeLabels(labels);
@@ -36,9 +37,9 @@ public abstract class GetClusterNodeLabelsResponse {
 
   @Public
   @Evolving
-  public abstract void setNodeLabels(Set<String> labels);
+  public abstract void setNodeLabels(List<NodeLabel> labels);
 
   @Public
   @Evolving
-  public abstract Set<String> getNodeLabels();
+  public abstract List<NodeLabel> getNodeLabels();
 }
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.yarn.api.records;
 
+import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -25,31 +26,73 @@ import org.apache.hadoop.yarn.util.Records;
 
 @Public
 @Unstable
-public abstract class NodeLabel {
-  @Public
+public abstract class NodeLabel implements Comparable<NodeLabel> {
+  /**
+   * By default, node label is exclusive or not
+   */
+  @Private
   @Unstable
-  public static NodeLabel newInstance(String nodeLabel,
-      boolean isExclusive) {
-    NodeLabel request =
-        Records.newRecord(NodeLabel.class);
-    request.setNodeLabel(nodeLabel);
-    request.setIsExclusive(isExclusive);
+  public static final boolean DEFAULT_NODE_LABEL_EXCLUSIVITY = true;
+
+  @Private
+  @Unstable
+  public static NodeLabel newInstance(String name) {
+    return newInstance(name, DEFAULT_NODE_LABEL_EXCLUSIVITY);
+  }
+
+  @Private
+  @Unstable
+  public static NodeLabel newInstance(String name, boolean isExclusive) {
+    NodeLabel request = Records.newRecord(NodeLabel.class);
+    request.setName(name);
+    request.setExclusivity(isExclusive);
     return request;
   }
 
   @Public
   @Stable
-  public abstract String getNodeLabel();
+  public abstract String getName();
 
-  @Public
+  @Private
   @Unstable
-  public abstract void setNodeLabel(String nodeLabel);
+  public abstract void setName(String name);
 
   @Public
   @Stable
-  public abstract boolean getIsExclusive();
+  public abstract boolean isExclusive();
 
-  @Public
+  @Private
   @Unstable
-  public abstract void setIsExclusive(boolean isExclusive);
+  public abstract void setExclusivity(boolean isExclusive);
+
+  @Override
+  public int compareTo(NodeLabel other) {
+    return getName().compareTo(other.getName());
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (obj instanceof NodeLabel) {
+      NodeLabel nl = (NodeLabel) obj;
+      return nl.getName().equals(getName())
+          && nl.isExclusive() == isExclusive();
+    }
+    return false;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("<");
+    sb.append(getName());
+    sb.append(":exclusivity=");
+    sb.append(isExclusive());
+    sb.append(">");
+    return sb.toString();
+  }
+
+  @Override
+  public int hashCode() {
+    return (getName().hashCode() << 16) + (isExclusive() ? 1 : 0);
+  }
 }
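A short usage sketch (not from the patch) of the reworked NodeLabel record, using only the methods introduced above; the printed string follows the toString() format defined in the diff:

import org.apache.hadoop.yarn.api.records.NodeLabel;

NodeLabel a = NodeLabel.newInstance("gpu");          // exclusivity defaults to true
NodeLabel b = NodeLabel.newInstance("gpu", false);   // same name, non-exclusive

// equals() compares both name and exclusivity, so a and b differ...
boolean sameLabel = a.equals(b);                     // false
// ...while compareTo() orders labels by name only.
int byName = a.compareTo(b);                         // 0

String printed = a.toString();                       // "<gpu:exclusivity=true>"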
@@ -50,8 +50,6 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLa
 import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsRequest;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse;
 
@@ -139,12 +137,6 @@ public interface ResourceManagerAdministrationProtocol extends GetUserMappingsPr
   public ReplaceLabelsOnNodeResponse replaceLabelsOnNode(
       ReplaceLabelsOnNodeRequest request) throws YarnException, IOException;
 
-  @Public
-  @Evolving
-  @Idempotent
-  public UpdateNodeLabelsResponse updateNodeLabels(
-      UpdateNodeLabelsRequest request) throws YarnException, IOException;
-
   @Public
   @Evolving
   @Idempotent
@@ -18,27 +18,31 @@
 
 package org.apache.hadoop.yarn.server.api.protocolrecords;
 
-import java.util.Set;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.util.Records;
 
 @Public
-@Evolving
+@Unstable
 public abstract class AddToClusterNodeLabelsRequest {
-  public static AddToClusterNodeLabelsRequest newInstance(Set<String> labels) {
+  @Public
+  @Unstable
+  public static AddToClusterNodeLabelsRequest newInstance(
+      List<NodeLabel> NodeLabels) {
     AddToClusterNodeLabelsRequest request =
         Records.newRecord(AddToClusterNodeLabelsRequest.class);
-    request.setNodeLabels(labels);
+    request.setNodeLabels(NodeLabels);
     return request;
   }
 
   @Public
-  @Evolving
-  public abstract void setNodeLabels(Set<String> labels);
+  @Unstable
+  public abstract void setNodeLabels(List<NodeLabel> NodeLabels);
 
   @Public
-  @Evolving
-  public abstract Set<String> getNodeLabels();
+  @Unstable
+  public abstract List<NodeLabel> getNodeLabels();
 }
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.api.protocolrecords;
-
-import java.util.List;
-
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.yarn.api.records.NodeLabel;
-import org.apache.hadoop.yarn.util.Records;
-
-@Public
-@Unstable
-public abstract class UpdateNodeLabelsRequest {
-  @Public
-  @Unstable
-  public static UpdateNodeLabelsRequest newInstance(
-      List<NodeLabel> NodeLabels) {
-    UpdateNodeLabelsRequest request =
-        Records.newRecord(UpdateNodeLabelsRequest.class);
-    request.setNodeLabels(NodeLabels);
-    return request;
-  }
-
-  @Public
-  @Unstable
-  public abstract void setNodeLabels(
-      List<NodeLabel> NodeLabels);
-
-  @Public
-  @Unstable
-  public abstract List<NodeLabel> getNodeLabels();
-}
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.api.protocolrecords;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.yarn.util.Records;
-
-@Public
-@Unstable
-public class UpdateNodeLabelsResponse {
-
-  @Private
-  @Unstable
-  public static UpdateNodeLabelsResponse newInstance() {
-    UpdateNodeLabelsResponse response =
-        Records.newRecord(UpdateNodeLabelsResponse.class);
-    return response;
-  }
-}
@@ -42,6 +42,5 @@ service ResourceManagerAdministrationProtocolService {
   rpc addToClusterNodeLabels(AddToClusterNodeLabelsRequestProto) returns (AddToClusterNodeLabelsResponseProto);
   rpc removeFromClusterNodeLabels(RemoveFromClusterNodeLabelsRequestProto) returns (RemoveFromClusterNodeLabelsResponseProto);
   rpc replaceLabelsOnNodes(ReplaceLabelsOnNodeRequestProto) returns (ReplaceLabelsOnNodeResponseProto);
-  rpc updateNodeLabels(UpdateNodeLabelsRequestProto) returns (UpdateNodeLabelsResponseProto);
   rpc checkForDecommissioningNodes(CheckForDecommissioningNodesRequestProto) returns (CheckForDecommissioningNodesResponseProto);
 }
@@ -77,7 +77,7 @@ message UpdateNodeResourceResponseProto {
 }
 
 message AddToClusterNodeLabelsRequestProto {
-  repeated string nodeLabels = 1;
+  repeated NodeLabelProto nodeLabels = 1;
 }
 
 message AddToClusterNodeLabelsResponseProto {
@@ -98,11 +98,6 @@ message ReplaceLabelsOnNodeResponseProto {
 
 }
 
-message UpdateNodeLabelsRequestProto {
-  repeated NodeLabelProto nodeLabels = 1;
-}
-
-
 message UpdateNodeLabelsResponseProto {
 }
 
@@ -262,7 +262,7 @@ message LabelsToNodeIdsProto {
 }
 
 message NodeLabelProto {
-  optional string nodeLabel = 1;
+  optional string name = 1;
   optional bool isExclusive = 2 [default = true];
 }
 
@@ -213,7 +213,7 @@ message GetClusterNodeLabelsRequestProto {
 }
 
 message GetClusterNodeLabelsResponseProto {
-  repeated string nodeLabels = 1;
+  repeated NodeLabelProto nodeLabels = 1;
 }
 
 //////////////////////////////////////////////////////
@@ -54,7 +54,7 @@ public class TestDistributedShellWithNodeLabels {
     RMNodeLabelsManager labelsMgr = rmContext.getNodeLabelManager();
     Set<String> labels = new HashSet<String>();
     labels.add("x");
-    labelsMgr.addToCluserNodeLabels(labels);
+    labelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(labels);
 
     // Setup queue access to node labels
     distShellTest.conf.set("yarn.scheduler.capacity.root.accessible-node-labels", "x");
@@ -46,6 +46,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerReport;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.QueueInfo;
@@ -663,6 +664,6 @@ public abstract class YarnClient extends AbstractService {
    */
   @Public
   @Unstable
-  public abstract Set<String> getClusterNodeLabels()
+  public abstract List<NodeLabel> getClusterNodeLabels()
       throws YarnException, IOException;
 }
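For client code, getClusterNodeLabels() now returns full NodeLabel records rather than bare names. A minimal sketch of a caller, assuming a started YarnClient (createYarnClient/init/start are the usual YarnClient lifecycle and are not part of this patch):

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

Configuration conf = new YarnConfiguration();
YarnClient yarnClient = YarnClient.createYarnClient();
yarnClient.init(conf);
yarnClient.start();

// Each entry now exposes the exclusivity attribute alongside the name.
List<NodeLabel> labels = yarnClient.getClusterNodeLabels();
for (NodeLabel label : labels) {
  System.out.println(label.getName() + " exclusive=" + label.isExclusive());
}
yarnClient.stop();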
@@ -86,6 +86,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.ContainerReport;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.QueueInfo;
@@ -815,7 +816,7 @@ public class YarnClientImpl extends YarnClient {
   }
 
   @Override
-  public Set<String> getClusterNodeLabels() throws YarnException, IOException {
+  public List<NodeLabel> getClusterNodeLabels() throws YarnException, IOException {
     return rmClient.getClusterNodeLabels(
         GetClusterNodeLabelsRequest.newInstance()).getNodeLabels();
   }
@@ -25,9 +25,7 @@ import java.io.PrintWriter;
 import java.io.UnsupportedEncodingException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.GnuParser;
@@ -38,6 +36,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
@@ -113,23 +112,16 @@ public class ClusterCLI extends YarnCLI {
     return 0;
   }
 
-  private List<String> sortStrSet(Set<String> labels) {
-    List<String> list = new ArrayList<String>();
-    list.addAll(labels);
-    Collections.sort(list);
-    return list;
-  }
-
   void printClusterNodeLabels() throws YarnException, IOException {
-    Set<String> nodeLabels = null;
+    List<NodeLabel> nodeLabels = null;
     if (accessLocal) {
       nodeLabels =
-          getNodeLabelManagerInstance(getConf()).getClusterNodeLabels();
+          new ArrayList<>(getNodeLabelManagerInstance(getConf()).getClusterNodeLabels());
     } else {
-      nodeLabels = client.getClusterNodeLabels();
+      nodeLabels = new ArrayList<>(client.getClusterNodeLabels());
     }
     sysout.println(String.format("Node Labels: %s",
-        StringUtils.join(sortStrSet(nodeLabels).iterator(), ",")));
+        StringUtils.join(nodeLabels.iterator(), ",")));
   }
 
   @VisibleForTesting
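With ClusterCLI now joining NodeLabel objects directly, the listing uses NodeLabel.toString() instead of sorted plain names, so exclusivity shows up in the output. A small illustration with an assumed label set (not from the patch):

import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.yarn.api.records.NodeLabel;

List<NodeLabel> nodeLabels = Arrays.asList(
    NodeLabel.newInstance("x"), NodeLabel.newInstance("y", false));
// Mirrors the join in printClusterNodeLabels(); prints:
// Node Labels: <x:exclusivity=true>,<y:exclusivity=false>
System.out.println(String.format("Node Labels: %s",
    StringUtils.join(nodeLabels.iterator(), ",")));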
@@ -40,6 +40,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.yarn.api.records.DecommissionType;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.client.ClientRMProxy;
 import org.apache.hadoop.yarn.client.RMHAServiceTarget;
 import org.apache.hadoop.yarn.conf.HAUtil;
@@ -65,6 +66,7 @@ import org.apache.hadoop.yarn.util.ConverterUtils;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
 
 @Private
 @Unstable
@@ -80,6 +82,9 @@ public class RMAdminCLI extends HAAdmin {
       "No node-to-labels mappings are specified";
   private static final String INVALID_TIMEOUT_ERR_MSG =
       "Invalid timeout specified : ";
+  private static final String ADD_LABEL_FORMAT_ERR_MSG =
+      "Input format for adding node-labels is not correct, it should be "
+          + "labelName1[(exclusive=true/false)],LabelName2[] ..";
 
   protected final static Map<String, UsageInfo> ADMIN_USAGE =
       ImmutableMap.<String, UsageInfo>builder()
@@ -104,7 +109,8 @@ public class RMAdminCLI extends HAAdmin {
           .put("-getGroups", new UsageInfo("[username]",
               "Get the groups which given user belongs to."))
           .put("-addToClusterNodeLabels",
-              new UsageInfo("[label1,label2,label3] (label splitted by \",\")",
+              new UsageInfo("[label1(exclusive=true),"
+                  + "label2(exclusive=false),label3]",
                   "add to cluster node labels "))
           .put("-removeFromClusterNodeLabels",
               new UsageInfo("[label1,label2,label3] (label splitted by \",\")",
@@ -407,7 +413,66 @@ public class RMAdminCLI extends HAAdmin {
     return localNodeLabelsManager;
   }
 
-  private Set<String> buildNodeLabelsSetFromStr(String args) {
+  private List<NodeLabel> buildNodeLabelsFromStr(String args) {
+    List<NodeLabel> nodeLabels = new ArrayList<>();
+    for (String p : args.split(",")) {
+      if (!p.trim().isEmpty()) {
+        String labelName = p;
+
+        // Try to parse exclusive
+        boolean exclusive = NodeLabel.DEFAULT_NODE_LABEL_EXCLUSIVITY;
+        int leftParenthesisIdx = p.indexOf("(");
+        int rightParenthesisIdx = p.indexOf(")");
+
+        if ((leftParenthesisIdx == -1 && rightParenthesisIdx != -1)
+            || (leftParenthesisIdx != -1 && rightParenthesisIdx == -1)) {
+          // Parenthese not match
+          throw new IllegalArgumentException(ADD_LABEL_FORMAT_ERR_MSG);
+        }
+
+        if (leftParenthesisIdx > 0 && rightParenthesisIdx > 0) {
+          if (leftParenthesisIdx > rightParenthesisIdx) {
+            // Parentese not match
+            throw new IllegalArgumentException(ADD_LABEL_FORMAT_ERR_MSG);
+          }
+
+          String property = p.substring(p.indexOf("(") + 1, p.indexOf(")"));
+          if (property.contains("=")) {
+            String key = property.substring(0, property.indexOf("=")).trim();
+            String value =
+                property
+                    .substring(property.indexOf("=") + 1, property.length())
+                    .trim();
+
+            // Now we only support one property, which is exclusive, so check if
+            // key = exclusive and value = {true/false}
+            if (key.equals("exclusive")
+                && ImmutableSet.of("true", "false").contains(value)) {
+              exclusive = Boolean.parseBoolean(value);
+            } else {
+              throw new IllegalArgumentException(ADD_LABEL_FORMAT_ERR_MSG);
+            }
+          } else if (!property.trim().isEmpty()) {
+            throw new IllegalArgumentException(ADD_LABEL_FORMAT_ERR_MSG);
+          }
+        }
+
+        // Try to get labelName if there's "(..)"
+        if (labelName.contains("(")) {
+          labelName = labelName.substring(0, labelName.indexOf("(")).trim();
+        }
+
+        nodeLabels.add(NodeLabel.newInstance(labelName, exclusive));
+      }
+    }
+
+    if (nodeLabels.isEmpty()) {
+      throw new IllegalArgumentException(NO_LABEL_ERR_MSG);
+    }
+    return nodeLabels;
+  }
+
+  private Set<String> buildNodeLabelNamesFromStr(String args) {
     Set<String> labels = new HashSet<String>();
     for (String p : args.split(",")) {
       if (!p.trim().isEmpty()) {
@@ -423,7 +488,7 @@ public class RMAdminCLI extends HAAdmin {
 
   private int addToClusterNodeLabels(String args) throws IOException,
       YarnException {
-    Set<String> labels = buildNodeLabelsSetFromStr(args);
+    List<NodeLabel> labels = buildNodeLabelsFromStr(args);
 
     if (directlyAccessNodeLabelStore) {
       getNodeLabelManagerInstance(getConf()).addToCluserNodeLabels(labels);
@@ -439,7 +504,7 @@ public class RMAdminCLI extends HAAdmin {
 
   private int removeFromClusterNodeLabels(String args) throws IOException,
       YarnException {
-    Set<String> labels = buildNodeLabelsSetFromStr(args);
+    Set<String> labels = buildNodeLabelNamesFromStr(args);
 
     if (directlyAccessNodeLabelStore) {
       getNodeLabelManagerInstance(getConf()).removeFromClusterNodeLabels(
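The admin CLI syntax gains an optional per-label exclusivity flag; per the usage string and tests in this commit, arguments such as "x(exclusive=true)", "y(exclusive=false)", a bare "z" or "z()" are accepted, with the bare forms defaulting to exclusive. A sketch of the labels such an argument string is expected to resolve to (illustrative, mirroring buildNodeLabelsFromStr rather than calling it, since that method is private):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.yarn.api.records.NodeLabel;

// "-addToClusterNodeLabels w,x(exclusive=true),y(exclusive=false),z()"
// should resolve to the following labels:
List<NodeLabel> expected = Arrays.asList(
    NodeLabel.newInstance("w"),          // no property -> default exclusivity (true)
    NodeLabel.newInstance("x", true),
    NodeLabel.newInstance("y", false),
    NodeLabel.newInstance("z"));         // empty "()" also keeps the default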
@@ -27,8 +27,10 @@ import static org.mockito.Mockito.when;
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
 import java.io.PrintWriter;
-import java.util.HashSet;
+import java.util.ArrayList;
+import java.util.Arrays;
 
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.client.api.YarnClient;
 import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
 import org.junit.Before;
@@ -55,7 +57,8 @@ public class TestClusterCLI {
   public void testGetClusterNodeLabels() throws Exception {
     YarnClient client = mock(YarnClient.class);
     when(client.getClusterNodeLabels()).thenReturn(
-        ImmutableSet.of("label1", "label2"));
+        Arrays.asList(NodeLabel.newInstance("label1"),
+            NodeLabel.newInstance("label2")));
     ClusterCLI cli = new ClusterCLI();
     cli.setClient(client);
     cli.setSysOutPrintStream(sysOut);
@@ -67,7 +70,7 @@ public class TestClusterCLI {
 
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintWriter pw = new PrintWriter(baos);
-    pw.print("Node Labels: label1,label2");
+    pw.print("Node Labels: <label1:exclusivity=true>,<label2:exclusivity=true>");
     pw.close();
     verify(sysOut).println(baos.toString("UTF-8"));
   }
@@ -76,14 +79,16 @@ public class TestClusterCLI {
   public void testGetClusterNodeLabelsWithLocalAccess() throws Exception {
     YarnClient client = mock(YarnClient.class);
     when(client.getClusterNodeLabels()).thenReturn(
-        ImmutableSet.of("remote1", "remote2"));
+        Arrays.asList(NodeLabel.newInstance("remote1"),
+            NodeLabel.newInstance("remote2")));
     ClusterCLI cli = new ClusterCLI();
     cli.setClient(client);
     cli.setSysOutPrintStream(sysOut);
     cli.setSysErrPrintStream(sysErr);
     ClusterCLI.localNodeLabelsManager = mock(CommonNodeLabelsManager.class);
-    when(ClusterCLI.localNodeLabelsManager.getClusterNodeLabels())
-        .thenReturn(ImmutableSet.of("local1", "local2"));
+    when(ClusterCLI.localNodeLabelsManager.getClusterNodeLabels()).thenReturn(
+        Arrays.asList(NodeLabel.newInstance("local1"),
+            NodeLabel.newInstance("local2")));
 
     int rc =
         cli.run(new String[] { ClusterCLI.CMD,
@@ -94,7 +99,7 @@ public class TestClusterCLI {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintWriter pw = new PrintWriter(baos);
     // it should return local* instead of remote*
-    pw.print("Node Labels: local1,local2");
+    pw.print("Node Labels: <local1:exclusivity=true>,<local2:exclusivity=true>");
     pw.close();
     verify(sysOut).println(baos.toString("UTF-8"));
   }
@@ -102,7 +107,7 @@ public class TestClusterCLI {
   @Test
   public void testGetEmptyClusterNodeLabels() throws Exception {
     YarnClient client = mock(YarnClient.class);
-    when(client.getClusterNodeLabels()).thenReturn(new HashSet<String>());
+    when(client.getClusterNodeLabels()).thenReturn(new ArrayList<NodeLabel>());
     ClusterCLI cli = new ClusterCLI();
     cli.setClient(client);
     cli.setSysOutPrintStream(sysOut);
@@ -454,7 +454,7 @@ public class TestRMAdminCLI {
     String[] args =
         { "-addToClusterNodeLabels", "x,y", "-directlyAccessNodeLabelStore" };
     assertEquals(0, rmAdminCLI.run(args));
-    assertTrue(dummyNodeLabelsManager.getClusterNodeLabels().containsAll(
+    assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().containsAll(
         ImmutableSet.of("x", "y")));
 
     // reset localNodeLabelsManager
@@ -466,7 +466,7 @@ public class TestRMAdminCLI {
         new String[] { "-addToClusterNodeLabels",
             "-directlyAccessNodeLabelStore", "x,y" };
     assertEquals(0, rmAdminCLI.run(args));
-    assertTrue(dummyNodeLabelsManager.getClusterNodeLabels().containsAll(
+    assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().containsAll(
         ImmutableSet.of("x", "y")));
 
     // local node labels manager will be close after running
@@ -480,7 +480,7 @@ public class TestRMAdminCLI {
     assertEquals(0, rmAdminCLI.run(args));
 
     // localNodeLabelsManager shouldn't accessed
-    assertTrue(dummyNodeLabelsManager.getClusterNodeLabels().isEmpty());
+    assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().isEmpty());
 
     // remote node labels manager accessed
     assertTrue(remoteAdminServiceAccessed);
@@ -492,7 +492,7 @@ public class TestRMAdminCLI {
     String[] args =
         { "-addToClusterNodeLabels", "x", "-directlyAccessNodeLabelStore" };
     assertEquals(0, rmAdminCLI.run(args));
-    assertTrue(dummyNodeLabelsManager.getClusterNodeLabels().containsAll(
+    assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().containsAll(
         ImmutableSet.of("x")));
 
     // no labels, should fail
@@ -518,19 +518,61 @@ public class TestRMAdminCLI {
         new String[] { "-addToClusterNodeLabels", ",x,,",
             "-directlyAccessNodeLabelStore" };
     assertEquals(0, rmAdminCLI.run(args));
-    assertTrue(dummyNodeLabelsManager.getClusterNodeLabels().containsAll(
+    assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().containsAll(
         ImmutableSet.of("x")));
   }
 
+  @Test
+  public void testAddToClusterNodeLabelsWithExclusivitySetting()
+      throws Exception {
+    // Parenthese not match
+    String[] args = new String[] { "-addToClusterNodeLabels", "x(" };
+    assertTrue(0 != rmAdminCLI.run(args));
+
+    args = new String[] { "-addToClusterNodeLabels", "x)" };
+    assertTrue(0 != rmAdminCLI.run(args));
+
+    // Not expected key=value specifying inner parentese
+    args = new String[] { "-addToClusterNodeLabels", "x(key=value)" };
+    assertTrue(0 != rmAdminCLI.run(args));
+
+    // Not key is expected, but value not
+    args = new String[] { "-addToClusterNodeLabels", "x(exclusive=)" };
+    assertTrue(0 != rmAdminCLI.run(args));
+
+    // key=value both set
+    args =
+        new String[] { "-addToClusterNodeLabels",
+            "w,x(exclusive=true), y(exclusive=false),z()",
+            "-directlyAccessNodeLabelStore" };
+    assertTrue(0 == rmAdminCLI.run(args));
+
+    assertTrue(dummyNodeLabelsManager.isExclusiveNodeLabel("w"));
+    assertTrue(dummyNodeLabelsManager.isExclusiveNodeLabel("x"));
+    assertFalse(dummyNodeLabelsManager.isExclusiveNodeLabel("y"));
+    assertTrue(dummyNodeLabelsManager.isExclusiveNodeLabel("z"));
+
+    // key=value both set, and some spaces need to be handled
+    args =
+        new String[] { "-addToClusterNodeLabels",
            "a (exclusive= true) , b( exclusive =false),c ",
+            "-directlyAccessNodeLabelStore" };
+    assertTrue(0 == rmAdminCLI.run(args));
+
+    assertTrue(dummyNodeLabelsManager.isExclusiveNodeLabel("a"));
+    assertFalse(dummyNodeLabelsManager.isExclusiveNodeLabel("b"));
+    assertTrue(dummyNodeLabelsManager.isExclusiveNodeLabel("c"));
+  }
+
   @Test
   public void testRemoveFromClusterNodeLabels() throws Exception {
     // Successfully remove labels
-    dummyNodeLabelsManager.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
+    dummyNodeLabelsManager.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
     String[] args =
         { "-removeFromClusterNodeLabels", "x,,y",
             "-directlyAccessNodeLabelStore" };
     assertEquals(0, rmAdminCLI.run(args));
-    assertTrue(dummyNodeLabelsManager.getClusterNodeLabels().isEmpty());
+    assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().isEmpty());
 
     // no labels, should fail
     args = new String[] { "-removeFromClusterNodeLabels" };
@@ -555,7 +597,7 @@ public class TestRMAdminCLI {
   public void testReplaceLabelsOnNode() throws Exception {
     // Successfully replace labels
     dummyNodeLabelsManager
-        .addToCluserNodeLabels(ImmutableSet.of("x", "y", "Y"));
+        .addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y", "Y"));
     String[] args =
         { "-replaceLabelsOnNode",
            "node1:8000,x node2:8000=y node3,x node4=Y",
@@ -590,7 +632,7 @@ public class TestRMAdminCLI {
   @Test
   public void testReplaceMultipleLabelsOnSingleNode() throws Exception {
     // Successfully replace labels
-    dummyNodeLabelsManager.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
+    dummyNodeLabelsManager.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
     String[] args =
         { "-replaceLabelsOnNode", "node1,x,y",
            "-directlyAccessNodeLabelStore" };
@@ -18,22 +18,26 @@
 
 package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
 
-import java.util.HashSet;
-import java.util.Set;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
+import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelPBImpl;
+import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProtoOrBuilder;
 
 public class GetClusterNodeLabelsResponsePBImpl extends
     GetClusterNodeLabelsResponse {
-  Set<String> labels;
   GetClusterNodeLabelsResponseProto proto = GetClusterNodeLabelsResponseProto
       .getDefaultInstance();
   GetClusterNodeLabelsResponseProto.Builder builder = null;
+  private List<NodeLabel> updatedNodeLabels;
   boolean viaProto = false;
 
   public GetClusterNodeLabelsResponsePBImpl() {
-    this.builder = GetClusterNodeLabelsResponseProto.newBuilder();
+    builder = GetClusterNodeLabelsResponseProto.newBuilder();
   }
 
   public GetClusterNodeLabelsResponsePBImpl(
@@ -42,28 +46,6 @@ public class GetClusterNodeLabelsResponsePBImpl extends
     viaProto = true;
   }
 
-  private void maybeInitBuilder() {
-    if (viaProto || builder == null) {
-      builder = GetClusterNodeLabelsResponseProto.newBuilder(proto);
-    }
-    viaProto = false;
-  }
-
-  private void mergeLocalToBuilder() {
-    if (this.labels != null && !this.labels.isEmpty()) {
-      builder.clearNodeLabels();
-      builder.addAllNodeLabels(this.labels);
-    }
-  }
-
-  private void mergeLocalToProto() {
-    if (viaProto)
-      maybeInitBuilder();
-    mergeLocalToBuilder();
-    proto = builder.build();
-    viaProto = true;
-  }
-
   public GetClusterNodeLabelsResponseProto getProto() {
     mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
@@ -71,34 +53,28 @@ public class GetClusterNodeLabelsResponsePBImpl extends
     return proto;
   }
 
-  private void initNodeLabels() {
-    if (this.labels != null) {
-      return;
-    }
-    GetClusterNodeLabelsResponseProtoOrBuilder p = viaProto ? proto : builder;
-    this.labels = new HashSet<String>();
-    this.labels.addAll(p.getNodeLabelsList());
-  }
-
-  @Override
-  public void setNodeLabels(Set<String> labels) {
-    maybeInitBuilder();
-    if (labels == null || labels.isEmpty()) {
-      builder.clearNodeLabels();
-    }
-    this.labels = labels;
-  }
-
-  @Override
-  public Set<String> getNodeLabels() {
-    initNodeLabels();
-    return this.labels;
-  }
-
-  @Override
-  public int hashCode() {
-    assert false : "hashCode not designed";
-    return 0;
-  }
+  private void mergeLocalToProto() {
+    if (viaProto)
+      maybeInitBuilder();
+    mergeLocalToBuilder();
+    proto = builder.build();
+    viaProto = true;
+  }
+
+  private void mergeLocalToBuilder() {
+    if (this.updatedNodeLabels != null) {
+      addNodeLabelsToProto();
+    }
+  }
+
+  private void addNodeLabelsToProto() {
+    maybeInitBuilder();
+    builder.clearNodeLabels();
+    List<NodeLabelProto> protoList = new ArrayList<NodeLabelProto>();
+    for (NodeLabel r : this.updatedNodeLabels) {
+      protoList.add(convertToProtoFormat(r));
+    }
+    builder.addAllNodeLabels(protoList);
+  }
 
   @Override
@@ -110,4 +86,59 @@ public class GetClusterNodeLabelsResponsePBImpl extends
     }
     return false;
   }
+
+  @Override
+  public int hashCode() {
+    assert false : "hashCode not designed";
+    return 0;
+  }
+
+  private void maybeInitBuilder() {
+    if (viaProto || builder == null) {
+      builder = GetClusterNodeLabelsResponseProto.newBuilder(proto);
+    }
+    viaProto = false;
+  }
+
+  @Override
+  public void setNodeLabels(List<NodeLabel> updatedNodeLabels) {
+    maybeInitBuilder();
+    this.updatedNodeLabels = new ArrayList<>();
+    if (updatedNodeLabels == null) {
+      builder.clearNodeLabels();
+      return;
+    }
+    this.updatedNodeLabels.addAll(updatedNodeLabels);
+  }
+
+  private void initLocalNodeLabels() {
+    GetClusterNodeLabelsResponseProtoOrBuilder p = viaProto ? proto : builder;
+    List<NodeLabelProto> attributesProtoList = p.getNodeLabelsList();
+    this.updatedNodeLabels = new ArrayList<NodeLabel>();
+    for (NodeLabelProto r : attributesProtoList) {
+      this.updatedNodeLabels.add(convertFromProtoFormat(r));
+    }
+  }
+
+  @Override
+  public List<NodeLabel> getNodeLabels() {
+    if (this.updatedNodeLabels != null) {
+      return this.updatedNodeLabels;
+    }
+    initLocalNodeLabels();
+    return this.updatedNodeLabels;
+  }
+
+  private NodeLabel convertFromProtoFormat(NodeLabelProto p) {
+    return new NodeLabelPBImpl(p);
+  }
+
+  private NodeLabelProto convertToProtoFormat(NodeLabel t) {
+    return ((NodeLabelPBImpl) t).getProto();
+  }
+
+  @Override
+  public String toString() {
+    return getProto().toString();
+  }
 }
@@ -73,32 +73,32 @@ public class NodeLabelPBImpl extends NodeLabel {
   }
 
   @Override
-  public String getNodeLabel() {
+  public String getName() {
     NodeLabelProtoOrBuilder p = viaProto ? proto : builder;
-    if (!p.hasNodeLabel()) {
+    if (!p.hasName()) {
       return null;
     }
-    return (p.getNodeLabel());
+    return (p.getName());
   }
 
   @Override
-  public void setNodeLabel(String nodeLabel) {
+  public void setName(String name) {
     maybeInitBuilder();
-    if (nodeLabel == null) {
-      builder.clearNodeLabel();
+    if (name == null) {
+      builder.clearName();
       return;
     }
-    builder.setNodeLabel(nodeLabel);
+    builder.setName(name);
   }
 
   @Override
-  public boolean getIsExclusive() {
+  public boolean isExclusive() {
     NodeLabelProtoOrBuilder p = viaProto ? proto : builder;
     return p.getIsExclusive();
   }
 
   @Override
-  public void setIsExclusive(boolean isExclusive) {
+  public void setExclusivity(boolean isExclusive) {
     maybeInitBuilder();
     builder.setIsExclusive(isExclusive);
   }
@@ -19,6 +19,7 @@
 package org.apache.hadoop.yarn.nodelabels;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
@@ -47,13 +48,11 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.nodelabels.event.NodeLabelsStoreEvent;
 import org.apache.hadoop.yarn.nodelabels.event.NodeLabelsStoreEventType;
 import org.apache.hadoop.yarn.nodelabels.event.RemoveClusterNodeLabels;
 import org.apache.hadoop.yarn.nodelabels.event.StoreNewClusterNodeLabels;
-import org.apache.hadoop.yarn.nodelabels.event.StoreUpdateNodeLabelsEvent;
 import org.apache.hadoop.yarn.nodelabels.event.UpdateNodeToLabelsMappingsEvent;
 import org.apache.hadoop.yarn.util.resource.Resources;
 
@@ -185,13 +184,6 @@ public class CommonNodeLabelsManager extends AbstractService {
           store.updateNodeToLabelsMappings(updateNodeToLabelsMappingsEvent
               .getNodeToLabels());
           break;
-        case UPDATE_NODE_LABELS:
-          StoreUpdateNodeLabelsEvent
-              storeSetNodeLabelsEventEvent =
-                  (StoreUpdateNodeLabelsEvent) event;
-          store.updateNodeLabels(storeSetNodeLabelsEventEvent
-              .getUpdatedNodeLabels());
-          break;
         }
       } catch (IOException e) {
         LOG.error("Failed to store label modification to storage");
@@ -274,14 +266,9 @@ public class CommonNodeLabelsManager extends AbstractService {
     }
   }
 
-  /**
-   * Add multiple node labels to repository
-   *
-   * @param labels
-   *          new node labels added
-   */
   @SuppressWarnings("unchecked")
-  public void addToCluserNodeLabels(Set<String> labels) throws IOException {
+  public void addToCluserNodeLabels(Collection<NodeLabel> labels)
+      throws IOException {
     if (!nodeLabelsEnabled) {
       LOG.error(NODE_LABELS_NOT_ENABLED_ERR);
       throw new IOException(NODE_LABELS_NOT_ENABLED_ERR);
@@ -289,19 +276,19 @@ public class CommonNodeLabelsManager extends AbstractService {
     if (null == labels || labels.isEmpty()) {
      return;
    }
-    Set<String> newLabels = new HashSet<String>();
-    labels = normalizeLabels(labels);
+    List<NodeLabel> newLabels = new ArrayList<NodeLabel>();
+    normalizeNodeLabels(labels);
 
     // do a check before actual adding them, will throw exception if any of them
     // doesn't meet label name requirement
-    for (String label : labels) {
-      checkAndThrowLabelName(label);
+    for (NodeLabel label : labels) {
+      checkAndThrowLabelName(label.getName());
    }
 
-    for (String label : labels) {
+    for (NodeLabel label : labels) {
      // shouldn't overwrite it to avoid changing the Label.resource
-      if (this.labelCollections.get(label) == null) {
-        this.labelCollections.put(label, new RMNodeLabel(label));
+      if (this.labelCollections.get(label.getName()) == null) {
+        this.labelCollections.put(label.getName(), new RMNodeLabel(label));
        newLabels.add(label);
      }
    }
@@ -313,6 +300,22 @@ public class CommonNodeLabelsManager extends AbstractService {
     LOG.info("Add labels: [" + StringUtils.join(labels.iterator(), ",") + "]");
   }
 
+  /**
+   * Add multiple node labels to repository
+   *
+   * @param labels
+   *          new node labels added
+   */
+  @VisibleForTesting
+  public void addToCluserNodeLabelsWithDefaultExclusivity(Set<String> labels)
+      throws IOException {
+    Set<NodeLabel> nodeLabels = new HashSet<NodeLabel>();
+    for (String label : labels) {
+      nodeLabels.add(NodeLabel.newInstance(label));
+    }
+    addToCluserNodeLabels(nodeLabels);
+  }
+
   protected void checkAddLabelsToNode(
       Map<NodeId, Set<String>> addedLabelsToNode) throws IOException {
     if (null == addedLabelsToNode || addedLabelsToNode.isEmpty()) {
@@ -780,7 +783,7 @@ public class CommonNodeLabelsManager extends AbstractService {
    *
    * @return existing valid labels in repository
    */
-  public Set<String> getClusterNodeLabels() {
+  public Set<String> getClusterNodeLabelNames() {
     try {
       readLock.lock();
       Set<String> labels = new HashSet<String>(labelCollections.keySet());
@@ -791,39 +794,17 @@ public class CommonNodeLabelsManager extends AbstractService {
     }
   }
 
-  private void checkUpdateNodeLabels(
-      List<NodeLabel> updatedNodeLabels) throws YarnException {
-    // pre-check
-    for (NodeLabel label : updatedNodeLabels) {
-      if (!labelCollections.containsKey(label.getNodeLabel())) {
-        String message =
-            String.format(
-                "Trying to update a non-existing node-label=%s",
-                label.getNodeLabel());
-        LOG.error(message);
-        throw new YarnException(message);
-      }
-    }
-  }
-
-  @SuppressWarnings("unchecked")
-  public void updateNodeLabels(
-      List<NodeLabel> updatedNodeLabels) throws YarnException {
+  public List<NodeLabel> getClusterNodeLabels() {
     try {
-      writeLock.lock();
-      checkUpdateNodeLabels(updatedNodeLabels);
-
-      for (NodeLabel label : updatedNodeLabels) {
-        RMNodeLabel rmLabel = labelCollections.get(label.getNodeLabel());
-        rmLabel.setIsExclusive(label.getIsExclusive());
-      }
-
-      if (null != dispatcher && !updatedNodeLabels.isEmpty()) {
-        dispatcher.getEventHandler().handle(
-            new StoreUpdateNodeLabelsEvent(updatedNodeLabels));
+      readLock.lock();
+      List<NodeLabel> nodeLabels = new ArrayList<>();
+      for (RMNodeLabel label : labelCollections.values()) {
+        nodeLabels.add(NodeLabel.newInstance(label.getLabelName(),
+            label.getIsExclusive()));
       }
+      return nodeLabels;
     } finally {
-      writeLock.unlock();
+      readLock.unlock();
     }
   }
 
@@ -876,6 +857,12 @@ public class CommonNodeLabelsManager extends AbstractService {
     return newLabels;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void normalizeNodeLabels(Collection<NodeLabel> labels) {
|
||||||
|
for (NodeLabel label : labels) {
|
||||||
|
label.setName(normalizeLabel(label.getName()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
protected Node getNMInNodeSet(NodeId nodeId) {
|
protected Node getNMInNodeSet(NodeId nodeId) {
|
||||||
return getNMInNodeSet(nodeId, nodeCollections);
|
return getNMInNodeSet(nodeId, nodeCollections);
|
||||||
}
|
}
|
||||||
|
|
|
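
For orientation, a rough usage sketch of the reshaped manager API above (Java; the manager variable mgr and the label names are made up for illustration, mirroring the tests further down):

    // Exclusivity is fixed when a label is added; it can no longer be changed at runtime.
    mgr.addToCluserNodeLabels(Arrays.asList(
        NodeLabel.newInstance("gpu", false),        // shareable partition
        NodeLabel.newInstance("licensed", true)));  // exclusive partition

    // Callers that only have plain names fall back to the default exclusivity.
    mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("staging"));

    Set<String> names = mgr.getClusterNodeLabelNames();   // names only
    List<NodeLabel> labels = mgr.getClusterNodeLabels();  // names plus exclusivity
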
@@ -41,15 +41,12 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsRequestProto;
 import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl;
-import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeLabelsRequestPBImpl;
 
 import com.google.common.collect.Sets;
 
@@ -66,7 +63,7 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
   protected static final String EDITLOG_FILENAME = "nodelabel.editlog";
 
   protected enum SerializedLogType {
-    ADD_LABELS, NODE_TO_LABELS, REMOVE_LABELS, UPDATE_NODE_LABELS
+    ADD_LABELS, NODE_TO_LABELS, REMOVE_LABELS
   }
 
   Path fsWorkingPath;
@@ -138,12 +135,12 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
   }
 
   @Override
-  public void storeNewClusterNodeLabels(Set<String> labels)
+  public void storeNewClusterNodeLabels(List<NodeLabel> labels)
       throws IOException {
     ensureAppendEditlogFile();
     editlogOs.writeInt(SerializedLogType.ADD_LABELS.ordinal());
-    ((AddToClusterNodeLabelsRequestPBImpl) AddToClusterNodeLabelsRequest.newInstance(labels)).getProto()
-        .writeDelimitedTo(editlogOs);
+    ((AddToClusterNodeLabelsRequestPBImpl) AddToClusterNodeLabelsRequest
+        .newInstance(labels)).getProto().writeDelimitedTo(editlogOs);
     ensureCloseEditlogFile();
   }
 
@@ -157,16 +154,6 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
     ensureCloseEditlogFile();
   }
 
-  @Override
-  public void updateNodeLabels(List<NodeLabel> updatedNodeLabels)
-      throws IOException {
-    ensureAppendEditlogFile();
-    editlogOs.writeInt(SerializedLogType.UPDATE_NODE_LABELS.ordinal());
-    ((UpdateNodeLabelsRequestPBImpl) UpdateNodeLabelsRequest
-        .newInstance(updatedNodeLabels)).getProto().writeDelimitedTo(editlogOs);
-    ensureCloseEditlogFile();
-  }
-
   @Override
   public void recover() throws YarnException, IOException {
     /*
@@ -192,7 +179,7 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
     }
 
     if (null != is) {
-      Set<String> labels =
+      List<NodeLabel> labels =
           new AddToClusterNodeLabelsRequestPBImpl(
               AddToClusterNodeLabelsRequestProto.parseDelimitedFrom(is)).getNodeLabels();
       Map<NodeId, Set<String>> nodeToLabels =
@@ -216,10 +203,11 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
 
         switch (type) {
         case ADD_LABELS: {
-          Collection<String> labels =
-              AddToClusterNodeLabelsRequestProto.parseDelimitedFrom(is)
-                  .getNodeLabelsList();
-          mgr.addToCluserNodeLabels(Sets.newHashSet(labels.iterator()));
+          List<NodeLabel> labels =
+              new AddToClusterNodeLabelsRequestPBImpl(
+                  AddToClusterNodeLabelsRequestProto.parseDelimitedFrom(is))
+                  .getNodeLabels();
+          mgr.addToCluserNodeLabels(labels);
           break;
         }
         case REMOVE_LABELS: {
@@ -237,14 +225,6 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore {
           mgr.replaceLabelsOnNode(map);
           break;
         }
-        case UPDATE_NODE_LABELS: {
-          List<NodeLabel> attributes =
-              new UpdateNodeLabelsRequestPBImpl(
-                  UpdateNodeLabelsRequestProto.parseDelimitedFrom(is))
-                  .getNodeLabels();
-          mgr.updateNodeLabels(attributes);
-          break;
-        }
         }
       } catch (EOFException e) {
         // EOF hit, break
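
Every editlog record written by this store keeps the same framing: an int ordinal of SerializedLogType followed by one length-delimited protobuf. A minimal sketch of the ADD_LABELS round trip implied by the hunks above (stream names editlogOs/is and the manager mgr are taken from the diff; this is illustrative, not the full class):

    // write side
    editlogOs.writeInt(SerializedLogType.ADD_LABELS.ordinal());
    ((AddToClusterNodeLabelsRequestPBImpl) AddToClusterNodeLabelsRequest
        .newInstance(labels)).getProto().writeDelimitedTo(editlogOs);

    // recover side: parse the delimited proto back into NodeLabel records
    List<NodeLabel> recovered = new AddToClusterNodeLabelsRequestPBImpl(
        AddToClusterNodeLabelsRequestProto.parseDelimitedFrom(is)).getNodeLabels();
    mgr.addToCluserNodeLabels(recovered);
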
@@ -46,7 +46,7 @@ public abstract class NodeLabelsStore implements Closeable {
   /**
    * Store new labels
    */
-  public abstract void storeNewClusterNodeLabels(Set<String> label)
+  public abstract void storeNewClusterNodeLabels(List<NodeLabel> label)
       throws IOException;
 
   /**
@@ -55,12 +55,6 @@ public abstract class NodeLabelsStore implements Closeable {
   public abstract void removeClusterNodeLabels(Collection<String> labels)
       throws IOException;
 
-  /**
-   * Update node labels
-   */
-  public abstract void updateNodeLabels(
-      List<NodeLabel> updatedNodeLabels) throws IOException;
-
   /**
    * Recover labels and node to labels mappings from store
    */
@@ -20,9 +20,12 @@ package org.apache.hadoop.yarn.nodelabels;
 
 import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.resource.Resources;
 
 public class RMNodeLabel implements Comparable<RMNodeLabel> {
@@ -30,17 +33,25 @@ public class RMNodeLabel implements Comparable<RMNodeLabel> {
   private int numActiveNMs;
   private String labelName;
   private Set<NodeId> nodeIds;
-  private boolean exclusive = true;
+  private boolean exclusive;
 
-  public RMNodeLabel(String labelName) {
-    this(labelName, Resource.newInstance(0, 0), 0);
+  public RMNodeLabel(NodeLabel nodeLabel) {
+    this(nodeLabel.getName(), Resource.newInstance(0, 0), 0,
+        nodeLabel.isExclusive());
   }
 
-  protected RMNodeLabel(String labelName, Resource res, int activeNMs) {
+  public RMNodeLabel(String labelName) {
+    this(labelName, Resource.newInstance(0, 0), 0,
+        NodeLabel.DEFAULT_NODE_LABEL_EXCLUSIVITY);
+  }
+
+  protected RMNodeLabel(String labelName, Resource res, int activeNMs,
+      boolean exclusive) {
     this.labelName = labelName;
     this.resource = res;
     this.numActiveNMs = activeNMs;
     this.nodeIds = new HashSet<NodeId>();
+    this.exclusive = exclusive;
   }
 
   public void addNodeId(NodeId node) {
@@ -86,7 +97,7 @@ public class RMNodeLabel implements Comparable<RMNodeLabel> {
   }
 
   public RMNodeLabel getCopy() {
-    return new RMNodeLabel(labelName, resource, numActiveNMs);
+    return new RMNodeLabel(labelName, resource, numActiveNMs, exclusive);
   }
 
   @Override
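
A small illustrative sketch of the RMNodeLabel change above: exclusivity is now supplied at construction (the label name here is hypothetical) and is carried into copies:

    RMNodeLabel shared = new RMNodeLabel(NodeLabel.newInstance("gpu", false));
    RMNodeLabel snapshot = shared.getCopy();  // keeps resource, active NM count and exclusivity
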
@@ -21,6 +21,5 @@ package org.apache.hadoop.yarn.nodelabels.event;
 public enum NodeLabelsStoreEventType {
   REMOVE_LABELS,
   ADD_LABELS,
-  STORE_NODE_TO_LABELS,
-  UPDATE_NODE_LABELS
+  STORE_NODE_TO_LABELS
 }
@@ -18,17 +18,19 @@
 
 package org.apache.hadoop.yarn.nodelabels.event;
 
-import java.util.Set;
+import java.util.List;
 
+import org.apache.hadoop.yarn.api.records.NodeLabel;
+
 public class StoreNewClusterNodeLabels extends NodeLabelsStoreEvent {
-  private Set<String> labels;
+  private List<NodeLabel> labels;
 
-  public StoreNewClusterNodeLabels(Set<String> labels) {
+  public StoreNewClusterNodeLabels(List<NodeLabel> labels) {
     super(NodeLabelsStoreEventType.ADD_LABELS);
     this.labels = labels;
   }
 
-  public Set<String> getLabels() {
+  public List<NodeLabel> getLabels() {
     return labels;
   }
 }
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.nodelabels.event;
-
-import java.util.List;
-
-import org.apache.hadoop.yarn.api.records.NodeLabel;
-
-public class StoreUpdateNodeLabelsEvent extends NodeLabelsStoreEvent {
-  private List<NodeLabel> updatedNodeLabels;
-
-  public StoreUpdateNodeLabelsEvent(List<NodeLabel> updateNodeLabels) {
-    super(NodeLabelsStoreEventType.UPDATE_NODE_LABELS);
-    this.updatedNodeLabels = updateNodeLabels;
-  }
-
-  public List<NodeLabel> getUpdatedNodeLabels() {
-    return updatedNodeLabels;
-  }
-}
@@ -41,7 +41,6 @@ import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.Refre
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto;
 import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol;
 import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocolPB;
@@ -65,8 +64,6 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLa
 import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsRequest;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl;
@@ -89,8 +86,6 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClust
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsResponsePBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeResponsePBImpl;
-import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeLabelsRequestPBImpl;
-import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeLabelsResponsePBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl;
 
@@ -274,20 +269,6 @@ public class ResourceManagerAdministrationProtocolPBClientImpl implements Resour
     }
   }
 
-  @Override
-  public UpdateNodeLabelsResponse updateNodeLabels(
-      UpdateNodeLabelsRequest request) throws YarnException, IOException {
-    UpdateNodeLabelsRequestProto requestProto =
-        ((UpdateNodeLabelsRequestPBImpl) request).getProto();
-    try {
-      return new UpdateNodeLabelsResponsePBImpl(
-          proxy.updateNodeLabels(null, requestProto));
-    } catch (ServiceException e) {
-      RPCUtil.unwrapAndThrowException(e);
-      return null;
-    }
-  }
-
   @Override
   public CheckForDecommissioningNodesResponse checkForDecommissioningNodes(
       CheckForDecommissioningNodesRequest checkForDecommissioningNodesRequest)
@@ -44,8 +44,6 @@ import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.Remov
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto;
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto;
 import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol;
@@ -61,8 +59,6 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshSuperUserGroupsC
 import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshUserToGroupsMappingsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsRequest;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsResponsePBImpl;
@@ -84,8 +80,6 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClust
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsResponsePBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeResponsePBImpl;
-import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeLabelsRequestPBImpl;
-import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeLabelsResponsePBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl;
 
@@ -281,23 +275,6 @@ public class ResourceManagerAdministrationProtocolPBServiceImpl implements Resou
     }
   }
 
-  @Override
-  public UpdateNodeLabelsResponseProto updateNodeLabels(
-      RpcController controller, UpdateNodeLabelsRequestProto proto)
-      throws ServiceException {
-    UpdateNodeLabelsRequest request =
-        new UpdateNodeLabelsRequestPBImpl(proto);
-    try {
-      UpdateNodeLabelsResponse response =
-          real.updateNodeLabels(request);
-      return ((UpdateNodeLabelsResponsePBImpl) response).getProto();
-    } catch (YarnException e) {
-      throw new ServiceException(e);
-    } catch (IOException e) {
-      throw new ServiceException(e);
-    }
-  }
-
   @Override
   public CheckForDecommissioningNodesResponseProto checkForDecommissioningNodes(
       RpcController controller, CheckForDecommissioningNodesRequestProto proto)
@@ -18,23 +18,27 @@
 
 package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
 
-import java.util.HashSet;
-import java.util.Set;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
 
+import org.apache.hadoop.yarn.api.records.NodeLabel;
+import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelPBImpl;
+import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProtoOrBuilder;
 import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest;
 
 public class AddToClusterNodeLabelsRequestPBImpl extends
     AddToClusterNodeLabelsRequest {
-  Set<String> labels;
   AddToClusterNodeLabelsRequestProto proto = AddToClusterNodeLabelsRequestProto
       .getDefaultInstance();
   AddToClusterNodeLabelsRequestProto.Builder builder = null;
+  private List<NodeLabel> updatedNodeLabels;
   boolean viaProto = false;
 
   public AddToClusterNodeLabelsRequestPBImpl() {
-    this.builder = AddToClusterNodeLabelsRequestProto.newBuilder();
+    builder = AddToClusterNodeLabelsRequestProto.newBuilder();
   }
 
   public AddToClusterNodeLabelsRequestPBImpl(
@@ -43,28 +47,6 @@ public class AddToClusterNodeLabelsRequestPBImpl extends
     viaProto = true;
   }
 
-  private void maybeInitBuilder() {
-    if (viaProto || builder == null) {
-      builder = AddToClusterNodeLabelsRequestProto.newBuilder(proto);
-    }
-    viaProto = false;
-  }
-
-  private void mergeLocalToBuilder() {
-    if (this.labels != null && !this.labels.isEmpty()) {
-      builder.clearNodeLabels();
-      builder.addAllNodeLabels(this.labels);
-    }
-  }
-
-  private void mergeLocalToProto() {
-    if (viaProto)
-      maybeInitBuilder();
-    mergeLocalToBuilder();
-    proto = builder.build();
-    viaProto = true;
-  }
-
   public AddToClusterNodeLabelsRequestProto getProto() {
     mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
@@ -72,34 +54,28 @@ public class AddToClusterNodeLabelsRequestPBImpl extends
     return proto;
   }
 
-  private void initLabels() {
-    if (this.labels != null) {
-      return;
-    }
-    AddToClusterNodeLabelsRequestProtoOrBuilder p = viaProto ? proto : builder;
-    this.labels = new HashSet<String>();
-    this.labels.addAll(p.getNodeLabelsList());
-  }
-
-  @Override
-  public void setNodeLabels(Set<String> labels) {
+  private void mergeLocalToProto() {
+    if (viaProto)
+      maybeInitBuilder();
+    mergeLocalToBuilder();
+    proto = builder.build();
+    viaProto = true;
+  }
+
+  private void mergeLocalToBuilder() {
+    if (this.updatedNodeLabels != null) {
+      addNodeLabelsToProto();
+    }
+  }
+
+  private void addNodeLabelsToProto() {
     maybeInitBuilder();
-    if (labels == null || labels.isEmpty()) {
     builder.clearNodeLabels();
+    List<NodeLabelProto> protoList = new ArrayList<NodeLabelProto>();
+    for (NodeLabel r : this.updatedNodeLabels) {
+      protoList.add(convertToProtoFormat(r));
     }
-    this.labels = labels;
-  }
-
-  @Override
-  public Set<String> getNodeLabels() {
-    initLabels();
-    return this.labels;
-  }
-
-  @Override
-  public int hashCode() {
-    assert false : "hashCode not designed";
-    return 0;
+    builder.addAllNodeLabels(protoList);
   }
 
   @Override
@@ -111,4 +87,59 @@ public class AddToClusterNodeLabelsRequestPBImpl extends
     }
     return false;
   }
+
+  @Override
+  public int hashCode() {
+    assert false : "hashCode not designed";
+    return 0;
+  }
+
+  private void maybeInitBuilder() {
+    if (viaProto || builder == null) {
+      builder = AddToClusterNodeLabelsRequestProto.newBuilder(proto);
+    }
+    viaProto = false;
+  }
+
+  @Override
+  public void setNodeLabels(List<NodeLabel> updatedNodeLabels) {
+    maybeInitBuilder();
+    this.updatedNodeLabels = new ArrayList<>();
+    if (updatedNodeLabels == null) {
+      builder.clearNodeLabels();
+      return;
+    }
+    this.updatedNodeLabels.addAll(updatedNodeLabels);
+  }
+
+  private void initLocalNodeLabels() {
+    AddToClusterNodeLabelsRequestProtoOrBuilder p = viaProto ? proto : builder;
+    List<NodeLabelProto> attributesProtoList = p.getNodeLabelsList();
+    this.updatedNodeLabels = new ArrayList<NodeLabel>();
+    for (NodeLabelProto r : attributesProtoList) {
+      this.updatedNodeLabels.add(convertFromProtoFormat(r));
+    }
+  }
+
+  @Override
+  public List<NodeLabel> getNodeLabels() {
+    if (this.updatedNodeLabels != null) {
+      return this.updatedNodeLabels;
+    }
+    initLocalNodeLabels();
+    return this.updatedNodeLabels;
+  }
+
+  private NodeLabel convertFromProtoFormat(NodeLabelProto p) {
+    return new NodeLabelPBImpl(p);
+  }
+
+  private NodeLabelProto convertToProtoFormat(NodeLabel t) {
+    return ((NodeLabelPBImpl) t).getProto();
+  }
+
+  @Override
+  public String toString() {
+    return getProto().toString();
+  }
 }
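
A minimal sketch of how the reworked request record is built and read back, the same pattern the store and admin-protocol code above relies on (the label value is a placeholder):

    AddToClusterNodeLabelsRequest request = AddToClusterNodeLabelsRequest
        .newInstance(Arrays.asList(NodeLabel.newInstance("x", false)));
    AddToClusterNodeLabelsRequestProto proto =
        ((AddToClusterNodeLabelsRequestPBImpl) request).getProto();
    List<NodeLabel> roundTripped =
        new AddToClusterNodeLabelsRequestPBImpl(proto).getNodeLabels();
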
@@ -1,145 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.yarn.api.records.NodeLabel;
-import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelPBImpl;
-import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto;
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsRequestProtoOrBuilder;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsRequest;
-
-public class UpdateNodeLabelsRequestPBImpl extends
-    UpdateNodeLabelsRequest {
-  UpdateNodeLabelsRequestProto proto =
-      UpdateNodeLabelsRequestProto.getDefaultInstance();
-  UpdateNodeLabelsRequestProto.Builder builder = null;
-  private List<NodeLabel> updatedNodeLabels;
-  boolean viaProto = false;
-
-  public UpdateNodeLabelsRequestPBImpl() {
-    builder = UpdateNodeLabelsRequestProto.newBuilder();
-  }
-
-  public UpdateNodeLabelsRequestPBImpl(
-      UpdateNodeLabelsRequestProto proto) {
-    this.proto = proto;
-    viaProto = true;
-  }
-
-  public UpdateNodeLabelsRequestProto getProto() {
-    mergeLocalToProto();
-    proto = viaProto ? proto : builder.build();
-    viaProto = true;
-    return proto;
-  }
-
-  private void mergeLocalToProto() {
-    if (viaProto)
-      maybeInitBuilder();
-    mergeLocalToBuilder();
-    proto = builder.build();
-    viaProto = true;
-  }
-
-  private void mergeLocalToBuilder() {
-    if (this.updatedNodeLabels != null) {
-      addNodeLabelsToProto();
-    }
-  }
-
-  private void addNodeLabelsToProto() {
-    maybeInitBuilder();
-    builder.clearNodeLabels();
-    List<NodeLabelProto> protoList =
-        new ArrayList<NodeLabelProto>();
-    for (NodeLabel r : this.updatedNodeLabels) {
-      protoList.add(convertToProtoFormat(r));
-    }
-    builder.addAllNodeLabels(protoList);
-  }
-
-  @Override
-  public boolean equals(Object other) {
-    if (other == null)
-      return false;
-    if (other.getClass().isAssignableFrom(this.getClass())) {
-      return this.getProto().equals(this.getClass().cast(other).getProto());
-    }
-    return false;
-  }
-
-  @Override
-  public int hashCode() {
-    assert false : "hashCode not designed";
-    return 0;
-  }
-
-  private void maybeInitBuilder() {
-    if (viaProto || builder == null) {
-      builder = UpdateNodeLabelsRequestProto.newBuilder(proto);
-    }
-    viaProto = false;
-  }
-
-  @Override
-  public void setNodeLabels(List<NodeLabel> updatedNodeLabels) {
-    maybeInitBuilder();
-    if (updatedNodeLabels == null) {
-      builder.clearNodeLabels();
-    }
-    this.updatedNodeLabels = updatedNodeLabels;
-  }
-
-  private void initLocalNodeLabels() {
-    UpdateNodeLabelsRequestProtoOrBuilder p = viaProto ? proto : builder;
-    List<NodeLabelProto> attributesProtoList =
-        p.getNodeLabelsList();
-    this.updatedNodeLabels = new ArrayList<NodeLabel>();
-    for (NodeLabelProto r : attributesProtoList) {
-      this.updatedNodeLabels.add(convertFromProtoFormat(r));
-    }
-  }
-
-  @Override
-  public List<NodeLabel> getNodeLabels() {
-    if (this.updatedNodeLabels != null) {
-      return this.updatedNodeLabels;
-    }
-    initLocalNodeLabels();
-    return this.updatedNodeLabels;
-  }
-
-  private NodeLabel
-      convertFromProtoFormat(NodeLabelProto p) {
-    return new NodeLabelPBImpl(p);
-  }
-
-  private NodeLabelProto convertToProtoFormat(NodeLabel t) {
-    return ((NodeLabelPBImpl) t).getProto();
-  }
-
-  @Override
-  public String toString() {
-    return getProto().toString();
-  }
-}
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
-
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsResponse;
-
-public class UpdateNodeLabelsResponsePBImpl extends
-    UpdateNodeLabelsResponse {
-  UpdateNodeLabelsResponseProto proto =
-      UpdateNodeLabelsResponseProto.getDefaultInstance();
-  UpdateNodeLabelsResponseProto.Builder builder = null;
-  boolean viaProto = false;
-
-  public UpdateNodeLabelsResponsePBImpl() {
-    builder = UpdateNodeLabelsResponseProto.newBuilder();
-  }
-
-  public UpdateNodeLabelsResponsePBImpl(
-      UpdateNodeLabelsResponseProto proto) {
-    this.proto = proto;
-    viaProto = true;
-  }
-
-  public UpdateNodeLabelsResponseProto getProto() {
-    proto = viaProto ? proto : builder.build();
-    viaProto = true;
-    return proto;
-  }
-
-  @Override
-  public int hashCode() {
-    return getProto().hashCode();
-  }
-
-  @Override
-  public boolean equals(Object other) {
-    if (other == null)
-      return false;
-    if (other.getClass().isAssignableFrom(this.getClass())) {
-      return this.getProto().equals(this.getClass().cast(other).getProto());
-    }
-    return false;
-  }
-
-  @Override
-  public String toString() {
-    return getProto().toString().replaceAll("\\n", ", ")
-        .replaceAll("\\s+", " ");
-  }
-}
@@ -233,8 +233,6 @@ import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.Remov
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto;
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeLabelsResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto;
@@ -311,8 +309,6 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClust
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsResponsePBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeResponsePBImpl;
-import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeLabelsRequestPBImpl;
-import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeLabelsResponsePBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl;
 import org.apache.hadoop.yarn.util.resource.Resources;
@@ -1294,18 +1290,6 @@ public class TestPBImplRecords {
         NodeLabelProto.class);
   }
 
-  @Test
-  public void testUpdateNodeLabelsRequestPBImpl() throws Exception {
-    validatePBImplRecord(UpdateNodeLabelsRequestPBImpl.class,
-        UpdateNodeLabelsRequestProto.class);
-  }
-
-  @Test
-  public void testUpdateNodeLabelsResponsePBImpl() throws Exception {
-    validatePBImplRecord(UpdateNodeLabelsResponsePBImpl.class,
-        UpdateNodeLabelsResponseProto.class);
-  }
-
   @Test
   public void testCheckForDecommissioningNodesRequestPBImpl() throws Exception {
     validatePBImplRecord(CheckForDecommissioningNodesRequestPBImpl.class,
@@ -31,9 +31,8 @@ import org.apache.hadoop.yarn.event.InlineDispatcher;
 
 public class DummyCommonNodeLabelsManager extends CommonNodeLabelsManager {
   Map<NodeId, Set<String>> lastNodeToLabels = null;
-  Collection<String> lastAddedlabels = null;
+  Collection<NodeLabel> lastAddedlabels = null;
   Collection<String> lastRemovedlabels = null;
-  List<NodeLabel> lastUpdatedNodeLabels = null;
 
   @Override
   public void initNodeLabelStore(Configuration conf) {
@@ -56,16 +55,10 @@ public class DummyCommonNodeLabelsManager extends CommonNodeLabelsManager {
   }
 
   @Override
-  public void storeNewClusterNodeLabels(Set<String> label) throws IOException {
+  public void storeNewClusterNodeLabels(List<NodeLabel> label) throws IOException {
     lastAddedlabels = label;
   }
 
-  @Override
-  public void updateNodeLabels(List<NodeLabel> updatedNodeLabels)
-      throws IOException {
-    lastUpdatedNodeLabels = updatedNodeLabels;
-  }
-
   @Override
   public void close() throws IOException {
     // do nothing
@@ -88,6 +88,7 @@ public class NodeLabelTestBase {
     Assert.assertTrue(s1.containsAll(s2));
   }
 
+  @SuppressWarnings("unchecked")
   public static <E> Set<E> toSet(E... elements) {
     Set<E> set = Sets.newHashSet(elements);
     return set;
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
@@ -31,7 +32,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -61,15 +61,14 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
   @Test(timeout = 5000)
   public void testAddRemovelabel() throws Exception {
     // Add some label
-    mgr.addToCluserNodeLabels(ImmutableSet.of("hello"));
-    assertCollectionEquals(mgr.lastAddedlabels, Arrays.asList("hello"));
+    mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("hello"));
+    verifyNodeLabelAdded(Sets.newHashSet("hello"), mgr.lastAddedlabels);
 
-    mgr.addToCluserNodeLabels(ImmutableSet.of("world"));
-    mgr.addToCluserNodeLabels(toSet("hello1", "world1"));
-    assertCollectionEquals(mgr.lastAddedlabels,
-        Sets.newHashSet("hello1", "world1"));
+    mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("world"));
+    mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("hello1", "world1"));
+    verifyNodeLabelAdded(Sets.newHashSet("hello1", "world1"), mgr.lastAddedlabels);
 
-    Assert.assertTrue(mgr.getClusterNodeLabels().containsAll(
+    Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
         Sets.newHashSet("hello", "world", "hello1", "world1")));
 
     // try to remove null, empty and non-existed label, should fail
@@ -86,23 +85,32 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
     // Remove some label
     mgr.removeFromClusterNodeLabels(Arrays.asList("hello"));
-    assertCollectionEquals(mgr.lastRemovedlabels, Arrays.asList("hello"));
-    Assert.assertTrue(mgr.getClusterNodeLabels().containsAll(
+    assertCollectionEquals(Sets.newHashSet("hello"), mgr.lastRemovedlabels);
+    Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
         Arrays.asList("world", "hello1", "world1")));
 
     mgr.removeFromClusterNodeLabels(Arrays
         .asList("hello1", "world1", "world"));
     Assert.assertTrue(mgr.lastRemovedlabels.containsAll(Sets.newHashSet(
         "hello1", "world1", "world")));
-    Assert.assertTrue(mgr.getClusterNodeLabels().isEmpty());
+    Assert.assertTrue(mgr.getClusterNodeLabelNames().isEmpty());
   }
 
   @Test(timeout = 5000)
   public void testAddlabelWithCase() throws Exception {
     // Add some label, case will not ignore here
-    mgr.addToCluserNodeLabels(ImmutableSet.of("HeLlO"));
-    assertCollectionEquals(mgr.lastAddedlabels, Arrays.asList("HeLlO"));
-    Assert.assertFalse(mgr.getClusterNodeLabels().containsAll(Arrays.asList("hello")));
+    mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("HeLlO"));
+    verifyNodeLabelAdded(Sets.newHashSet("HeLlO"), mgr.lastAddedlabels);
+    Assert.assertFalse(mgr.getClusterNodeLabelNames().containsAll(
+        Arrays.asList("hello")));
+  }
+
+  @Test(timeout = 5000)
+  public void testAddlabelWithExclusivity() throws Exception {
+    // Add some label, case will not ignore here
+    mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("a", false), NodeLabel.newInstance("b", true)));
+    Assert.assertFalse(mgr.isExclusiveNodeLabel("a"));
+    Assert.assertTrue(mgr.isExclusiveNodeLabel("b"));
   }
 
   @Test(timeout = 5000)
@@ -111,7 +119,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
     try {
       Set<String> set = new HashSet<String>();
       set.add(null);
-      mgr.addToCluserNodeLabels(set);
+      mgr.addToCluserNodeLabelsWithDefaultExclusivity(set);
     } catch (IOException e) {
       caught = true;
     }
@@ -119,7 +127,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
     caught = false;
     try {
-      mgr.addToCluserNodeLabels(ImmutableSet.of(CommonNodeLabelsManager.NO_LABEL));
+      mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(CommonNodeLabelsManager.NO_LABEL));
    } catch (IOException e) {
       caught = true;
     }
@@ -128,7 +136,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
     caught = false;
     try {
-      mgr.addToCluserNodeLabels(ImmutableSet.of("-?"));
+      mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("-?"));
     } catch (IOException e) {
       caught = true;
     }
@@ -136,7 +144,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
     caught = false;
     try {
-      mgr.addToCluserNodeLabels(ImmutableSet.of(StringUtils.repeat("c", 257)));
+      mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of(StringUtils.repeat("c", 257)));
     } catch (IOException e) {
       caught = true;
     }
@@ -144,7 +152,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
     caught = false;
     try {
-      mgr.addToCluserNodeLabels(ImmutableSet.of("-aaabbb"));
+      mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("-aaabbb"));
     } catch (IOException e) {
       caught = true;
     }
@@ -152,7 +160,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
     caught = false;
     try {
-      mgr.addToCluserNodeLabels(ImmutableSet.of("_aaabbb"));
+      mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("_aaabbb"));
     } catch (IOException e) {
       caught = true;
     }
@@ -160,7 +168,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
     caught = false;
     try {
-      mgr.addToCluserNodeLabels(ImmutableSet.of("a^aabbb"));
+      mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("a^aabbb"));
     } catch (IOException e) {
       caught = true;
     }
@@ -168,7 +176,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
     caught = false;
     try {
-      mgr.addToCluserNodeLabels(ImmutableSet.of("aa[a]bbb"));
+      mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("aa[a]bbb"));
     } catch (IOException e) {
       caught = true;
     }
@@ -198,7 +206,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
     Assert.assertTrue("trying to add a empty node but succeeded", caught);
 
     // set node->label one by one
-    mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
+    mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
     mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
     mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p2")));
     mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2"), toSet("p3")));
@@ -249,7 +257,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
 
   @Test(timeout = 5000)
   public void testRemovelabelWithNodes() throws Exception {
-    mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
+    mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
     mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
|
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
|
||||||
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2"), toSet("p2")));
|
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n2"), toSet("p2")));
|
||||||
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n3"), toSet("p3")));
|
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n3"), toSet("p3")));
|
||||||
|
@ -261,21 +269,21 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
|
||||||
|
|
||||||
mgr.removeFromClusterNodeLabels(ImmutableSet.of("p2", "p3"));
|
mgr.removeFromClusterNodeLabels(ImmutableSet.of("p2", "p3"));
|
||||||
Assert.assertTrue(mgr.getNodeLabels().isEmpty());
|
Assert.assertTrue(mgr.getNodeLabels().isEmpty());
|
||||||
Assert.assertTrue(mgr.getClusterNodeLabels().isEmpty());
|
Assert.assertTrue(mgr.getClusterNodeLabelNames().isEmpty());
|
||||||
assertCollectionEquals(mgr.lastRemovedlabels, Arrays.asList("p2", "p3"));
|
assertCollectionEquals(mgr.lastRemovedlabels, Arrays.asList("p2", "p3"));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test(timeout = 5000)
|
@Test(timeout = 5000)
|
||||||
public void testTrimLabelsWhenAddRemoveNodeLabels() throws IOException {
|
public void testTrimLabelsWhenAddRemoveNodeLabels() throws IOException {
|
||||||
mgr.addToCluserNodeLabels(toSet(" p1"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(" p1"));
|
||||||
assertCollectionEquals(mgr.getClusterNodeLabels(), toSet("p1"));
|
assertCollectionEquals(mgr.getClusterNodeLabelNames(), toSet("p1"));
|
||||||
mgr.removeFromClusterNodeLabels(toSet("p1 "));
|
mgr.removeFromClusterNodeLabels(toSet("p1 "));
|
||||||
Assert.assertTrue(mgr.getClusterNodeLabels().isEmpty());
|
Assert.assertTrue(mgr.getClusterNodeLabelNames().isEmpty());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test(timeout = 5000)
|
@Test(timeout = 5000)
|
||||||
public void testTrimLabelsWhenModifyLabelsOnNodes() throws IOException {
|
public void testTrimLabelsWhenModifyLabelsOnNodes() throws IOException {
|
||||||
mgr.addToCluserNodeLabels(toSet(" p1", "p2"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet(" p1", "p2"));
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1 ")));
|
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1 ")));
|
||||||
assertMapEquals(
|
assertMapEquals(
|
||||||
mgr.getNodeLabels(),
|
mgr.getNodeLabels(),
|
||||||
|
@ -291,7 +299,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
|
||||||
@Test(timeout = 5000)
|
@Test(timeout = 5000)
|
||||||
public void testReplaceLabelsOnHostsShouldUpdateNodesBelongTo()
|
public void testReplaceLabelsOnHostsShouldUpdateNodesBelongTo()
|
||||||
throws IOException {
|
throws IOException {
|
||||||
mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
|
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
|
||||||
assertMapEquals(
|
assertMapEquals(
|
||||||
mgr.getNodeLabels(),
|
mgr.getNodeLabels(),
|
||||||
|
@ -330,7 +338,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
|
||||||
|
|
||||||
// add labels
|
// add labels
|
||||||
try {
|
try {
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x"));
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
assertNodeLabelsDisabledErrorMessage(e);
|
assertNodeLabelsDisabledErrorMessage(e);
|
||||||
caught = true;
|
caught = true;
|
||||||
|
@ -392,7 +400,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
|
||||||
@Test(timeout = 5000)
|
@Test(timeout = 5000)
|
||||||
public void testLabelsToNodes()
|
public void testLabelsToNodes()
|
||||||
throws IOException {
|
throws IOException {
|
||||||
mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
|
mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
|
||||||
Map<String, Set<NodeId>> labelsToNodes = mgr.getLabelsToNodes();
|
Map<String, Set<NodeId>> labelsToNodes = mgr.getLabelsToNodes();
|
||||||
assertLabelsToNodesEquals(
|
assertLabelsToNodesEquals(
|
||||||
|
@ -453,7 +461,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
|
||||||
@Test(timeout = 5000)
|
@Test(timeout = 5000)
|
||||||
public void testLabelsToNodesForSelectedLabels()
|
public void testLabelsToNodesForSelectedLabels()
|
||||||
throws IOException {
|
throws IOException {
|
||||||
mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
|
||||||
mgr.addLabelsToNode(
|
mgr.addLabelsToNode(
|
||||||
ImmutableMap.of(
|
ImmutableMap.of(
|
||||||
toNodeId("n1:1"), toSet("p1"),
|
toNodeId("n1:1"), toSet("p1"),
|
||||||
|
@ -511,7 +519,7 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
|
||||||
boolean failed = false;
|
boolean failed = false;
|
||||||
// As in YARN-2694, we temporarily disable no more than one label existed in
|
// As in YARN-2694, we temporarily disable no more than one label existed in
|
||||||
// one host
|
// one host
|
||||||
mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
|
||||||
try {
|
try {
|
||||||
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1", "p2")));
|
mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1", "p2")));
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
|
@ -539,29 +547,11 @@ public class TestCommonNodeLabelsManager extends NodeLabelTestBase {
|
||||||
failed);
|
failed);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test (timeout = 5000)
|
private void verifyNodeLabelAdded(Set<String> expectedAddedLabelNames,
|
||||||
public void testUpdateNodeLabels() throws Exception {
|
Collection<NodeLabel> addedNodeLabels) {
|
||||||
boolean failed = false;
|
Assert.assertEquals(expectedAddedLabelNames.size(), addedNodeLabels.size());
|
||||||
|
for (NodeLabel label : addedNodeLabels) {
|
||||||
// should fail: label isn't exist
|
Assert.assertTrue(expectedAddedLabelNames.contains(label.getName()));
|
||||||
try {
|
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance(
|
|
||||||
"p1", false)));
|
|
||||||
} catch (YarnException e) {
|
|
||||||
failed = true;
|
|
||||||
}
|
}
|
||||||
Assert.assertTrue("Should fail since the node label doesn't exist", failed);
|
|
||||||
|
|
||||||
mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
|
|
||||||
|
|
||||||
mgr.updateNodeLabels(Arrays.asList(
|
|
||||||
NodeLabel.newInstance("p1", false), NodeLabel.newInstance("p2", true)));
|
|
||||||
Assert.assertEquals("p1", mgr.lastUpdatedNodeLabels.get(0).getNodeLabel());
|
|
||||||
Assert.assertFalse(mgr.lastUpdatedNodeLabels.get(0).getIsExclusive());
|
|
||||||
Assert.assertTrue(mgr.lastUpdatedNodeLabels.get(1).getIsExclusive());
|
|
||||||
|
|
||||||
// Check exclusive for p1/p2
|
|
||||||
Assert.assertFalse(mgr.isExclusiveNodeLabel("p1"));
|
|
||||||
Assert.assertTrue(mgr.isExclusiveNodeLabel("p2"));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
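For orientation, a minimal illustrative sketch of the two add paths exercised by the tests above (this is not part of the patch): it assumes an initialized and started CommonNodeLabelsManager instance named mgr, as in the test fixture, and the label names are taken from the new testAddlabelWithExclusivity test.

    import java.io.IOException;
    import java.util.Arrays;
    import org.apache.hadoop.yarn.api.records.NodeLabel;
    import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
    import com.google.common.collect.ImmutableSet;

    static void addLabelsSketch(CommonNodeLabelsManager mgr) throws IOException {
      // Plain names: each label is stored with the default exclusivity attribute.
      mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("HeLlO"));
      // Full NodeLabel objects: exclusivity is passed in with each label.
      mgr.addToCluserNodeLabels(Arrays.asList(
          NodeLabel.newInstance("a", false), NodeLabel.newInstance("b", true)));
      // The recorded attribute can then be queried per label name.
      boolean aIsExclusive = mgr.isExclusiveNodeLabel("a"); // expected false
      boolean bIsExclusive = mgr.isExclusiveNodeLabel("b"); // expected true
    }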
@@ -84,9 +84,9 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 @SuppressWarnings({ "unchecked", "rawtypes" })
 @Test(timeout = 10000)
 public void testRecoverWithMirror() throws Exception {
-mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
-mgr.addToCluserNodeLabels(toSet("p4"));
-mgr.addToCluserNodeLabels(toSet("p5", "p6"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6"));
 mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n1"), toSet("p1"),
 toNodeId("n2"), toSet("p2")));
 mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"),
@@ -110,8 +110,8 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 mgr.init(conf);

 // check variables
-Assert.assertEquals(3, mgr.getClusterNodeLabels().size());
-Assert.assertTrue(mgr.getClusterNodeLabels().containsAll(
+Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size());
+Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
 Arrays.asList("p2", "p4", "p6")));

 assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"),
@@ -129,8 +129,8 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 mgr.init(conf);

 // check variables
-Assert.assertEquals(3, mgr.getClusterNodeLabels().size());
-Assert.assertTrue(mgr.getClusterNodeLabels().containsAll(
+Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size());
+Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
 Arrays.asList("p2", "p4", "p6")));

 assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"),
@@ -147,9 +147,9 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 @SuppressWarnings({ "unchecked", "rawtypes" })
 @Test(timeout = 10000)
 public void testEditlogRecover() throws Exception {
-mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
-mgr.addToCluserNodeLabels(toSet("p4"));
-mgr.addToCluserNodeLabels(toSet("p5", "p6"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p5", "p6"));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"),
 toNodeId("n2"), toSet("p2")));
 mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"),
@@ -173,8 +173,8 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 mgr.init(conf);

 // check variables
-Assert.assertEquals(3, mgr.getClusterNodeLabels().size());
-Assert.assertTrue(mgr.getClusterNodeLabels().containsAll(
+Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size());
+Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
 Arrays.asList("p2", "p4", "p6")));

 assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"),
@@ -191,9 +191,12 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 @SuppressWarnings({ "unchecked", "rawtypes" })
 @Test (timeout = 10000)
 public void testSerilizationAfterRecovery() throws Exception {
-mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
-mgr.addToCluserNodeLabels(toSet("p4"));
-mgr.addToCluserNodeLabels(toSet("p5", "p6"));
+// Add to cluster node labels, p2/p6 are non-exclusive.
+mgr.addToCluserNodeLabels(Arrays.asList(NodeLabel.newInstance("p1", true),
+NodeLabel.newInstance("p2", false), NodeLabel.newInstance("p3", true),
+NodeLabel.newInstance("p4", true), NodeLabel.newInstance("p5", true),
+NodeLabel.newInstance("p6", false)));
+
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"),
 toNodeId("n2"), toSet("p2")));
 mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n3"), toSet("p3"),
@@ -220,13 +223,6 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 * p6: n6, n7
 */

-mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("p2", false)));
-mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("p6", false)));
-
-/*
-* Set p2/p6 to be exclusive
-*/
-
 // shutdown mgr and start a new mgr
 mgr.stop();

@@ -235,8 +231,8 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 mgr.start();

 // check variables
-Assert.assertEquals(3, mgr.getClusterNodeLabels().size());
-Assert.assertTrue(mgr.getClusterNodeLabels().containsAll(
+Assert.assertEquals(3, mgr.getClusterNodeLabelNames().size());
+Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
 Arrays.asList("p2", "p4", "p6")));

 assertMapContains(mgr.getNodeLabels(), ImmutableMap.of(toNodeId("n2"),
@@ -258,7 +254,7 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 mgr = new MockNodeLabelManager();
 mgr.init(conf);
 mgr.start();
-mgr.addToCluserNodeLabels(toSet("p7", "p8"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p7", "p8"));
 mgr.stop();

 /*
@@ -267,7 +263,7 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 mgr = new MockNodeLabelManager();
 mgr.init(conf);
 mgr.start();
-mgr.addToCluserNodeLabels(toSet("p9"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p9"));
 mgr.stop();

 /*
@@ -278,8 +274,8 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
 mgr.start();

 // check variables
-Assert.assertEquals(6, mgr.getClusterNodeLabels().size());
-Assert.assertTrue(mgr.getClusterNodeLabels().containsAll(
+Assert.assertEquals(6, mgr.getClusterNodeLabelNames().size());
+Assert.assertTrue(mgr.getClusterNodeLabelNames().containsAll(
 Arrays.asList("p2", "p4", "p6", "p7", "p8", "p9")));
 mgr.stop();
 }
@@ -80,8 +80,6 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLa
 import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsRequest;
-import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse;
 import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationSystem;
@@ -679,28 +677,6 @@ public class AdminService extends CompositeService implements
 }
 }

-@Override
-public UpdateNodeLabelsResponse updateNodeLabels(
-UpdateNodeLabelsRequest request) throws YarnException, IOException {
-String argName = "updateNodeLabels";
-final String msg = "update node labels";
-UserGroupInformation user = checkAcls(argName);
-
-checkRMStatus(user.getShortUserName(), argName, msg);
-
-UpdateNodeLabelsResponse response = UpdateNodeLabelsResponse.newInstance();
-
-try {
-rmContext.getNodeLabelManager().updateNodeLabels(
-request.getNodeLabels());
-RMAuditLogger
-.logSuccess(user.getShortUserName(), argName, "AdminService");
-return response;
-} catch (YarnException ioe) {
-throw logAndWrapException(ioe, user.getShortUserName(), argName, msg);
-}
-}
-
 private void checkRMStatus(String user, String argName, String msg)
 throws StandbyException {
 if (!isRMActive()) {
@@ -135,7 +135,7 @@ class CSQueueUtils {
 accessibleLabels.addAll(labels);
 }
 if (accessibleLabels.contains(CommonNodeLabelsManager.ANY)) {
-accessibleLabels.addAll(mgr.getClusterNodeLabels());
+accessibleLabels.addAll(mgr.getClusterNodeLabelNames());
 }
 accessibleLabels.add(CommonNodeLabelsManager.NO_LABEL);

@@ -147,7 +147,7 @@ public class ParentQueue extends AbstractCSQueue {
 " for children of queue " + queueName);
 }
 // check label capacities
-for (String nodeLabel : labelManager.getClusterNodeLabels()) {
+for (String nodeLabel : labelManager.getClusterNodeLabelNames()) {
 float capacityByLabel = queueCapacities.getCapacity(nodeLabel);
 // check children's labels
 float sum = 0;
@@ -857,7 +857,7 @@ public class RMWebServices {

 NodeLabelsInfo ret =
 new NodeLabelsInfo(rm.getRMContext().getNodeLabelManager()
-.getClusterNodeLabels());
+.getClusterNodeLabelNames());

 return ret;
 }
@@ -883,7 +883,7 @@ public class RMWebServices {
 }

 rm.getRMContext().getNodeLabelManager()
-.addToCluserNodeLabels(new HashSet<String>(
+.addToCluserNodeLabelsWithDefaultExclusivity(new HashSet<String>(
 newNodeLabels.getNodeLabels()));

 return Response.status(Status.OK).build();
@@ -99,6 +99,7 @@ import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.ContainerState;
 import org.apache.hadoop.yarn.api.records.ContainerStatus;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.QueueACL;
@@ -200,7 +201,7 @@ public class TestClientRMService {
 };
 rm.start();
 RMNodeLabelsManager labelsMgr = rm.getRMContext().getNodeLabelManager();
-labelsMgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
+labelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));

 // Add a healthy node with label = x
 MockNM node = rm.registerNode("host1:1234", 1024);
@@ -1407,7 +1408,7 @@ public class TestClientRMService {
 };
 rm.start();
 RMNodeLabelsManager labelsMgr = rm.getRMContext().getNodeLabelManager();
-labelsMgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
+labelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));

 Map<NodeId, Set<String>> map = new HashMap<NodeId, Set<String>>();
 map.put(NodeId.newInstance("host1", 0), ImmutableSet.of("x"));
@@ -1427,7 +1428,7 @@ public class TestClientRMService {
 GetClusterNodeLabelsResponse response =
 client.getClusterNodeLabels(GetClusterNodeLabelsRequest.newInstance());
 Assert.assertTrue(response.getNodeLabels().containsAll(
-Arrays.asList("x", "y")));
+Arrays.asList(NodeLabel.newInstance("x"), NodeLabel.newInstance("y"))));

 // Get node labels mapping
 GetNodesToLabelsResponse response1 =
@@ -1457,7 +1458,7 @@ public class TestClientRMService {
 };
 rm.start();
 RMNodeLabelsManager labelsMgr = rm.getRMContext().getNodeLabelManager();
-labelsMgr.addToCluserNodeLabels(ImmutableSet.of("x", "y", "z"));
+labelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y", "z"));

 Map<NodeId, Set<String>> map = new HashMap<NodeId, Set<String>>();
 map.put(NodeId.newInstance("host1", 0), ImmutableSet.of("x"));
@@ -1480,7 +1481,8 @@ public class TestClientRMService {
 GetClusterNodeLabelsResponse response =
 client.getClusterNodeLabels(GetClusterNodeLabelsRequest.newInstance());
 Assert.assertTrue(response.getNodeLabels().containsAll(
-Arrays.asList("x", "y", "z")));
+Arrays.asList(NodeLabel.newInstance("x"), NodeLabel.newInstance("y"),
+NodeLabel.newInstance("z"))));

 // Get labels to nodes mapping
 GetLabelsToNodesResponse response1 =
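As a hedged illustration only (not part of this patch) of the client-side effect visible in the hunks above: the response now carries NodeLabel objects rather than plain strings, so a caller reads the names off the returned objects. This sketch assumes client is an ApplicationClientProtocol proxy as in the test, and the printing is purely illustrative.

    import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest;
    import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse;
    import org.apache.hadoop.yarn.api.records.NodeLabel;

    GetClusterNodeLabelsResponse response =
        client.getClusterNodeLabels(GetClusterNodeLabelsRequest.newInstance());
    for (NodeLabel label : response.getNodeLabels()) {
      // getName() returns the label name string for each cluster node label.
      System.out.println(label.getName());
    }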
@@ -2104,7 +2104,7 @@ public class TestRMRestart extends ParameterizedSchedulerTestBase {
 clusterNodeLabels.add("y");
 clusterNodeLabels.add("z");
 // Add node label x,y,z
-nodeLabelManager.addToCluserNodeLabels(clusterNodeLabels);
+nodeLabelManager.addToCluserNodeLabelsWithDefaultExclusivity(clusterNodeLabels);

 // Add node Label to Node h1->x
 NodeId n1 = NodeId.newInstance("h1", 0);
@@ -2129,7 +2129,7 @@ public class TestRMRestart extends ParameterizedSchedulerTestBase {
 }

 Assert.assertEquals(clusterNodeLabels.size(), nodeLabelManager
-.getClusterNodeLabels().size());
+.getClusterNodeLabelNames().size());

 Map<NodeId, Set<String>> nodeLabels = nodeLabelManager.getNodeLabels();
 Assert.assertEquals(1, nodeLabelManager.getNodeLabels().size());
@@ -2148,7 +2148,7 @@ public class TestRMRestart extends ParameterizedSchedulerTestBase {

 nodeLabelManager = rm2.getRMContext().getNodeLabelManager();
 Assert.assertEquals(clusterNodeLabels.size(), nodeLabelManager
-.getClusterNodeLabels().size());
+.getClusterNodeLabelNames().size());

 nodeLabels = nodeLabelManager.getNodeLabels();
 Assert.assertEquals(1, nodeLabelManager.getNodeLabels().size());
@@ -337,7 +337,7 @@ public class TestResourceTrackerService extends NodeLabelTestBase {
 rm.start();

 try {
-nodeLabelsMgr.addToCluserNodeLabels(toSet("A", "B", "C"));
+nodeLabelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("A", "B", "C"));
 } catch (IOException e) {
 Assert.fail("Caught Exception while intializing");
 e.printStackTrace();
@@ -386,7 +386,7 @@ public class TestResourceTrackerService extends NodeLabelTestBase {
 rm.start();

 try {
-nodeLabelsMgr.addToCluserNodeLabels(toSet("X", "Y", "Z"));
+nodeLabelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("X", "Y", "Z"));
 } catch (IOException e) {
 Assert.fail("Caught Exception while intializing");
 e.printStackTrace();
@@ -439,7 +439,7 @@ public class TestResourceTrackerService extends NodeLabelTestBase {
 rm.start();

 try {
-nodeLabelsMgr.addToCluserNodeLabels(toSet("X", "Y", "Z"));
+nodeLabelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("X", "Y", "Z"));
 } catch (IOException e) {
 Assert.fail("Caught Exception while intializing");
 e.printStackTrace();
@@ -491,7 +491,7 @@ public class TestResourceTrackerService extends NodeLabelTestBase {
 };
 rm.start();
 try {
-nodeLabelsMgr.addToCluserNodeLabels(toSet("A", "B", "C"));
+nodeLabelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("A", "B", "C"));
 } catch (IOException e) {
 Assert.fail("Caught Exception while intializing");
 e.printStackTrace();
@@ -551,7 +551,7 @@ public class TestResourceTrackerService extends NodeLabelTestBase {
 rm.start();
 // adding valid labels
 try {
-nodeLabelsMgr.addToCluserNodeLabels(toSet("A", "B", "C"));
+nodeLabelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("A", "B", "C"));
 } catch (IOException e) {
 Assert.fail("Caught Exception while intializing");
 e.printStackTrace();
@@ -636,7 +636,7 @@ public class TestResourceTrackerService extends NodeLabelTestBase {
 rm.start();

 try {
-nodeLabelsMgr.addToCluserNodeLabels(toSet("A", "B", "C"));
+nodeLabelsMgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("A", "B", "C"));
 } catch (IOException e) {
 Assert.fail("Caught Exception while intializing");
 e.printStackTrace();
@@ -57,7 +57,8 @@ public class NullRMNodeLabelsManager extends RMNodeLabelsManager {
 }

 @Override
-public void storeNewClusterNodeLabels(Set<String> label) throws IOException {
+public void storeNewClusterNodeLabels(List<NodeLabel> label)
+throws IOException {
 // do nothing
 }

@@ -65,12 +66,6 @@ public class NullRMNodeLabelsManager extends RMNodeLabelsManager {
 public void close() throws IOException {
 // do nothing
 }
-
-@Override
-public void updateNodeLabels(List<NodeLabel> updatedNodeLabels)
-throws IOException {
-// do nothing
-}
 };
 }

@@ -63,7 +63,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {

 @Test(timeout = 5000)
 public void testGetLabelResourceWhenNodeActiveDeactive() throws Exception {
-mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"),
 toNodeId("n2"), toSet("p2"), toNodeId("n3"), toSet("p3")));

@@ -81,7 +81,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {

 // check add labels multiple times shouldn't overwrite
 // original attributes on labels like resource
-mgr.addToCluserNodeLabels(toSet("p1", "p4"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p4"));
 Assert.assertEquals(mgr.getResourceByLabel("p1", null),
 Resources.add(SMALL_RESOURCE, LARGE_NODE));
 Assert.assertEquals(mgr.getResourceByLabel("p4", null), EMPTY_RESOURCE);
@@ -120,7 +120,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {
 @SuppressWarnings({ "unchecked", "rawtypes" })
 @Test(timeout = 5000)
 public void testGetLabelResource() throws Exception {
-mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"),
 toNodeId("n2"), toSet("p2"), toNodeId("n3"), toSet("p3")));

@@ -137,7 +137,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {
 Assert.assertEquals(mgr.getResourceByLabel("p3", null), SMALL_RESOURCE);

 // add more labels
-mgr.addToCluserNodeLabels(toSet("p4", "p5", "p6"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p4", "p5", "p6"));
 mgr.replaceLabelsOnNode((Map) ImmutableMap.of(toNodeId("n4"), toSet("p1"),
 toNodeId("n5"), toSet("p2"), toNodeId("n6"), toSet("p3"),
 toNodeId("n7"), toSet("p4"), toNodeId("n8"), toSet("p5")));
@@ -216,7 +216,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {
 * host3 : yellow
 * host4 :
 */
-mgr.addToCluserNodeLabels(toSet("red", "blue", "yellow"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("red", "blue", "yellow"));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("host1"), toSet("red")));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("host2"), toSet("blue")));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("host3"), toSet("yellow")));
@@ -397,7 +397,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {
 Resources.multiply(SMALL_RESOURCE, 4));

 // change two of these nodes to p1, check resource of no_label and P1
-mgr.addToCluserNodeLabels(toSet("p1"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1"));
 mgr.addLabelsToNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p1"),
 toNodeId("n1:2"), toSet("p1")));

@@ -412,7 +412,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {

 @Test(timeout = 5000)
 public void testRemoveLabelsFromNode() throws Exception {
-mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1"),
 toNodeId("n2"), toSet("p2"), toNodeId("n3"), toSet("p3")));
 // active one NM to n1:1
@@ -432,7 +432,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {

 @Test(timeout = 5000)
 public void testGetLabelsOnNodesWhenNodeActiveDeactive() throws Exception {
-mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1", "p2", "p3"));
 mgr.replaceLabelsOnNode(ImmutableMap.of(
 toNodeId("n1"), toSet("p2")));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1:1"), toSet("p1")));
@@ -489,7 +489,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {

 @Test(timeout = 5000)
 public void testPullRMNodeLabelsInfo() throws IOException {
-mgr.addToCluserNodeLabels(toSet("x", "y", "z"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("x", "y", "z"));
 mgr.activateNode(NodeId.newInstance("n1", 1), Resource.newInstance(10, 0));
 mgr.activateNode(NodeId.newInstance("n2", 1), Resource.newInstance(10, 0));
 mgr.activateNode(NodeId.newInstance("n3", 1), Resource.newInstance(10, 0));
@@ -516,7 +516,7 @@ public class TestRMNodeLabelsManager extends NodeLabelTestBase {
 mgr.getLabelsToNodes(), transposeNodeToLabels(mgr.getNodeLabels()));

 // Add labels and replace labels on node
-mgr.addToCluserNodeLabels(toSet("p1"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(toSet("p1"));
 mgr.replaceLabelsOnNode(ImmutableMap.of(toNodeId("n1"), toSet("p1")));
 // p1 -> n1, n1:1
 Assert.assertEquals(2, mgr.getLabelsToNodes().get("p1").size());
@@ -2578,7 +2578,7 @@ public class TestCapacityScheduler {

 final RMNodeLabelsManager mgr = new NullRMNodeLabelsManager();
 mgr.init(conf);
-mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
 mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));

 MemoryRMStateStore memStore = new MemoryRMStateStore();
@@ -98,7 +98,7 @@ public class TestCapacitySchedulerNodeLabelUpdate {
 @Test (timeout = 30000)
 public void testNodeUpdate() throws Exception {
 // set node -> label
-mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y", "z"));
+mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y", "z"));

 // set mapping:
 // h1 -> x
@@ -19,8 +19,6 @@
 package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;

 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;

@@ -34,7 +32,6 @@ import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.LogAggregationContext;
 import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.Priority;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
@@ -54,13 +51,9 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
 import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
 import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
-import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.junit.Assert;
@ -143,7 +143,7 @@ public class TestNodeLabelContainerAllocation {
|
||||||
mgr.init(conf);
|
mgr.init(conf);
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x"),
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x"),
|
||||||
NodeId.newInstance("h2", 0), toSet("y")));
|
NodeId.newInstance("h2", 0), toSet("y")));
|
||||||
|
|
||||||
|
@ -226,7 +226,7 @@ public class TestNodeLabelContainerAllocation {
|
||||||
*/
|
*/
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y", "z"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y", "z"));
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0),
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0),
|
||||||
toSet("x"), NodeId.newInstance("h2", 0), toSet("y"),
|
toSet("x"), NodeId.newInstance("h2", 0), toSet("y"),
|
||||||
NodeId.newInstance("h3", 0), toSet("y"), NodeId.newInstance("h4", 0),
|
NodeId.newInstance("h3", 0), toSet("y"), NodeId.newInstance("h4", 0),
|
||||||
|
@ -310,7 +310,7 @@ public class TestNodeLabelContainerAllocation {
|
||||||
@Test (timeout = 120000)
|
@Test (timeout = 120000)
|
||||||
public void testContainerAllocateWithLabels() throws Exception {
|
public void testContainerAllocateWithLabels() throws Exception {
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x"),
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x"),
|
||||||
NodeId.newInstance("h2", 0), toSet("y")));
|
NodeId.newInstance("h2", 0), toSet("y")));
|
||||||
|
|
||||||
|
@ -386,7 +386,7 @@ public class TestNodeLabelContainerAllocation {
|
||||||
// instead, it uses default queue label expression
|
// instead, it uses default queue label expression
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
|
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x"),
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x"),
|
||||||
NodeId.newInstance("h2", 0), toSet("y")));
|
NodeId.newInstance("h2", 0), toSet("y")));
|
||||||
|
|
||||||
|
@ -483,9 +483,8 @@ public class TestNodeLabelContainerAllocation {
|
||||||
*/
|
*/
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
|
mgr.addToCluserNodeLabels(ImmutableSet.of(
|
||||||
// Makes y to be non-exclusive node labels
|
NodeLabel.newInstance("x"), NodeLabel.newInstance("y", false)));
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("y", false)));
|
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("y")));
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("y")));
|
||||||
|
|
||||||
// inject node label manager
|
// inject node label manager
|
||||||
|
@ -561,9 +560,9 @@ public class TestNodeLabelContainerAllocation {
|
||||||
*/
|
*/
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
|
mgr.addToCluserNodeLabels(ImmutableSet.of(
|
||||||
|
NodeLabel.newInstance("x"), NodeLabel.newInstance("y", false)));
|
||||||
// Makes y to be non-exclusive node labels
|
// Makes y to be non-exclusive node labels
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("y", false)));
|
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("y")));
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("y")));
|
||||||
|
|
||||||
// inject node label manager
|
// inject node label manager
|
||||||
|
@ -612,10 +611,8 @@ public class TestNodeLabelContainerAllocation {
|
||||||
* non-labeled request should get allocation on non-labeled nodes first.
|
* non-labeled request should get allocation on non-labeled nodes first.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
// set node -> label
|
mgr.addToCluserNodeLabels(ImmutableSet.of(
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
|
NodeLabel.newInstance("x", false), NodeLabel.newInstance("y")));
|
||||||
// Makes x to be non-exclusive node labels
|
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("x", false)));
|
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
||||||
|
|
||||||
// inject node label manager
|
// inject node label manager
|
||||||
|
@ -734,9 +731,8 @@ public class TestNodeLabelContainerAllocation {
|
||||||
csConf.setCapacityByLabel(C2, "x", 0);
|
csConf.setCapacityByLabel(C2, "x", 0);
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
|
mgr.addToCluserNodeLabels(ImmutableSet.of(
|
||||||
// Makes x to be non-exclusive node labels
|
NodeLabel.newInstance("x", false), NodeLabel.newInstance("y")));
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("x", false)));
|
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
||||||
|
|
||||||
// inject node label manager
|
// inject node label manager
|
||||||
|
@ -844,9 +840,8 @@ public class TestNodeLabelContainerAllocation {
|
||||||
csConf.setUserLimitFactor(B, 5);
|
csConf.setUserLimitFactor(B, 5);
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x"));
|
mgr.addToCluserNodeLabels(ImmutableSet.of(
|
||||||
// Makes x to be non-exclusive node labels
|
NodeLabel.newInstance("x", false), NodeLabel.newInstance("y")));
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("x", false)));
|
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
||||||
|
|
||||||
// inject node label manager
|
// inject node label manager
|
||||||
|
@ -909,9 +904,8 @@ public class TestNodeLabelContainerAllocation {
|
||||||
*/
|
*/
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
|
mgr.addToCluserNodeLabels(ImmutableSet.of(
|
||||||
// Makes x to be non-exclusive node labels
|
NodeLabel.newInstance("x", false), NodeLabel.newInstance("y")));
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("x", false)));
|
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
||||||
|
|
||||||
// inject node label manager
|
// inject node label manager
|
||||||
|
@ -984,9 +978,8 @@ public class TestNodeLabelContainerAllocation {
|
||||||
csConf.setMaximumCapacityByLabel(B, "x", 50);
|
csConf.setMaximumCapacityByLabel(B, "x", 50);
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x"));
|
mgr.addToCluserNodeLabels(ImmutableSet.of(
|
||||||
// Makes x to be non-exclusive node labels
|
NodeLabel.newInstance("x", false)));
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("x", false)));
|
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
||||||
|
|
||||||
// inject node label manager
|
// inject node label manager
|
||||||
|
@ -1114,9 +1107,8 @@ public class TestNodeLabelContainerAllocation {
|
||||||
csConf.setCapacityByLabel(B, "x", 50);
|
csConf.setCapacityByLabel(B, "x", 50);
|
||||||
|
|
||||||
// set node -> label
|
// set node -> label
|
||||||
mgr.addToCluserNodeLabels(ImmutableSet.of("x"));
|
mgr.addToCluserNodeLabels(ImmutableSet.of(
|
||||||
// Makes x to be non-exclusive node labels
|
NodeLabel.newInstance("x", false)));
|
||||||
mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("x", false)));
|
|
||||||
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
|
||||||
|
|
||||||
// inject node label manager
|
// inject node label manager
|
||||||
|
@@ -1349,9 +1341,8 @@ public class TestNodeLabelContainerAllocation
     csConf.setCapacity(D, 25);
 
     // set node -> label
-    mgr.addToCluserNodeLabels(ImmutableSet.of("x"));
-    // Makes x to be non-exclusive node labels
-    mgr.updateNodeLabels(Arrays.asList(NodeLabel.newInstance("x", false)));
+    mgr.addToCluserNodeLabels(ImmutableSet.of(
+        NodeLabel.newInstance("x", false)));
     mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x")));
 
     // inject node label manager
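The TestNodeLabelContainerAllocation hunks above all follow the same pattern: a label's exclusivity is now supplied when the label is registered, so the follow-up updateNodeLabels(...) call that previously flipped "x" to non-exclusive is dropped. A minimal sketch of that creation style (accessor names getName()/isExclusive() are assumed; a label created without an explicit flag defaults to exclusive):

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.yarn.api.records.NodeLabel;

public class NodeLabelCreationSketch {
  public static void main(String[] args) {
    // "x" is registered as non-exclusive up front; "y" keeps the default (exclusive).
    List<NodeLabel> labels = Arrays.asList(
        NodeLabel.newInstance("x", false),
        NodeLabel.newInstance("y"));

    for (NodeLabel label : labels) {
      // isExclusive() is the assumed accessor for the exclusivity flag.
      System.out.println(label.getName() + " exclusive=" + label.isExclusive());
    }
  }
}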
@@ -417,7 +417,7 @@ public class TestQueueParsing
 
   @Test
   public void testQueueParsingReinitializeWithLabels() throws IOException {
-    nodeLabelManager.addToCluserNodeLabels(ImmutableSet.of("red", "blue"));
+    nodeLabelManager.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("red", "blue"));
     CapacitySchedulerConfiguration csConf =
         new CapacitySchedulerConfiguration();
     setupQueueConfigurationWithoutLabels(csConf);
@@ -505,7 +505,7 @@ public class TestQueueParsing
 
   @Test
   public void testQueueParsingWithLabels() throws IOException {
-    nodeLabelManager.addToCluserNodeLabels(ImmutableSet.of("red", "blue"));
+    nodeLabelManager.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("red", "blue"));
 
     YarnConfiguration conf = new YarnConfiguration();
     CapacitySchedulerConfiguration csConf =
@@ -529,7 +529,7 @@ public class TestQueueParsing
 
   @Test
   public void testQueueParsingWithLabelsInherit() throws IOException {
-    nodeLabelManager.addToCluserNodeLabels(ImmutableSet.of("red", "blue"));
+    nodeLabelManager.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("red", "blue"));
 
     YarnConfiguration conf = new YarnConfiguration();
     CapacitySchedulerConfiguration csConf =
@@ -668,7 +668,7 @@ public class TestQueueParsing
 
     // Initialize a cluster with labels, but doesn't use them, reinitialize
     // shouldn't fail
-    nodeLabelManager.addToCluserNodeLabels(labels);
+    nodeLabelManager.addToCluserNodeLabelsWithDefaultExclusivity(labels);
 
     CapacitySchedulerConfiguration csConf =
         new CapacitySchedulerConfiguration();
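In the TestQueueParsing hunks above the call sites only care about label names, so they switch to the renamed addToCluserNodeLabelsWithDefaultExclusivity helper instead of building NodeLabel records by hand. A hedged sketch of what such a wrapper could look like (the actual Hadoop implementation may differ; labels created from plain names keep the default, exclusive, setting):

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.yarn.api.records.NodeLabel;

import com.google.common.collect.ImmutableSet;

public class DefaultExclusivitySketch {
  // Hypothetical helper: converts plain label names into NodeLabel records that
  // keep the default exclusivity, mirroring what the renamed test call implies.
  static List<NodeLabel> withDefaultExclusivity(Set<String> labelNames) {
    List<NodeLabel> labels = new ArrayList<NodeLabel>();
    for (String name : labelNames) {
      labels.add(NodeLabel.newInstance(name));
    }
    return labels;
  }

  public static void main(String[] args) {
    for (NodeLabel label : withDefaultExclusivity(ImmutableSet.of("red", "blue"))) {
      System.out.println(label.getName());
    }
  }
}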
@@ -129,7 +129,7 @@ public class TestWorkPreservingRMRestartForNodeLabel
     // instead, it uses default queue label expression
 
     // set node -> label
-    mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
+    mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
     mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x"),
         NodeId.newInstance("h2", 0), toSet("y")));
 
@@ -205,7 +205,7 @@ public class TestWorkPreservingRMRestartForNodeLabel
     // Re-start RM
     mgr = new NullRMNodeLabelsManager();
     mgr.init(conf);
-    mgr.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
+    mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));
     mgr.addLabelsToNode(ImmutableMap.of(NodeId.newInstance("h1", 0), toSet("x"),
         NodeId.newInstance("h2", 0), toSet("y")));
     MockRM rm2 =
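The restart hunk above re-creates the label manager and registers the same labels and host mappings before the second RM instance comes up. A minimal sketch of that setup sequence, assuming the test-only NullRMNodeLabelsManager class and its package, and replacing the test's toSet(...) helper with plain Guava sets; the MockRM wiring that follows in the test is omitted:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager;

import com.google.common.collect.ImmutableSet;

public class RestartLabelSetupSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new YarnConfiguration();

    // Fresh label manager, as built before the second RM instance in the test.
    NullRMNodeLabelsManager mgr = new NullRMNodeLabelsManager();
    mgr.init(conf);

    // Register cluster labels with default exclusivity, then map hosts to labels.
    mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y"));

    Map<NodeId, Set<String>> nodeToLabels = new HashMap<NodeId, Set<String>>();
    nodeToLabels.put(NodeId.newInstance("h1", 0), ImmutableSet.of("x"));
    nodeToLabels.put(NodeId.newInstance("h2", 0), ImmutableSet.of("y"));
    mgr.addLabelsToNode(nodeToLabels);
  }
}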