YARN-2544. Added admin-API objects for using node-labels. Contributed by Wangda Tan.

Vinod Kumar Vavilapalli 2014-10-09 15:46:24 -07:00
parent 8d94114495
commit 596702a025
27 changed files with 1590 additions and 5 deletions


@@ -147,6 +147,9 @@ Release 2.6.0 - UNRELEASED
YARN-2493. Added user-APIs for using node-labels. (Wangda Tan via vinodkv)
YARN-2544. Added admin-API objects for using node-labels. (Wangda Tan via
vinodkv)
IMPROVEMENTS
YARN-2197. Add a link to YARN CHANGES.txt in the left side of doc


@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.api.records;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
@@ -48,13 +49,14 @@ import org.apache.hadoop.yarn.util.Records;
@Public
@Stable
public abstract class QueueInfo {
@Private
@Unstable
public static QueueInfo newInstance(String queueName, float capacity,
float maximumCapacity, float currentCapacity,
List<QueueInfo> childQueues, List<ApplicationReport> applications,
QueueState queueState) {
QueueState queueState, Set<String> accessibleNodeLabels,
String defaultNodeLabelExpression) {
QueueInfo queueInfo = Records.newRecord(QueueInfo.class);
queueInfo.setQueueName(queueName);
queueInfo.setCapacity(capacity);
@@ -63,6 +65,8 @@ public abstract class QueueInfo {
queueInfo.setChildQueues(childQueues);
queueInfo.setApplications(applications);
queueInfo.setQueueState(queueState);
queueInfo.setAccessibleNodeLabels(accessibleNodeLabels);
queueInfo.setDefaultNodeLabelExpression(defaultNodeLabelExpression);
return queueInfo;
}
@@ -149,4 +153,36 @@ public abstract class QueueInfo {
@Private
@Unstable
public abstract void setQueueState(QueueState queueState);
/**
* Get the <code>accessible node labels</code> of the queue.
* @return <code>accessible node labels</code> of the queue
*/
@Public
@Stable
public abstract Set<String> getAccessibleNodeLabels();
/**
* Set the <code>accessible node labels</code> of the queue.
*/
@Private
@Unstable
public abstract void setAccessibleNodeLabels(Set<String> labels);
/**
* Get the <code>default node label expression</code> of the queue; this takes
* effect only when the <code>ApplicationSubmissionContext</code> and
* <code>ResourceRequest</code> don't specify their
* <code>NodeLabelExpression</code>.
*
* @return <code>default node label expression</code> of the queue
*/
@Public
@Stable
public abstract String getDefaultNodeLabelExpression();
@Public
@Stable
public abstract void setDefaultNodeLabelExpression(
String defaultLabelExpression);
}
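
For orientation, a minimal sketch of the new QueueInfo fields in use. The queue name, capacities, labels and default expression below are hypothetical; the factory signature and getters are the ones declared above, and normally the scheduler builds the record while clients only read it.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueState;

public class QueueInfoNodeLabelsSketch {
  public static void main(String[] args) {
    // Hypothetical labels and default expression, for illustration only.
    Set<String> labels = new HashSet<String>(Arrays.asList("gpu", "large-mem"));
    QueueInfo info = QueueInfo.newInstance("root.analytics", 0.5f, 0.8f, 0.3f,
        null, null, QueueState.RUNNING, labels, "gpu");

    // Applications that omit a NodeLabelExpression fall back to the queue default.
    System.out.println(info.getAccessibleNodeLabels());       // [gpu, large-mem]
    System.out.println(info.getDefaultNodeLabelExpression()); // gpu
  }
}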


@@ -0,0 +1,44 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.util.Records;
@Public
@Evolving
public abstract class AddToClusterNodeLabelsRequest {
public static AddToClusterNodeLabelsRequest newInstance(Set<String> labels) {
AddToClusterNodeLabelsRequest request =
Records.newRecord(AddToClusterNodeLabelsRequest.class);
request.setNodeLabels(labels);
return request;
}
@Public
@Evolving
public abstract void setNodeLabels(Set<String> labels);
@Public
@Evolving
public abstract Set<String> getNodeLabels();
}
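
A minimal sketch of building the admin record declared above; the label names are made up, and the admin-protocol call that would actually carry the request is outside this patch.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest;

public class AddNodeLabelsSketch {
  public static void main(String[] args) {
    // Hypothetical label names; any set of strings works here.
    Set<String> labels = new HashSet<String>(Arrays.asList("gpu", "large-mem"));
    AddToClusterNodeLabelsRequest request =
        AddToClusterNodeLabelsRequest.newInstance(labels);
    System.out.println(request.getNodeLabels()); // [gpu, large-mem]
  }
}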


@@ -0,0 +1,31 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.util.Records;
@Public
@Evolving
public abstract class AddToClusterNodeLabelsResponse {
public static AddToClusterNodeLabelsResponse newInstance() {
return Records.newRecord(AddToClusterNodeLabelsResponse.class);
}
}


@@ -0,0 +1,31 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.util.Records;
@Public
@Evolving
public abstract class GetClusterNodeLabelsRequest {
public static GetClusterNodeLabelsRequest newInstance() {
return Records.newRecord(GetClusterNodeLabelsRequest.class);
}
}


@@ -0,0 +1,44 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.util.Records;
@Public
@Evolving
public abstract class GetClusterNodeLabelsResponse {
public static GetClusterNodeLabelsResponse newInstance(Set<String> labels) {
GetClusterNodeLabelsResponse response =
Records.newRecord(GetClusterNodeLabelsResponse.class);
response.setNodeLabels(labels);
return response;
}
@Public
@Evolving
public abstract void setNodeLabels(Set<String> labels);
@Public
@Evolving
public abstract Set<String> getNodeLabels();
}


@@ -0,0 +1,27 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import org.apache.hadoop.yarn.util.Records;
public abstract class GetNodesToLabelsRequest {
public static GetNodesToLabelsRequest newInstance() {
return Records.newRecord(GetNodesToLabelsRequest.class);
}
}


@@ -0,0 +1,45 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.util.Records;
public abstract class GetNodesToLabelsResponse {
public static GetNodesToLabelsResponse newInstance(
Map<NodeId, Set<String>> map) {
GetNodesToLabelsResponse response =
Records.newRecord(GetNodesToLabelsResponse.class);
response.setNodeToLabels(map);
return response;
}
@Public
@Evolving
public abstract void setNodeToLabels(Map<NodeId, Set<String>> map);
@Public
@Evolving
public abstract Map<NodeId, Set<String>> getNodeToLabels();
}


@@ -0,0 +1,45 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.util.Records;
@Public
@Evolving
public abstract class RemoveFromClusterNodeLabelsRequest {
public static RemoveFromClusterNodeLabelsRequest newInstance(
Set<String> labels) {
RemoveFromClusterNodeLabelsRequest request =
Records.newRecord(RemoveFromClusterNodeLabelsRequest.class);
request.setNodeLabels(labels);
return request;
}
@Public
@Evolving
public abstract void setNodeLabels(Set<String> labels);
@Public
@Evolving
public abstract Set<String> getNodeLabels();
}


@@ -0,0 +1,31 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.util.Records;
@Public
@Evolving
public abstract class RemoveFromClusterNodeLabelsResponse {
public static RemoveFromClusterNodeLabelsResponse newInstance() {
return Records.newRecord(RemoveFromClusterNodeLabelsResponse.class);
}
}


@@ -0,0 +1,47 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.util.Records;
@Public
@Evolving
public abstract class ReplaceLabelsOnNodeRequest {
public static ReplaceLabelsOnNodeRequest newInstance(
Map<NodeId, Set<String>> map) {
ReplaceLabelsOnNodeRequest request =
Records.newRecord(ReplaceLabelsOnNodeRequest.class);
request.setNodeToLabels(map);
return request;
}
@Public
@Evolving
public abstract void setNodeToLabels(Map<NodeId, Set<String>> map);
@Public
@Evolving
public abstract Map<NodeId, Set<String>> getNodeToLabels();
}
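
A minimal sketch of building a replace-labels request from the record above; the host, port and label are placeholders, and the record's name suggests the node's existing label set is replaced wholesale.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;

public class ReplaceLabelsOnNodeSketch {
  public static void main(String[] args) {
    // Placeholder node address and label value.
    Map<NodeId, Set<String>> nodeToLabels = new HashMap<NodeId, Set<String>>();
    nodeToLabels.put(NodeId.newInstance("host1.example.com", 45454),
        Collections.singleton("gpu"));

    ReplaceLabelsOnNodeRequest request =
        ReplaceLabelsOnNodeRequest.newInstance(nodeToLabels);
    System.out.println(request.getNodeToLabels());
  }
}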


@@ -0,0 +1,31 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.util.Records;
@Public
@Evolving
public abstract class ReplaceLabelsOnNodeResponse {
public static ReplaceLabelsOnNodeResponse newInstance() {
return Records.newRecord(ReplaceLabelsOnNodeResponse.class);
}
}


@@ -75,6 +75,46 @@ message UpdateNodeResourceRequestProto {
message UpdateNodeResourceResponseProto {
}
message AddToClusterNodeLabelsRequestProto {
repeated string nodeLabels = 1;
}
message AddToClusterNodeLabelsResponseProto {
}
message RemoveFromClusterNodeLabelsRequestProto {
repeated string nodeLabels = 1;
}
message RemoveFromClusterNodeLabelsResponseProto {
}
message NodeIdToLabelsProto {
optional NodeIdProto nodeId = 1;
repeated string nodeLabels = 2;
}
message ReplaceLabelsOnNodeRequestProto {
repeated NodeIdToLabelsProto nodeToLabels = 1;
}
message ReplaceLabelsOnNodeResponseProto {
}
message GetNodesToLabelsRequestProto {
}
message GetNodesToLabelsResponseProto {
repeated NodeIdToLabelsProto nodeToLabels = 1;
}
message GetClusterNodeLabelsRequestProto {
}
message GetClusterNodeLabelsResponseProto {
repeated string nodeLabels = 1;
}
//////////////////////////////////////////////////////////////////
///////////// RM Failover related records ////////////////////////
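
As a rough sketch of how the generated Java builders for the node-label messages above would be used (standard protobuf accessors for a repeated string field; the label values are placeholders):

import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsResponseProto;

public class NodeLabelsProtoSketch {
  public static void main(String[] args) {
    // repeated string nodeLabels = 1  ->  addNodeLabels(...) / getNodeLabelsList()
    AddToClusterNodeLabelsRequestProto add = AddToClusterNodeLabelsRequestProto
        .newBuilder()
        .addNodeLabels("gpu")
        .addNodeLabels("large-mem")
        .build();
    System.out.println(add.getNodeLabelsList());     // [gpu, large-mem]

    GetClusterNodeLabelsResponseProto labels = GetClusterNodeLabelsResponseProto
        .newBuilder()
        .addAllNodeLabels(add.getNodeLabelsList())
        .build();
    System.out.println(labels.getNodeLabelsCount()); // 2
  }
}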


@@ -332,6 +332,8 @@ message QueueInfoProto {
optional QueueStateProto state = 5;
repeated QueueInfoProto childQueues = 6;
repeated ApplicationReportProto applications = 7;
repeated string accessibleNodeLabels = 8;
optional string defaultNodeLabelExpression = 9;
}
enum QueueACLProto {


@@ -662,7 +662,7 @@ public abstract class ProtocolHATestBase extends ClientBaseWithFixes{
public QueueInfo createFakeQueueInfo() {
return QueueInfo.newInstance("root", 100f, 100f, 50f, null,
createFakeAppReports(), QueueState.RUNNING);
createFakeAppReports(), QueueState.RUNNING, null, null);
}
public List<QueueUserACLInfo> createFakeQueueUserACLInfoList() {


@@ -19,8 +19,10 @@
package org.apache.hadoop.yarn.api.records.impl.pb;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -44,6 +46,7 @@ public class QueueInfoPBImpl extends QueueInfo {
List<ApplicationReport> applicationsList;
List<QueueInfo> childQueuesList;
Set<String> accessibleNodeLabels;
public QueueInfoPBImpl() {
builder = QueueInfoProto.newBuilder();
@@ -281,6 +284,10 @@ public class QueueInfoPBImpl extends QueueInfo {
if (this.applicationsList != null) {
addApplicationsToProto();
}
if (this.accessibleNodeLabels != null) {
builder.clearAccessibleNodeLabels();
builder.addAllAccessibleNodeLabels(this.accessibleNodeLabels);
}
}
private void mergeLocalToProto() {
@@ -322,5 +329,43 @@ public class QueueInfoPBImpl extends QueueInfo {
private QueueStateProto convertToProtoFormat(QueueState queueState) {
return ProtoUtils.convertToProtoFormat(queueState);
}
@Override
public void setAccessibleNodeLabels(Set<String> nodeLabels) {
maybeInitBuilder();
builder.clearAccessibleNodeLabels();
this.accessibleNodeLabels = nodeLabels;
}
private void initNodeLabels() {
if (this.accessibleNodeLabels != null) {
return;
}
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
this.accessibleNodeLabels = new HashSet<String>();
this.accessibleNodeLabels.addAll(p.getAccessibleNodeLabelsList());
}
@Override
public Set<String> getAccessibleNodeLabels() {
initNodeLabels();
return this.accessibleNodeLabels;
}
@Override
public String getDefaultNodeLabelExpression() {
QueueInfoProtoOrBuilder p = viaProto ? proto : builder;
return (p.hasDefaultNodeLabelExpression()) ? p
.getDefaultNodeLabelExpression() : null;
}
@Override
public void setDefaultNodeLabelExpression(String defaultNodeLabelExpression) {
maybeInitBuilder();
if (defaultNodeLabelExpression == null) {
builder.clearDefaultNodeLabelExpression();
return;
}
builder.setDefaultNodeLabelExpression(defaultNodeLabelExpression);
}
}


@@ -0,0 +1,114 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProtoOrBuilder;
import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest;
public class AddToClusterNodeLabelsRequestPBImpl extends
AddToClusterNodeLabelsRequest {
Set<String> labels;
AddToClusterNodeLabelsRequestProto proto = AddToClusterNodeLabelsRequestProto
.getDefaultInstance();
AddToClusterNodeLabelsRequestProto.Builder builder = null;
boolean viaProto = false;
public AddToClusterNodeLabelsRequestPBImpl() {
this.builder = AddToClusterNodeLabelsRequestProto.newBuilder();
}
public AddToClusterNodeLabelsRequestPBImpl(
AddToClusterNodeLabelsRequestProto proto) {
this.proto = proto;
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = AddToClusterNodeLabelsRequestProto.newBuilder(proto);
}
viaProto = false;
}
private void mergeLocalToBuilder() {
if (this.labels != null && !this.labels.isEmpty()) {
builder.clearNodeLabels();
builder.addAllNodeLabels(this.labels);
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
public AddToClusterNodeLabelsRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void initLabels() {
if (this.labels != null) {
return;
}
AddToClusterNodeLabelsRequestProtoOrBuilder p = viaProto ? proto : builder;
this.labels = new HashSet<String>();
this.labels.addAll(p.getNodeLabelsList());
}
@Override
public void setNodeLabels(Set<String> labels) {
maybeInitBuilder();
if (labels == null || labels.isEmpty()) {
builder.clearNodeLabels();
}
this.labels = labels;
}
@Override
public Set<String> getNodeLabels() {
initLabels();
return this.labels;
}
@Override
public int hashCode() {
assert false : "hashCode not designed";
return 0;
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
}
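
The PBImpl above follows YARN's usual lazy record-to-proto pattern: setters update the local Set, and getProto() merges it into the builder on demand. A minimal round-trip sketch (the label value is hypothetical):

import java.util.Collections;

import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl;

public class AddNodeLabelsPBRoundTripSketch {
  public static void main(String[] args) {
    // Record -> proto: getProto() folds the local Set into the builder.
    AddToClusterNodeLabelsRequestPBImpl request =
        new AddToClusterNodeLabelsRequestPBImpl();
    request.setNodeLabels(Collections.singleton("gpu"));
    AddToClusterNodeLabelsRequestProto proto = request.getProto();

    // Proto -> record: the wrapping constructor reads lazily via initLabels().
    AddToClusterNodeLabelsRequestPBImpl restored =
        new AddToClusterNodeLabelsRequestPBImpl(proto);
    System.out.println(restored.getNodeLabels()); // [gpu]
  }
}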


@@ -0,0 +1,69 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsResponse;
import com.google.protobuf.TextFormat;
public class AddToClusterNodeLabelsResponsePBImpl extends
AddToClusterNodeLabelsResponse {
AddToClusterNodeLabelsResponseProto proto = AddToClusterNodeLabelsResponseProto
.getDefaultInstance();
AddToClusterNodeLabelsResponseProto.Builder builder = null;
boolean viaProto = false;
public AddToClusterNodeLabelsResponsePBImpl() {
builder = AddToClusterNodeLabelsResponseProto.newBuilder();
}
public AddToClusterNodeLabelsResponsePBImpl(
AddToClusterNodeLabelsResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public AddToClusterNodeLabelsResponseProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}


@@ -0,0 +1,68 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.GetClusterNodeLabelsRequest;
import com.google.protobuf.TextFormat;
public class GetClusterNodeLabelsRequestPBImpl extends
GetClusterNodeLabelsRequest {
GetClusterNodeLabelsRequestProto proto = GetClusterNodeLabelsRequestProto
.getDefaultInstance();
GetClusterNodeLabelsRequestProto.Builder builder = null;
boolean viaProto = false;
public GetClusterNodeLabelsRequestPBImpl() {
builder = GetClusterNodeLabelsRequestProto.newBuilder();
}
public GetClusterNodeLabelsRequestPBImpl(GetClusterNodeLabelsRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public GetClusterNodeLabelsRequestProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}


@@ -0,0 +1,114 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsResponseProtoOrBuilder;
import org.apache.hadoop.yarn.server.api.protocolrecords.GetClusterNodeLabelsResponse;
public class GetClusterNodeLabelsResponsePBImpl extends
GetClusterNodeLabelsResponse {
Set<String> labels;
GetClusterNodeLabelsResponseProto proto = GetClusterNodeLabelsResponseProto
.getDefaultInstance();
GetClusterNodeLabelsResponseProto.Builder builder = null;
boolean viaProto = false;
public GetClusterNodeLabelsResponsePBImpl() {
this.builder = GetClusterNodeLabelsResponseProto.newBuilder();
}
public GetClusterNodeLabelsResponsePBImpl(
GetClusterNodeLabelsResponseProto proto) {
this.proto = proto;
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetClusterNodeLabelsResponseProto.newBuilder(proto);
}
viaProto = false;
}
private void mergeLocalToBuilder() {
if (this.labels != null && !this.labels.isEmpty()) {
builder.clearNodeLabels();
builder.addAllNodeLabels(this.labels);
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
public GetClusterNodeLabelsResponseProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void initNodeLabels() {
if (this.labels != null) {
return;
}
GetClusterNodeLabelsResponseProtoOrBuilder p = viaProto ? proto : builder;
this.labels = new HashSet<String>();
this.labels.addAll(p.getNodeLabelsList());
}
@Override
public void setNodeLabels(Set<String> labels) {
maybeInitBuilder();
if (labels == null || labels.isEmpty()) {
builder.clearNodeLabels();
}
this.labels = labels;
}
@Override
public Set<String> getNodeLabels() {
initNodeLabels();
return this.labels;
}
@Override
public int hashCode() {
assert false : "hashCode not designed";
return 0;
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
}


@@ -0,0 +1,67 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetNodesToLabelsRequestProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.GetNodesToLabelsRequest;
import com.google.protobuf.TextFormat;
public class GetNodesToLabelsRequestPBImpl extends GetNodesToLabelsRequest {
GetNodesToLabelsRequestProto proto = GetNodesToLabelsRequestProto
.getDefaultInstance();
GetNodesToLabelsRequestProto.Builder builder = null;
boolean viaProto = false;
public GetNodesToLabelsRequestPBImpl() {
builder = GetNodesToLabelsRequestProto.newBuilder();
}
public GetNodesToLabelsRequestPBImpl(GetNodesToLabelsRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public GetNodesToLabelsRequestProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}


@@ -0,0 +1,168 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetNodesToLabelsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetNodesToLabelsResponseProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.GetNodesToLabelsResponse;
import com.google.common.collect.Sets;
public class GetNodesToLabelsResponsePBImpl extends
GetNodesToLabelsResponse {
GetNodesToLabelsResponseProto proto = GetNodesToLabelsResponseProto
.getDefaultInstance();
GetNodesToLabelsResponseProto.Builder builder = null;
boolean viaProto = false;
private Map<NodeId, Set<String>> nodeToLabels;
public GetNodesToLabelsResponsePBImpl() {
this.builder = GetNodesToLabelsResponseProto.newBuilder();
}
public GetNodesToLabelsResponsePBImpl(GetNodesToLabelsResponseProto proto) {
this.proto = proto;
this.viaProto = true;
}
private void initNodeToLabels() {
if (this.nodeToLabels != null) {
return;
}
GetNodesToLabelsResponseProtoOrBuilder p = viaProto ? proto : builder;
List<NodeIdToLabelsProto> list = p.getNodeToLabelsList();
this.nodeToLabels = new HashMap<NodeId, Set<String>>();
for (NodeIdToLabelsProto c : list) {
this.nodeToLabels.put(new NodeIdPBImpl(c.getNodeId()),
Sets.newHashSet(c.getNodeLabelsList()));
}
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetNodesToLabelsResponseProto.newBuilder(proto);
}
viaProto = false;
}
private void addNodeToLabelsToProto() {
maybeInitBuilder();
builder.clearNodeToLabels();
if (nodeToLabels == null) {
return;
}
Iterable<NodeIdToLabelsProto> iterable =
new Iterable<NodeIdToLabelsProto>() {
@Override
public Iterator<NodeIdToLabelsProto> iterator() {
return new Iterator<NodeIdToLabelsProto>() {
Iterator<Entry<NodeId, Set<String>>> iter = nodeToLabels
.entrySet().iterator();
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public NodeIdToLabelsProto next() {
Entry<NodeId, Set<String>> now = iter.next();
return NodeIdToLabelsProto.newBuilder()
.setNodeId(convertToProtoFormat(now.getKey()))
.addAllNodeLabels(now.getValue()).build();
}
@Override
public boolean hasNext() {
return iter.hasNext();
}
};
}
};
builder.addAllNodeToLabels(iterable);
}
private void mergeLocalToBuilder() {
if (this.nodeToLabels != null) {
addNodeToLabelsToProto();
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
public GetNodesToLabelsResponseProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public Map<NodeId, Set<String>> getNodeToLabels() {
initNodeToLabels();
return this.nodeToLabels;
}
@Override
public void setNodeToLabels(Map<NodeId, Set<String>> map) {
initNodeToLabels();
nodeToLabels.clear();
nodeToLabels.putAll(map);
}
private NodeIdProto convertToProtoFormat(NodeId t) {
return ((NodeIdPBImpl)t).getProto();
}
@Override
public int hashCode() {
assert false : "hashCode not designed";
return 0;
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
}


@@ -0,0 +1,115 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsRequest;
public class RemoveFromClusterNodeLabelsRequestPBImpl extends
RemoveFromClusterNodeLabelsRequest {
Set<String> labels;
RemoveFromClusterNodeLabelsRequestProto proto =
RemoveFromClusterNodeLabelsRequestProto.getDefaultInstance();
RemoveFromClusterNodeLabelsRequestProto.Builder builder = null;
boolean viaProto = false;
public RemoveFromClusterNodeLabelsRequestPBImpl() {
this.builder = RemoveFromClusterNodeLabelsRequestProto.newBuilder();
}
public RemoveFromClusterNodeLabelsRequestPBImpl(
RemoveFromClusterNodeLabelsRequestProto proto) {
this.proto = proto;
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = RemoveFromClusterNodeLabelsRequestProto.newBuilder(proto);
}
viaProto = false;
}
private void mergeLocalToBuilder() {
if (this.labels != null && !this.labels.isEmpty()) {
builder.clearNodeLabels();
builder.addAllNodeLabels(this.labels);
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
public RemoveFromClusterNodeLabelsRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void initNodeLabels() {
if (this.labels != null) {
return;
}
RemoveFromClusterNodeLabelsRequestProtoOrBuilder p =
viaProto ? proto : builder;
this.labels = new HashSet<String>();
this.labels.addAll(p.getNodeLabelsList());
}
@Override
public void setNodeLabels(Set<String> labels) {
maybeInitBuilder();
if (labels == null || labels.isEmpty()) {
builder.clearNodeLabels();
}
this.labels = labels;
}
@Override
public Set<String> getNodeLabels() {
initNodeLabels();
return this.labels;
}
@Override
public int hashCode() {
assert false : "hashCode not designed";
return 0;
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
}


@@ -0,0 +1,69 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse;
import com.google.protobuf.TextFormat;
public class RemoveFromClusterNodeLabelsResponsePBImpl extends
RemoveFromClusterNodeLabelsResponse {
RemoveFromClusterNodeLabelsResponseProto proto =
RemoveFromClusterNodeLabelsResponseProto.getDefaultInstance();
RemoveFromClusterNodeLabelsResponseProto.Builder builder = null;
boolean viaProto = false;
public RemoveFromClusterNodeLabelsResponsePBImpl() {
builder = RemoveFromClusterNodeLabelsResponseProto.newBuilder();
}
public RemoveFromClusterNodeLabelsResponsePBImpl(
RemoveFromClusterNodeLabelsResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public RemoveFromClusterNodeLabelsResponseProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}


@@ -0,0 +1,168 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProtoOrBuilder;
import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
import com.google.common.collect.Sets;
public class ReplaceLabelsOnNodeRequestPBImpl extends
ReplaceLabelsOnNodeRequest {
ReplaceLabelsOnNodeRequestProto proto = ReplaceLabelsOnNodeRequestProto
.getDefaultInstance();
ReplaceLabelsOnNodeRequestProto.Builder builder = null;
boolean viaProto = false;
private Map<NodeId, Set<String>> nodeIdToLabels;
public ReplaceLabelsOnNodeRequestPBImpl() {
this.builder = ReplaceLabelsOnNodeRequestProto.newBuilder();
}
public ReplaceLabelsOnNodeRequestPBImpl(ReplaceLabelsOnNodeRequestProto proto) {
this.proto = proto;
this.viaProto = true;
}
private void initNodeToLabels() {
if (this.nodeIdToLabels != null) {
return;
}
ReplaceLabelsOnNodeRequestProtoOrBuilder p = viaProto ? proto : builder;
List<NodeIdToLabelsProto> list = p.getNodeToLabelsList();
this.nodeIdToLabels = new HashMap<NodeId, Set<String>>();
for (NodeIdToLabelsProto c : list) {
this.nodeIdToLabels.put(new NodeIdPBImpl(c.getNodeId()),
Sets.newHashSet(c.getNodeLabelsList()));
}
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = ReplaceLabelsOnNodeRequestProto.newBuilder(proto);
}
viaProto = false;
}
private void addNodeToLabelsToProto() {
maybeInitBuilder();
builder.clearNodeToLabels();
if (nodeIdToLabels == null) {
return;
}
Iterable<NodeIdToLabelsProto> iterable =
new Iterable<NodeIdToLabelsProto>() {
@Override
public Iterator<NodeIdToLabelsProto> iterator() {
return new Iterator<NodeIdToLabelsProto>() {
Iterator<Entry<NodeId, Set<String>>> iter = nodeIdToLabels
.entrySet().iterator();
@Override
public void remove() {
throw new UnsupportedOperationException();
}
@Override
public NodeIdToLabelsProto next() {
Entry<NodeId, Set<String>> now = iter.next();
return NodeIdToLabelsProto.newBuilder()
.setNodeId(convertToProtoFormat(now.getKey())).clearNodeLabels()
.addAllNodeLabels(now.getValue()).build();
}
@Override
public boolean hasNext() {
return iter.hasNext();
}
};
}
};
builder.addAllNodeToLabels(iterable);
}
private void mergeLocalToBuilder() {
if (this.nodeIdToLabels != null) {
addNodeToLabelsToProto();
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
public ReplaceLabelsOnNodeRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public Map<NodeId, Set<String>> getNodeToLabels() {
initNodeToLabels();
return this.nodeIdToLabels;
}
@Override
public void setNodeToLabels(Map<NodeId, Set<String>> map) {
initNodeToLabels();
nodeIdToLabels.clear();
nodeIdToLabels.putAll(map);
}
private NodeIdProto convertToProtoFormat(NodeId t) {
return ((NodeIdPBImpl) t).getProto();
}
@Override
public int hashCode() {
assert false : "hashCode not designed";
return 0;
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
}


@@ -0,0 +1,69 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse;
import com.google.protobuf.TextFormat;
public class ReplaceLabelsOnNodeResponsePBImpl extends
ReplaceLabelsOnNodeResponse {
ReplaceLabelsOnNodeResponseProto proto = ReplaceLabelsOnNodeResponseProto
.getDefaultInstance();
ReplaceLabelsOnNodeResponseProto.Builder builder = null;
boolean viaProto = false;
public ReplaceLabelsOnNodeResponsePBImpl() {
builder = ReplaceLabelsOnNodeResponseProto.newBuilder();
}
public ReplaceLabelsOnNodeResponsePBImpl(
ReplaceLabelsOnNodeResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public ReplaceLabelsOnNodeResponseProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
}


@@ -50,6 +50,7 @@ import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
@@ -212,8 +213,9 @@ public class TestPBImplRecords {
generateByNewInstance(StartContainerRequest.class);
// generateByNewInstance does not apply to QueueInfo because
// it is recursive (it has sub-queues)
typeValueCache.put(QueueInfo.class, QueueInfo.newInstance(
"root", 1.0f, 1.0f, 0.1f, null, null, QueueState.RUNNING));
typeValueCache.put(QueueInfo.class, QueueInfo.newInstance("root", 1.0f,
1.0f, 0.1f, null, null, QueueState.RUNNING, ImmutableSet.of("x", "y"),
"x && y"));
generateByNewInstance(QueueUserACLInfo.class);
generateByNewInstance(YarnClusterMetrics.class);
// for reservation system
@@ -934,4 +936,64 @@ public class TestPBImplRecords {
validatePBImplRecord(ReservationDeleteResponsePBImpl.class,
ReservationDeleteResponseProto.class);
}
@Test
public void testAddToClusterNodeLabelsRequestPBImpl() throws Exception {
validatePBImplRecord(AddToClusterNodeLabelsRequestPBImpl.class,
AddToClusterNodeLabelsRequestProto.class);
}
@Test
public void testAddToClusterNodeLabelsResponsePBImpl() throws Exception {
validatePBImplRecord(AddToClusterNodeLabelsResponsePBImpl.class,
AddToClusterNodeLabelsResponseProto.class);
}
@Test
public void testRemoveFromClusterNodeLabelsRequestPBImpl() throws Exception {
validatePBImplRecord(RemoveFromClusterNodeLabelsRequestPBImpl.class,
RemoveFromClusterNodeLabelsRequestProto.class);
}
@Test
public void testRemoveFromClusterNodeLabelsResponsePBImpl() throws Exception {
validatePBImplRecord(RemoveFromClusterNodeLabelsResponsePBImpl.class,
RemoveFromClusterNodeLabelsResponseProto.class);
}
@Test
public void testGetClusterNodeLabelsRequestPBImpl() throws Exception {
validatePBImplRecord(GetClusterNodeLabelsRequestPBImpl.class,
GetClusterNodeLabelsRequestProto.class);
}
@Test
public void testGetClusterNodeLabelsResponsePBImpl() throws Exception {
validatePBImplRecord(GetClusterNodeLabelsResponsePBImpl.class,
GetClusterNodeLabelsResponseProto.class);
}
@Test
public void testReplaceLabelsOnNodeRequestPBImpl() throws Exception {
validatePBImplRecord(ReplaceLabelsOnNodeRequestPBImpl.class,
ReplaceLabelsOnNodeRequestProto.class);
}
@Test
public void testReplaceLabelsOnNodeResponsePBImpl() throws Exception {
validatePBImplRecord(ReplaceLabelsOnNodeResponsePBImpl.class,
ReplaceLabelsOnNodeResponseProto.class);
}
@Test
public void testGetNodeToLabelsRequestPBImpl() throws Exception {
validatePBImplRecord(GetNodesToLabelsRequestPBImpl.class,
GetNodesToLabelsRequestProto.class);
}
@Test
public void testGetNodeToLabelsResponsePBImpl() throws Exception {
validatePBImplRecord(GetNodesToLabelsResponsePBImpl.class,
GetNodesToLabelsResponseProto.class);
}
}