Merge -r 1176714:1176715 from trunk to branch-0.23 to fix MAPREDUCE-3041.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1176718 13f79535-47bb-0310-9956-ffa450edef68
Author: Arun Murthy
Date: 2011-09-28 04:25:19 +00:00
Parent: f774d36921
Commit: f67abbdbf6
18 changed files with 308 additions and 181 deletions

View File

@@ -1428,6 +1428,10 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-3021. Change base urls for RM web-ui. (Thomas Graves via
     acmurthy)
 
+    MAPREDUCE-3041. Fixed ClientRMProtocol to provide min/max resource
+    capabilities along-with new ApplicationId for application submission.
+    (Hitesh Shah via acmurthy)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

View File

@@ -53,7 +53,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
@@ -169,8 +169,8 @@ public class ResourceMgrDelegate {
   }
 
   public JobID getNewJobID() throws IOException, InterruptedException {
-    GetNewApplicationIdRequest request = recordFactory.newRecordInstance(GetNewApplicationIdRequest.class);
-    applicationId = applicationsManager.getNewApplicationId(request).getApplicationId();
+    GetNewApplicationRequest request = recordFactory.newRecordInstance(GetNewApplicationRequest.class);
+    applicationId = applicationsManager.getNewApplication(request).getApplicationId();
     return TypeConverter.fromYarn(applicationId);
   }

View File

@@ -78,8 +78,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
@@ -245,7 +245,7 @@ public class TestClientRedirect {
     }
 
     @Override
-    public GetNewApplicationIdResponse getNewApplicationId(GetNewApplicationIdRequest request) throws YarnRemoteException {
+    public GetNewApplicationResponse getNewApplication(GetNewApplicationRequest request) throws YarnRemoteException {
       return null;
     }

View File

@@ -39,7 +39,6 @@ import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.ClientCache;
 import org.apache.hadoop.mapred.ClientServiceDelegate;
-import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.ResourceMgrDelegate;
 import org.apache.hadoop.mapred.YARNRunner;
 import org.apache.hadoop.mapreduce.JobID;
@@ -48,6 +47,8 @@ import org.apache.hadoop.mapreduce.JobStatus.State;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.yarn.api.ClientRMProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
@@ -57,8 +58,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
@@ -213,13 +212,13 @@ public class TestYARNRunner extends TestCase {
     delegate.getActiveTrackers();
     verify(clientRMProtocol).getClusterNodes(any(GetClusterNodesRequest.class));
 
-    GetNewApplicationIdResponse newAppIdResponse = recordFactory.newRecordInstance(
-        GetNewApplicationIdResponse.class);
-    newAppIdResponse.setApplicationId(appId);
-    when(clientRMProtocol.getNewApplicationId(any(GetNewApplicationIdRequest.class))).
-        thenReturn(newAppIdResponse);
+    GetNewApplicationResponse newAppResponse = recordFactory.newRecordInstance(
+        GetNewApplicationResponse.class);
+    newAppResponse.setApplicationId(appId);
+    when(clientRMProtocol.getNewApplication(any(GetNewApplicationRequest.class))).
+        thenReturn(newAppResponse);
     delegate.getNewJobID();
-    verify(clientRMProtocol).getNewApplicationId(any(GetNewApplicationIdRequest.class));
+    verify(clientRMProtocol).getNewApplication(any(GetNewApplicationRequest.class));
 
     GetQueueInfoResponse queueInfoResponse = recordFactory.newRecordInstance(
         GetQueueInfoResponse.class);

View File

@@ -31,8 +31,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
@@ -63,13 +63,17 @@ public interface ClientRMProtocol {
    * increasing, {@link ApplicationId} which is used by the client to submit
    * a new application.</p>
    *
+   * <p>The <code>ResourceManager</code> also responds with details such
+   * as minimum and maximum resource capabilities in the cluster as specified in
+   * {@link GetNewApplicationResponse}.</p>
+   *
    * @param request request to get a new <code>ApplicationId</code>
    * @return new <code>ApplicationId</code> to be used to submit an application
    * @throws YarnRemoteException
    * @see #submitApplication(SubmitApplicationRequest)
    */
-  public GetNewApplicationIdResponse getNewApplicationId(
-      GetNewApplicationIdRequest request)
+  public GetNewApplicationResponse getNewApplication(
+      GetNewApplicationRequest request)
   throws YarnRemoteException;
 
   /**
@@ -92,7 +96,7 @@ public interface ClientRMProtocol {
    * @param request request to submit a new application
    * @return (empty) response on accepting the submission
    * @throws YarnRemoteException
-   * @see #getNewApplicationId(GetNewApplicationIdRequest)
+   * @see #getNewApplication(GetNewApplicationRequest)
    */
  public SubmitApplicationResponse submitApplication(
      SubmitApplicationRequest request)
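For orientation, a minimal client-side sketch of the renamed call, assuming a ClientRMProtocol proxy and the Records factory used elsewhere in this patch; the helper name requestNewApplication is purely illustrative and is not part of this commit:

// Hypothetical helper, not part of this commit: obtains a new ApplicationId
// via the renamed getNewApplication() call on a ClientRMProtocol proxy.
ApplicationId requestNewApplication(ClientRMProtocol applicationsManager)
    throws YarnRemoteException {
  GetNewApplicationRequest request =
      Records.newRecord(GetNewApplicationRequest.class);
  GetNewApplicationResponse response =
      applicationsManager.getNewApplication(request);
  return response.getApplicationId();
}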

View File

@@ -27,10 +27,10 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
  * <p>The request sent by clients to get a new {@link ApplicationId} for
  * submitting an application.</p>
  *
- * @see ClientRMProtocol#getNewApplicationId(GetNewApplicationIdRequest)
+ * @see ClientRMProtocol#getNewApplication(GetNewApplicationRequest)
  */
 @Public
 @Stable
-public interface GetNewApplicationIdRequest {
+public interface GetNewApplicationRequest {
 }

View File

@@ -24,16 +24,17 @@ import org.apache.hadoop.classification.InterfaceStability.Stable;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.yarn.api.ClientRMProtocol;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.Resource;
 
 /**
  * <p>The response sent by the <code>ResourceManager</code> to the client for
  * a request to a new {@link ApplicationId} for submitting applications.</p>
  *
- * @see ClientRMProtocol#getNewApplicationId(GetNewApplicationIdRequest)
+ * @see ClientRMProtocol#getNewApplication(GetNewApplicationRequest)
  */
 @Public
 @Stable
-public interface GetNewApplicationIdResponse {
+public interface GetNewApplicationResponse {
   /**
    * Get the <em>new</em> <code>ApplicationId</code> allocated by the
    * <code>ResourceManager</code>.
@@ -47,4 +48,30 @@ public interface GetNewApplicationIdResponse {
   @Private
   @Unstable
   public abstract void setApplicationId(ApplicationId applicationId);
+
+  /**
+   * Get the minimum capability for any {@link Resource} allocated by the
+   * <code>ResourceManager</code> in the cluster.
+   * @return minimum capability of allocated resources in the cluster
+   */
+  @Public
+  @Stable
+  public Resource getMinimumResourceCapability();
+
+  @Private
+  @Unstable
+  public void setMinimumResourceCapability(Resource capability);
+
+  /**
+   * Get the maximum capability for any {@link Resource} allocated by the
+   * <code>ResourceManager</code> in the cluster.
+   * @return maximum capability of allocated resources in the cluster
+   */
+  @Public
+  @Stable
+  public Resource getMaximumResourceCapability();
+
+  @Private
+  @Unstable
+  public void setMaximumResourceCapability(Resource capability);
 }
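A hedged usage sketch of the two new fields, not part of this commit: a submitting client could clamp the memory it asks for to the limits the ResourceManager reports; "response" and "requestedMemory" are assumed locals.

// Illustrative only; "response" is a GetNewApplicationResponse obtained from
// getNewApplication(), and "requestedMemory" is an assumed local variable.
Resource min = response.getMinimumResourceCapability();
Resource max = response.getMaximumResourceCapability();
int amMemory = Math.max(min.getMemory(),
    Math.min(requestedMemory, max.getMemory()));  // stay within cluster limits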

View File

@@ -1,109 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdResponseProtoOrBuilder;
public class GetNewApplicationIdResponsePBImpl extends ProtoBase<GetNewApplicationIdResponseProto> implements GetNewApplicationIdResponse {
GetNewApplicationIdResponseProto proto = GetNewApplicationIdResponseProto.getDefaultInstance();
GetNewApplicationIdResponseProto.Builder builder = null;
boolean viaProto = false;
private ApplicationId applicationId = null;
public GetNewApplicationIdResponsePBImpl() {
builder = GetNewApplicationIdResponseProto.newBuilder();
}
public GetNewApplicationIdResponsePBImpl(GetNewApplicationIdResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public GetNewApplicationIdResponseProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void mergeLocalToBuilder() {
if (applicationId != null) {
builder.setApplicationId(convertToProtoFormat(this.applicationId));
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetNewApplicationIdResponseProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public ApplicationId getApplicationId() {
GetNewApplicationIdResponseProtoOrBuilder p = viaProto ? proto : builder;
if (this.applicationId != null) {
return this.applicationId;
}
if (!p.hasApplicationId()) {
return null;
}
this.applicationId = convertFromProtoFormat(p.getApplicationId());
return this.applicationId;
}
@Override
public void setApplicationId(ApplicationId applicationId) {
maybeInitBuilder();
if (applicationId == null)
builder.clearApplicationId();
this.applicationId = applicationId;
}
private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) {
return new ApplicationIdPBImpl(p);
}
private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
return ((ApplicationIdPBImpl)t).getProto();
}
}

View File

@@ -19,27 +19,26 @@
 package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
 
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
 import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto;
 
-public class GetNewApplicationIdRequestPBImpl extends ProtoBase<GetNewApplicationIdRequestProto> implements GetNewApplicationIdRequest {
-  GetNewApplicationIdRequestProto proto = GetNewApplicationIdRequestProto.getDefaultInstance();
-  GetNewApplicationIdRequestProto.Builder builder = null;
+public class GetNewApplicationRequestPBImpl extends ProtoBase<GetNewApplicationRequestProto> implements GetNewApplicationRequest {
+  GetNewApplicationRequestProto proto = GetNewApplicationRequestProto.getDefaultInstance();
+  GetNewApplicationRequestProto.Builder builder = null;
   boolean viaProto = false;
 
-  public GetNewApplicationIdRequestPBImpl() {
-    builder = GetNewApplicationIdRequestProto.newBuilder();
+  public GetNewApplicationRequestPBImpl() {
+    builder = GetNewApplicationRequestProto.newBuilder();
   }
 
-  public GetNewApplicationIdRequestPBImpl(GetNewApplicationIdRequestProto proto) {
+  public GetNewApplicationRequestPBImpl(GetNewApplicationRequestProto proto) {
     this.proto = proto;
     viaProto = true;
   }
 
-  public GetNewApplicationIdRequestProto getProto() {
+  public GetNewApplicationRequestProto getProto() {
     proto = viaProto ? proto : builder.build();
     viaProto = true;
     return proto;
@@ -47,7 +46,7 @@ public class GetNewApplicationIdRequestPBImpl extends ProtoBase<GetNewApplicatio
 
   private void maybeInitBuilder() {
     if (viaProto || builder == null) {
-      builder = GetNewApplicationIdRequestProto.newBuilder(proto);
+      builder = GetNewApplicationRequestProto.newBuilder(proto);
     }
     viaProto = false;
   }

View File

@@ -0,0 +1,173 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProtoOrBuilder;
public class GetNewApplicationResponsePBImpl extends ProtoBase<GetNewApplicationResponseProto> implements GetNewApplicationResponse {
GetNewApplicationResponseProto proto = GetNewApplicationResponseProto.getDefaultInstance();
GetNewApplicationResponseProto.Builder builder = null;
boolean viaProto = false;
private ApplicationId applicationId = null;
private Resource minimumResourceCapability = null;
private Resource maximumResourceCapability = null;
public GetNewApplicationResponsePBImpl() {
builder = GetNewApplicationResponseProto.newBuilder();
}
public GetNewApplicationResponsePBImpl(GetNewApplicationResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public GetNewApplicationResponseProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void mergeLocalToBuilder() {
if (applicationId != null) {
builder.setApplicationId(convertToProtoFormat(this.applicationId));
}
if (minimumResourceCapability != null) {
builder.setMinimumCapability(convertToProtoFormat(this.minimumResourceCapability));
}
if (maximumResourceCapability != null) {
builder.setMaximumCapability(convertToProtoFormat(this.maximumResourceCapability));
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetNewApplicationResponseProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public ApplicationId getApplicationId() {
if (this.applicationId != null) {
return this.applicationId;
}
GetNewApplicationResponseProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasApplicationId()) {
return null;
}
this.applicationId = convertFromProtoFormat(p.getApplicationId());
return this.applicationId;
}
@Override
public void setApplicationId(ApplicationId applicationId) {
maybeInitBuilder();
if (applicationId == null)
builder.clearApplicationId();
this.applicationId = applicationId;
}
@Override
public Resource getMaximumResourceCapability() {
if (this.maximumResourceCapability != null) {
return this.maximumResourceCapability;
}
GetNewApplicationResponseProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasMaximumCapability()) {
return null;
}
this.maximumResourceCapability = convertFromProtoFormat(p.getMaximumCapability());
return this.maximumResourceCapability;
}
@Override
public Resource getMinimumResourceCapability() {
if (this.minimumResourceCapability != null) {
return this.minimumResourceCapability;
}
GetNewApplicationResponseProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasMinimumCapability()) {
return null;
}
this.minimumResourceCapability = convertFromProtoFormat(p.getMinimumCapability());
return this.minimumResourceCapability;
}
@Override
public void setMaximumResourceCapability(Resource capability) {
maybeInitBuilder();
if(maximumResourceCapability == null) {
builder.clearMaximumCapability();
}
this.maximumResourceCapability = capability;
}
@Override
public void setMinimumResourceCapability(Resource capability) {
maybeInitBuilder();
if(minimumResourceCapability == null) {
builder.clearMinimumCapability();
}
this.minimumResourceCapability = capability;
}
private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) {
return new ApplicationIdPBImpl(p);
}
private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
return ((ApplicationIdPBImpl)t).getProto();
}
private Resource convertFromProtoFormat(ResourceProto resource) {
return new ResourcePBImpl(resource);
}
private ResourceProto convertToProtoFormat(Resource resource) {
return ((ResourcePBImpl)resource).getProto();
}
}

View File

@@ -24,7 +24,7 @@ option java_generate_equals_and_hash = true;
 import "yarn_service_protos.proto";
 
 service ClientRMProtocolService {
-  rpc getNewApplicationId (GetNewApplicationIdRequestProto) returns (GetNewApplicationIdResponseProto);
+  rpc getNewApplication (GetNewApplicationRequestProto) returns (GetNewApplicationResponseProto);
   rpc getApplicationReport (GetApplicationReportRequestProto) returns (GetApplicationReportResponseProto);
   rpc submitApplication (SubmitApplicationRequestProto) returns (SubmitApplicationResponseProto);
   rpc forceKillApplication (KillApplicationRequestProto) returns (KillApplicationResponseProto);

View File

@@ -66,11 +66,13 @@ message AllocateResponseProto {
 /////// client_RM_Protocol ///////////////////////////
 //////////////////////////////////////////////////////
 
-message GetNewApplicationIdRequestProto {
+message GetNewApplicationRequestProto {
 }
 
-message GetNewApplicationIdResponseProto {
+message GetNewApplicationResponseProto {
   optional ApplicationIdProto application_id = 1;
+  optional ResourceProto minimumCapability = 2;
+  optional ResourceProto maximumCapability = 3;
 }
 
 message GetApplicationReportRequestProto {

View File

@@ -33,8 +33,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
@@ -51,8 +51,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsReque
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsResponsePBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesRequestPBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesResponsePBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationIdRequestPBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationIdResponsePBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationRequestPBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationResponsePBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoRequestPBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoResponsePBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoRequestPBImpl;
@@ -68,7 +68,7 @@ import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestP
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto;
@@ -138,11 +138,11 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol {
   }
 
   @Override
-  public GetNewApplicationIdResponse getNewApplicationId(
-      GetNewApplicationIdRequest request) throws YarnRemoteException {
-    GetNewApplicationIdRequestProto requestProto = ((GetNewApplicationIdRequestPBImpl)request).getProto();
+  public GetNewApplicationResponse getNewApplication(
+      GetNewApplicationRequest request) throws YarnRemoteException {
+    GetNewApplicationRequestProto requestProto = ((GetNewApplicationRequestPBImpl)request).getProto();
     try {
-      return new GetNewApplicationIdResponsePBImpl(proxy.getNewApplicationId(null, requestProto));
+      return new GetNewApplicationResponsePBImpl(proxy.getNewApplication(null, requestProto));
     } catch (ServiceException e) {
       if (e.getCause() instanceof YarnRemoteException) {
         throw (YarnRemoteException)e.getCause();

View File

@@ -23,7 +23,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
@@ -36,8 +36,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsReque
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsResponsePBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesRequestPBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesResponsePBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationIdRequestPBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationIdResponsePBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationRequestPBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationResponsePBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoRequestPBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoResponsePBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoRequestPBImpl;
@@ -56,8 +56,8 @@ import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestPr
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto;
@@ -116,13 +116,13 @@ public class ClientRMProtocolPBServiceImpl implements BlockingInterface {
   }
 
   @Override
-  public GetNewApplicationIdResponseProto getNewApplicationId(
-      RpcController arg0, GetNewApplicationIdRequestProto proto)
+  public GetNewApplicationResponseProto getNewApplication(
+      RpcController arg0, GetNewApplicationRequestProto proto)
       throws ServiceException {
-    GetNewApplicationIdRequestPBImpl request = new GetNewApplicationIdRequestPBImpl(proto);
+    GetNewApplicationRequestPBImpl request = new GetNewApplicationRequestPBImpl(proto);
     try {
-      GetNewApplicationIdResponse response = real.getNewApplicationId(request);
-      return ((GetNewApplicationIdResponsePBImpl)response).getProto();
+      GetNewApplicationResponse response = real.getNewApplication(request);
+      return ((GetNewApplicationResponsePBImpl)response).getProto();
     } catch (YarnRemoteException e) {
       throw new ServiceException(e);
     }

View File

@@ -29,7 +29,7 @@ import org.apache.hadoop.yarn.api.ClientRMProtocol;
 import org.apache.hadoop.yarn.api.ContainerManager;
 import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.StartContainerResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.StopContainerRequest;
@@ -86,11 +86,11 @@ public class TestRPC {
             + server.getPort()), conf);
     try {
-      proxy.getNewApplicationId(Records
-          .newRecord(GetNewApplicationIdRequest.class));
+      proxy.getNewApplication(Records
+          .newRecord(GetNewApplicationRequest.class));
       Assert.fail("Excepted RPC call to fail with unknown method.");
     } catch (YarnRemoteException e) {
-      Assert.assertEquals("Unknown method getNewApplicationId called on interface "
+      Assert.assertEquals("Unknown method getNewApplication called on "
          + "org.apache.hadoop.yarn.proto.ClientRMProtocol"
          + "$ClientRMProtocolService$BlockingInterface protocol.", e
          .getMessage());

View File

@@ -46,8 +46,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
@@ -165,11 +165,17 @@ public class ClientRMService extends AbstractService implements
   }
 
   @Override
-  public GetNewApplicationIdResponse getNewApplicationId(
-      GetNewApplicationIdRequest request) throws YarnRemoteException {
-    GetNewApplicationIdResponse response = recordFactory
-        .newRecordInstance(GetNewApplicationIdResponse.class);
+  public GetNewApplicationResponse getNewApplication(
+      GetNewApplicationRequest request) throws YarnRemoteException {
+    GetNewApplicationResponse response = recordFactory
+        .newRecordInstance(GetNewApplicationResponse.class);
     response.setApplicationId(getNewApplicationId());
+    // Pick up min/max resource from scheduler...
+    response.setMinimumResourceCapability(scheduler
+        .getMinimumResourceCapability());
+    response.setMaximumResourceCapability(scheduler
+        .getMaximumResourceCapability());
     return response;
   }

View File

@@ -23,8 +23,8 @@ import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.ClientRMProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -75,10 +75,16 @@ public class MockRM extends ResourceManager {
         finalState, app.getState());
   }
 
+  // get new application id
+  public GetNewApplicationResponse getNewAppId() throws Exception {
+    ClientRMProtocol client = getClientRMService();
+    return client.getNewApplication(Records.newRecord(GetNewApplicationRequest.class));
+  }
+
   //client
   public RMApp submitApp(int masterMemory) throws Exception {
     ClientRMProtocol client = getClientRMService();
-    GetNewApplicationIdResponse resp = client.getNewApplicationId(Records.newRecord(GetNewApplicationIdRequest.class));
+    GetNewApplicationResponse resp = client.getNewApplication(Records.newRecord(GetNewApplicationRequest.class));
     ApplicationId appId = resp.getApplicationId();
 
     SubmitApplicationRequest req = Records.newRecord(SubmitApplicationRequest.class);

View File

@@ -25,6 +25,7 @@ import junit.framework.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
@@ -40,6 +41,20 @@ public class TestRM {
 
   private static final Log LOG = LogFactory.getLog(TestRM.class);
 
+  @Test
+  public void testGetNewAppId() throws Exception {
+    Logger rootLogger = LogManager.getRootLogger();
+    rootLogger.setLevel(Level.DEBUG);
+    MockRM rm = new MockRM();
+    rm.start();
+
+    GetNewApplicationResponse resp = rm.getNewAppId();
+    assert (resp.getApplicationId().getId() != 0);
+    assert (resp.getMinimumResourceCapability().getMemory() > 0);
+    assert (resp.getMaximumResourceCapability().getMemory() > 0);
+    rm.stop();
+  }
+
   @Test
   public void testAppWithNoContainers() throws Exception {
     Logger rootLogger = LogManager.getRootLogger();
@@ -119,6 +134,7 @@ public class TestRM {
   public static void main(String[] args) throws Exception {
     TestRM t = new TestRM();
+    t.testGetNewAppId();
     t.testAppWithNoContainers();
     t.testAppOnMultiNode();
   }