YARN-8299. Added CLI and REST API for querying container status.

Contributed by Chandni Singh
Eric Yang 2018-07-16 17:41:23 -04:00
parent efb4e274e5
commit 121865c3f9
21 changed files with 773 additions and 202 deletions

View File

@@ -25,8 +25,10 @@
import java.util.Map;
import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.UriBuilder;

import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -48,10 +50,8 @@
import org.apache.hadoop.yarn.service.api.records.ServiceState;
import org.apache.hadoop.yarn.service.api.records.ServiceStatus;
import org.apache.hadoop.yarn.service.conf.RestApiConstants;
-import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.apache.hadoop.yarn.util.RMHAUtils;
-import org.codehaus.jackson.map.PropertyNamingStrategy;
import org.eclipse.jetty.util.UrlEncoded;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -147,11 +147,7 @@ private String getServicePath(String appName) throws IOException {
      api.append("/");
      api.append(appName);
    }
-    Configuration conf = getConfig();
-    if (conf.get("hadoop.http.authentication.type").equalsIgnoreCase("simple")) {
-      api.append("?user.name=" + UrlEncoded
-          .encodeString(System.getProperty("user.name")));
-    }
+    appendUserNameIfRequired(api);
    return api.toString();
  }
@@ -162,15 +158,27 @@ private String getInstancesPath(String appName) throws IOException {
    api.append(url);
    api.append("/app/v1/services/").append(appName).append("/")
        .append(RestApiConstants.COMP_INSTANCES);
-    Configuration conf = getConfig();
-    if (conf.get("hadoop.http.authentication.type").equalsIgnoreCase(
-        "simple")) {
-      api.append("?user.name=" + UrlEncoded
-          .encodeString(System.getProperty("user.name")));
-    }
+    appendUserNameIfRequired(api);
    return api.toString();
  }

+  private String getInstancePath(String appName, List<String> components,
+      String version, List<String> containerStates) throws IOException {
+    UriBuilder builder = UriBuilder.fromUri(getInstancesPath(appName));
+    if (components != null && !components.isEmpty()) {
+      components.forEach(compName ->
+          builder.queryParam(RestApiConstants.PARAM_COMP_NAME, compName));
+    }
+    if (!Strings.isNullOrEmpty(version)) {
+      builder.queryParam(RestApiConstants.PARAM_VERSION, version);
+    }
+    if (containerStates != null && !containerStates.isEmpty()) {
+      containerStates.forEach(state ->
+          builder.queryParam(RestApiConstants.PARAM_CONTAINER_STATE, state));
+    }
+    return builder.build().toString();
+  }

  private String getComponentsPath(String appName) throws IOException {
    Preconditions.checkNotNull(appName);
    String url = getRMWebAddress();
@@ -178,13 +186,17 @@ private String getComponentsPath(String appName) throws IOException {
    api.append(url);
    api.append("/app/v1/services/").append(appName).append("/")
        .append(RestApiConstants.COMPONENTS);
+    appendUserNameIfRequired(api);
+    return api.toString();
+  }
+
+  private void appendUserNameIfRequired(StringBuilder builder) {
    Configuration conf = getConfig();
    if (conf.get("hadoop.http.authentication.type").equalsIgnoreCase(
        "simple")) {
-      api.append("?user.name=" + UrlEncoded
-          .encodeString(System.getProperty("user.name")));
+      builder.append("?user.name=").append(UrlEncoded
+          .encodeString(System.getProperty("user.name")));
    }
-    return api.toString();
  }

  private Builder getApiClient() throws IOException {
@@ -553,7 +565,7 @@ public int actionUpgradeInstances(String appName, List<String> compInstances)
      container.setState(ContainerState.UPGRADING);
      toUpgrade[idx++] = container;
    }
-    String buffer = CONTAINER_JSON_SERDE.toJson(toUpgrade);
+    String buffer = ServiceApiUtil.CONTAINER_JSON_SERDE.toJson(toUpgrade);
    ClientResponse response = getApiClient(getInstancesPath(appName))
        .put(ClientResponse.class, buffer);
    result = processResponse(response);
@@ -577,7 +589,7 @@ public int actionUpgradeComponents(String appName, List<String> components)
      component.setState(ComponentState.UPGRADING);
      toUpgrade[idx++] = component;
    }
-    String buffer = COMP_JSON_SERDE.toJson(toUpgrade);
+    String buffer = ServiceApiUtil.COMP_JSON_SERDE.toJson(toUpgrade);
    ClientResponse response = getApiClient(getComponentsPath(appName))
        .put(ClientResponse.class, buffer);
    result = processResponse(response);
@@ -599,11 +611,25 @@ public int actionCleanUp(String appName, String userName) throws
    return result;
  }

-  private static final JsonSerDeser<Container[]> CONTAINER_JSON_SERDE =
-      new JsonSerDeser<>(Container[].class,
-          PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
-
-  private static final JsonSerDeser<Component[]> COMP_JSON_SERDE =
-      new JsonSerDeser<>(Component[].class,
-          PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
+  @Override
+  public String getInstances(String appName, List<String> components,
+      String version, List<String> containerStates) throws IOException,
+      YarnException {
+    try {
+      String uri = getInstancePath(appName, components, version,
+          containerStates);
+      ClientResponse response = getApiClient(uri).get(ClientResponse.class);
+      if (response.getStatus() != 200) {
+        StringBuilder sb = new StringBuilder();
+        sb.append("Failed: HTTP error code: ");
+        sb.append(response.getStatus());
+        sb.append(" ErrorMsg: ").append(response.getEntity(String.class));
+        return sb.toString();
+      }
+      return response.getEntity(String.class);
+    } catch (Exception e) {
+      LOG.error("Failed to get containers", e);
+    }
+    return null;
+  }
}
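For orientation, the query URI that getInstancePath produces can be sketched with a small standalone program. The base URL, service name, and filter values are invented, the component-instances path segment is assumed rather than read from RestApiConstants, and running the sketch needs a JAX-RS implementation (for example Jersey) on the classpath to back UriBuilder:

import java.util.Arrays;
import java.util.List;
import javax.ws.rs.core.UriBuilder;

// Hypothetical sketch of the query URI built above; parameter names mirror
// RestApiConstants but are hard-coded here for illustration.
public class InstancePathSketch {
  public static void main(String[] args) {
    UriBuilder builder = UriBuilder.fromUri(
        "http://rm-host:8088/app/v1/services/sleeper/component-instances");
    List<String> components = Arrays.asList("worker", "master");
    components.forEach(c -> builder.queryParam("componentName", c));
    builder.queryParam("version", "1.0.0");
    builder.queryParam("containerState", "READY");
    // Prints something like:
    // http://rm-host:8088/app/v1/services/sleeper/component-instances
    //     ?componentName=worker&componentName=master&version=1.0.0&containerState=READY
    System.out.println(builder.build());
  }
}

Repeated queryParam calls yield repeated keys, which is how the List<String> @QueryParam parameters on the server side receive multiple components or states.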

View File

@@ -44,14 +44,7 @@
import org.slf4j.LoggerFactory;

import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
+import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@@ -61,13 +54,7 @@
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
import java.util.stream.Collectors;

import static org.apache.hadoop.yarn.service.api.records.ServiceState.ACCEPTED;
@@ -582,6 +569,40 @@ public Response updateComponentInstances(@Context HttpServletRequest request,
    return Response.status(Status.NO_CONTENT).build();
  }

+  @GET
+  @Path(COMP_INSTANCES_PATH)
+  @Produces({RestApiConstants.MEDIA_TYPE_JSON_UTF8})
+  public Response getComponentInstances(@Context HttpServletRequest request,
+      @PathParam(SERVICE_NAME) String serviceName,
+      @QueryParam(PARAM_COMP_NAME) List<String> componentNames,
+      @QueryParam(PARAM_VERSION) String version,
+      @QueryParam(PARAM_CONTAINER_STATE) List<String> containerStates) {
+    try {
+      UserGroupInformation ugi = getProxyUser(request);
+      LOG.info("GET: component instances for service = {}, compNames in {}, " +
+          "version = {}, containerStates in {}, user = {}", serviceName,
+          Objects.toString(componentNames, "[]"), Objects.toString(version, ""),
+          Objects.toString(containerStates, "[]"), ugi);
+      List<ContainerState> containerStatesDe = containerStates.stream().map(
+          ContainerState::valueOf).collect(Collectors.toList());
+      return Response.ok(getContainers(ugi, serviceName, componentNames,
+          version, containerStatesDe)).build();
+    } catch (IllegalArgumentException iae) {
+      return formatResponse(Status.BAD_REQUEST, "valid container states are: " +
+          Arrays.toString(ContainerState.values()));
+    } catch (AccessControlException e) {
+      return formatResponse(Response.Status.FORBIDDEN, e.getMessage());
+    } catch (IOException | InterruptedException e) {
+      return formatResponse(Response.Status.INTERNAL_SERVER_ERROR,
+          e.getMessage());
+    } catch (UndeclaredThrowableException e) {
+      return formatResponse(Response.Status.INTERNAL_SERVER_ERROR,
+          e.getCause().getMessage());
+    }
+  }
+
  private Response flexService(Service service, UserGroupInformation ugi)
      throws IOException, InterruptedException {
    String appName = service.getName();
@@ -752,6 +773,22 @@ private Service getServiceFromClient(UserGroupInformation ugi,
    });
  }

+  private Container[] getContainers(UserGroupInformation ugi,
+      String serviceName, List<String> componentNames, String version,
+      List<ContainerState> containerStates) throws IOException,
+      InterruptedException {
+    return ugi.doAs((PrivilegedExceptionAction<Container[]>) () -> {
+      Container[] result;
+      ServiceClient sc = getServiceClient();
+      sc.init(YARN_CONFIG);
+      sc.start();
+      result = sc.getContainers(serviceName, componentNames, version,
+          containerStates);
+      sc.close();
+      return result;
+    });
+  }
+
  /**
   * Used by negative test case.
   *
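For completeness, a sketch of hitting the new endpoint directly over HTTP. The host, port, service name, and the assumption that COMP_INSTANCES_PATH maps to /app/v1/services/{service_name}/component-instances are illustrative only, and a secured cluster would need real authentication instead of the simple-auth user.name parameter:

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Scanner;

// Hypothetical caller of the GET component-instances endpoint added above.
public class GetComponentInstancesHttpSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://rm-host:8088/app/v1/services/sleeper/"
        + "component-instances?componentName=worker&containerState=READY"
        + "&user.name=hdfs-user");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    try (InputStream in = conn.getInputStream();
        Scanner scanner = new Scanner(in, "UTF-8").useDelimiter("\\A")) {
      // The body is the JSON array of container records serialized by
      // getComponentInstances above.
      System.out.println(scanner.hasNext() ? scanner.next() : "");
    }
  }
}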

View File

@@ -23,6 +23,8 @@
import org.apache.hadoop.yarn.proto.ClientAMProtocol.CompInstancesUpgradeRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsResponseProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.RestartServiceRequestProto;
@@ -55,4 +57,7 @@ RestartServiceResponseProto restart(RestartServiceRequestProto request)
  CompInstancesUpgradeResponseProto upgrade(
      CompInstancesUpgradeRequestProto request) throws IOException,
      YarnException;
+
+  GetCompInstancesResponseProto getCompInstances(
+      GetCompInstancesRequestProto request) throws IOException, YarnException;
}

View File

@@ -35,6 +35,8 @@
import org.apache.hadoop.yarn.proto.ClientAMProtocol.ComponentCountProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsResponseProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.RestartServiceRequestProto;
@@ -43,15 +45,18 @@
import org.apache.hadoop.yarn.proto.ClientAMProtocol.StopResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.UpgradeServiceRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.UpgradeServiceResponseProto;
+import org.apache.hadoop.yarn.service.api.records.Container;
import org.apache.hadoop.yarn.service.component.ComponentEvent;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEvent;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType;
+import org.apache.hadoop.yarn.service.utils.FilterUtils;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
+import java.util.List;

import static org.apache.hadoop.yarn.service.component.ComponentEventType.FLEX;
@@ -194,4 +199,13 @@ public CompInstancesUpgradeResponseProto upgrade(
    }
    return CompInstancesUpgradeResponseProto.newBuilder().build();
  }
+
+  @Override
+  public GetCompInstancesResponseProto getCompInstances(
+      GetCompInstancesRequestProto request) throws IOException {
+    List<Container> containers = FilterUtils.filterInstances(context, request);
+    return GetCompInstancesResponseProto.newBuilder().setCompInstances(
+        ServiceApiUtil.CONTAINER_JSON_SERDE.toJson(containers.toArray(
+            new Container[containers.size()]))).build();
+  }
}

View File

@@ -57,6 +57,8 @@
import org.apache.hadoop.yarn.proto.ClientAMProtocol.CompInstancesUpgradeRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.ComponentCountProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.RestartServiceRequestProto;
@@ -66,6 +68,7 @@
import org.apache.hadoop.yarn.service.ClientAMProtocol;
import org.apache.hadoop.yarn.service.ServiceMaster;
import org.apache.hadoop.yarn.service.api.records.Container;
+import org.apache.hadoop.yarn.service.api.records.ContainerState;
import org.apache.hadoop.yarn.service.api.records.Component;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.api.records.ServiceState;
@@ -100,6 +103,7 @@
import java.text.MessageFormat;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;

import static org.apache.hadoop.yarn.api.records.YarnApplicationState.*;
import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.*;
@@ -318,6 +322,49 @@ public int actionCleanUp(String appName, String userName) throws
    }
  }

+  @Override
+  public String getInstances(String appName,
+      List<String> components, String version, List<String> containerStates)
+      throws IOException, YarnException {
+    GetCompInstancesResponseProto result = filterContainers(appName, components,
+        version, containerStates);
+    return result.getCompInstances();
+  }
+
+  public Container[] getContainers(String appName, List<String> components,
+      String version, List<ContainerState> containerStates)
+      throws IOException, YarnException {
+    GetCompInstancesResponseProto result = filterContainers(appName, components,
+        version, containerStates != null ? containerStates.stream()
+            .map(Enum::toString).collect(Collectors.toList()) : null);
+    return ServiceApiUtil.CONTAINER_JSON_SERDE.fromJson(
+        result.getCompInstances());
+  }
+
+  private GetCompInstancesResponseProto filterContainers(String appName,
+      List<String> components, String version,
+      List<String> containerStates) throws IOException, YarnException {
+    ApplicationReport appReport = yarnClient.getApplicationReport(getAppId(
+        appName));
+    if (StringUtils.isEmpty(appReport.getHost())) {
+      throw new YarnException(appName + " AM hostname is empty.");
+    }
+    ClientAMProtocol proxy = createAMProxy(appName, appReport);
+    GetCompInstancesRequestProto.Builder req = GetCompInstancesRequestProto
+        .newBuilder();
+    if (components != null && !components.isEmpty()) {
+      req.addAllComponentNames(components);
+    }
+    if (version != null) {
+      req.setVersion(version);
+    }
+    if (containerStates != null && !containerStates.isEmpty()) {
+      req.addAllContainerStates(containerStates);
+    }
+    return proxy.getCompInstances(req.build());
+  }
+
  public int actionUpgrade(Service service, List<Container> compInstances)
      throws IOException, YarnException {
    ApplicationReport appReport =

View File

@@ -97,6 +97,7 @@ public class ComponentInstance implements EventHandler<ComponentInstanceEvent>,
  private long containerStartedTime = 0;
  // This container object is used for rest API query
  private org.apache.hadoop.yarn.service.api.records.Container containerSpec;
+  private String serviceVersion;

  private static final StateMachineFactory<ComponentInstance,
@@ -194,6 +195,8 @@ private static class ContainerStartedTransition extends BaseTransition {
      compInstance.getCompSpec().addContainer(container);
      compInstance.containerStartedTime = containerStartTime;
      compInstance.component.incRunningContainers();
+      compInstance.serviceVersion = compInstance.scheduler.getApp()
+          .getVersion();

      if (compInstance.timelineServiceEnabled) {
        compInstance.serviceTimelinePublisher
@@ -210,6 +213,8 @@ public void transition(ComponentInstance compInstance,
      if (compInstance.getState().equals(ComponentInstanceState.UPGRADING)) {
        compInstance.component.incContainersReady(false);
        compInstance.component.decContainersThatNeedUpgrade();
+        compInstance.serviceVersion = compInstance.component.getUpgradeEvent()
+            .getUpgradeVersion();
        ComponentEvent checkState = new ComponentEvent(
            compInstance.component.getName(), ComponentEventType.CHECK_STABLE);
        compInstance.scheduler.getDispatcher().getEventHandler().handle(
@@ -382,6 +387,30 @@ public ComponentInstanceState getState() {
    }
  }

+  /**
+   * Returns the version of the service that this instance is currently on.
+   */
+  public String getServiceVersion() {
+    this.readLock.lock();
+    try {
+      return this.serviceVersion;
+    } finally {
+      this.readLock.unlock();
+    }
+  }
+
+  /**
+   * Returns the state of the container in the container spec.
+   */
+  public ContainerState getContainerState() {
+    this.readLock.lock();
+    try {
+      return this.containerSpec.getState();
+    } finally {
+      this.readLock.unlock();
+    }
+  }
+
  @Override
  public void handle(ComponentInstanceEvent event) {
    try {
@@ -667,8 +696,16 @@ public int compareTo(ComponentInstance to) {
    return result;
  }

-  @VisibleForTesting public org.apache.hadoop.yarn.service.api.records
+  /**
+   * Returns the container spec.
+   */
+  public org.apache.hadoop.yarn.service.api.records
      .Container getContainerSpec() {
-    return containerSpec;
+    readLock.lock();
+    try {
+      return containerSpec;
+    } finally {
+      readLock.unlock();
+    }
  }
}

View File

@@ -37,11 +37,14 @@ public interface RestApiConstants {
  String COMPONENTS = "components";
  String COMPONENTS_PATH = SERVICE_PATH + "/" + COMPONENTS;

+  // Query param
  String SERVICE_NAME = "service_name";
  String COMPONENT_NAME = "component_name";
  String COMP_INSTANCE_NAME = "component_instance_name";
+  String PARAM_COMP_NAME = "componentName";
+  String PARAM_VERSION = "version";
+  String PARAM_CONTAINER_STATE = "containerState";

  String MEDIA_TYPE_JSON_UTF8 = MediaType.APPLICATION_JSON + ";charset=utf-8";

  Long DEFAULT_UNLIMITED_LIFETIME = -1l;

View File

@@ -34,6 +34,8 @@
import org.apache.hadoop.yarn.proto.ClientAMProtocol.CompInstancesUpgradeRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsResponseProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
import org.apache.hadoop.yarn.service.impl.pb.service.ClientAMProtocolPB;
@@ -128,4 +130,15 @@ public CompInstancesUpgradeResponseProto upgrade(
    }
    return null;
  }
+
+  @Override
+  public GetCompInstancesResponseProto getCompInstances(
+      GetCompInstancesRequestProto request) throws IOException, YarnException {
+    try {
+      return proxy.getCompInstances(null, request);
+    } catch (ServiceException e) {
+      RPCUtil.unwrapAndThrowException(e);
+    }
+    return null;
+  }
}

View File

@@ -25,6 +25,8 @@
import org.apache.hadoop.yarn.proto.ClientAMProtocol.CompInstancesUpgradeResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsResponseProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.RestartServiceRequestProto;
@@ -103,4 +105,15 @@ public CompInstancesUpgradeResponseProto upgrade(RpcController controller,
      throw new ServiceException(e);
    }
  }
+
+  @Override
+  public GetCompInstancesResponseProto getCompInstances(
+      RpcController controller, GetCompInstancesRequestProto request)
+      throws ServiceException {
+    try {
+      return real.getCompInstances(request);
+    } catch (IOException | YarnException e) {
+      throw new ServiceException(e);
+    }
+  }
}

View File

@@ -0,0 +1,81 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.service.utils;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.proto.ClientAMProtocol;
import org.apache.hadoop.yarn.service.ServiceContext;
import org.apache.hadoop.yarn.service.api.records.Container;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class FilterUtils {
/**
* Returns containers filtered by requested fields.
*
* @param context service context
* @param filterReq filter request
*/
public static List<Container> filterInstances(ServiceContext context,
ClientAMProtocol.GetCompInstancesRequestProto filterReq) {
List<Container> results = new ArrayList<>();
Map<ContainerId, ComponentInstance> instances =
context.scheduler.getLiveInstances();
instances.forEach(((containerId, instance) -> {
boolean include = true;
if (filterReq.getComponentNamesList() != null &&
!filterReq.getComponentNamesList().isEmpty()) {
// filter by component name
if (!filterReq.getComponentNamesList().contains(
instance.getComponent().getName())) {
include = false;
}
}
if (filterReq.getVersion() != null && !filterReq.getVersion().isEmpty()) {
// filter by version
String instanceServiceVersion = instance.getServiceVersion();
if (instanceServiceVersion == null || !instanceServiceVersion.equals(
filterReq.getVersion())) {
include = false;
}
}
if (filterReq.getContainerStatesList() != null &&
!filterReq.getContainerStatesList().isEmpty()) {
// filter by state
if (!filterReq.getContainerStatesList().contains(
instance.getContainerState().toString())) {
include = false;
}
}
if (include) {
results.add(instance.getContainerSpec());
}
}));
return results;
}
}

View File

@@ -72,6 +72,15 @@ public class ServiceApiUtil {
  public static JsonSerDeser<Service> jsonSerDeser =
      new JsonSerDeser<>(Service.class,
          PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
+
+  public static final JsonSerDeser<Container[]> CONTAINER_JSON_SERDE =
+      new JsonSerDeser<>(Container[].class,
+          PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
+
+  public static final JsonSerDeser<Component[]> COMP_JSON_SERDE =
+      new JsonSerDeser<>(Component[].class,
+          PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
+
  private static final PatternValidator namePattern
      = new PatternValidator("[a-z][a-z0-9-]*");

View File

@@ -32,6 +32,8 @@ service ClientAMProtocolService {
      returns (RestartServiceResponseProto);
  rpc upgrade(CompInstancesUpgradeRequestProto) returns
      (CompInstancesUpgradeResponseProto);
+  rpc getCompInstances(GetCompInstancesRequestProto) returns
+      (GetCompInstancesResponseProto);
}

message FlexComponentsRequestProto {
@@ -81,4 +83,14 @@ message CompInstancesUpgradeRequestProto {
}

message CompInstancesUpgradeResponseProto {
}
+
+message GetCompInstancesRequestProto {
+  repeated string componentNames = 1;
+  optional string version = 2;
+  repeated string containerStates = 3;
+}
+
+message GetCompInstancesResponseProto {
+  optional string compInstances = 1;
+}
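Because the new RPC returns its payload as a JSON string rather than nested protobuf messages, a caller round-trips it roughly as sketched below. This assumes an AM proxy obtained the same way ServiceClient#filterContainers does above; the component name, version, and state are invented for illustration:

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesResponseProto;
import org.apache.hadoop.yarn.service.ClientAMProtocol;
import org.apache.hadoop.yarn.service.api.records.Container;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;

// Hypothetical helper: fetch READY containers of component "worker" at v1.0.0.
public final class CompInstancesQuerySketch {
  public static Container[] readyWorkers(ClientAMProtocol proxy)
      throws IOException, YarnException {
    GetCompInstancesRequestProto req = GetCompInstancesRequestProto.newBuilder()
        .addAllComponentNames(Arrays.asList("worker"))
        .setVersion("1.0.0")
        .addAllContainerStates(Collections.singletonList("READY"))
        .build();
    GetCompInstancesResponseProto resp = proxy.getCompInstances(req);
    // compInstances is a JSON string, so it is turned back into typed records
    // with the shared serde introduced in ServiceApiUtil.
    return ServiceApiUtil.CONTAINER_JSON_SERDE.fromJson(resp.getCompInstances());
  }
}

Carrying the result as a single optional string keeps the protobuf surface small at the cost of a second (JSON) serialization, which is the trade-off implied by the compInstances field above.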

View File

@@ -0,0 +1,154 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.service;
import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.client.api.NMClient;
import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.component.Component;
import org.apache.hadoop.yarn.service.component.ComponentEvent;
import org.apache.hadoop.yarn.service.component.ComponentEventType;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEvent;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType;
import org.apache.hadoop.yarn.service.containerlaunch.ContainerLaunchService;
import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders;
import org.mockito.stubbing.Answer;
import java.io.IOException;
import java.util.Map;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Mocked service context for a running service.
*/
public class MockRunningServiceContext extends ServiceContext {
public MockRunningServiceContext(ServiceTestUtils.ServiceFSWatcher fsWatcher,
Service serviceDef) throws Exception {
super();
this.service = serviceDef;
this.fs = fsWatcher.getFs();
ContainerLaunchService mockLaunchService = mock(
ContainerLaunchService.class);
this.scheduler = new ServiceScheduler(this) {
@Override
protected YarnRegistryViewForProviders
createYarnRegistryOperations(
ServiceContext context, RegistryOperations registryClient) {
return mock(YarnRegistryViewForProviders.class);
}
@Override
public NMClientAsync createNMClient() {
NMClientAsync nmClientAsync = super.createNMClient();
NMClient nmClient = mock(NMClient.class);
try {
when(nmClient.getContainerStatus(anyObject(), anyObject()))
.thenAnswer(
(Answer<ContainerStatus>) invocation -> ContainerStatus
.newInstance((ContainerId) invocation.getArguments()[0],
org.apache.hadoop.yarn.api.records.ContainerState
.RUNNING,
"", 0));
} catch (YarnException | IOException e) {
throw new RuntimeException(e);
}
nmClientAsync.setClient(nmClient);
return nmClientAsync;
}
@Override
public ContainerLaunchService getContainerLaunchService() {
return mockLaunchService;
}
};
this.scheduler.init(fsWatcher.getConf());
ServiceTestUtils.createServiceManager(this);
doNothing().when(mockLaunchService).
reInitCompInstance(anyObject(), anyObject(), anyObject(), anyObject());
stabilizeComponents(this);
}
private void stabilizeComponents(ServiceContext context) {
ApplicationId appId = ApplicationId.fromString(context.service.getId());
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
context.attemptId = attemptId;
Map<String, Component>
componentState = context.scheduler.getAllComponents();
int counter = 0;
for (org.apache.hadoop.yarn.service.api.records.Component componentSpec :
context.service.getComponents()) {
Component component = new org.apache.hadoop.yarn.service.component.
Component(componentSpec, 1L, context);
componentState.put(component.getName(), component);
component.handle(new ComponentEvent(component.getName(),
ComponentEventType.FLEX));
for (int i = 0; i < componentSpec.getNumberOfContainers(); i++) {
counter++;
assignNewContainer(attemptId, counter, component);
}
component.handle(new ComponentEvent(component.getName(),
ComponentEventType.CHECK_STABLE));
}
}
public void assignNewContainer(ApplicationAttemptId attemptId,
long containerNum, Component component) {
Container container = org.apache.hadoop.yarn.api.records.Container
.newInstance(ContainerId.newContainerId(attemptId, containerNum),
NODE_ID, "localhost", null, null,
null);
component.handle(new ComponentEvent(component.getName(),
ComponentEventType.CONTAINER_ALLOCATED)
.setContainer(container).setContainerId(container.getId()));
ComponentInstance instance = this.scheduler.getLiveInstances().get(
container.getId());
ComponentInstanceEvent startEvent = new ComponentInstanceEvent(
container.getId(), ComponentInstanceEventType.START);
instance.handle(startEvent);
ComponentInstanceEvent readyEvent = new ComponentInstanceEvent(
container.getId(), ComponentInstanceEventType.BECOME_READY);
instance.handle(readyEvent);
}
private static final NodeId NODE_ID = NodeId.fromString("localhost:0");
}

View File

@@ -166,7 +166,7 @@ public void testFlexComponents() throws Throwable {
    checkApp(serviceName, "master", 1L, 1000L, "qname");
  }

-  @Test (timeout = 180000)
+  @Test
  public void testInitiateServiceUpgrade() throws Exception {
    String[] args = {"app", "-upgrade", "app-1",
        "-initiate", ExampleAppJson.resourceName(ExampleAppJson.APP_JSON),
@@ -185,7 +185,7 @@ public void testInitiateAutoFinalizeServiceUpgrade() throws Exception {
    Assert.assertEquals(result, 0);
  }

-  @Test (timeout = 180000)
+  @Test
  public void testUpgradeInstances() throws Exception {
    conf.set(YARN_APP_ADMIN_CLIENT_PREFIX + DUMMY_APP_TYPE,
        DummyServiceClient.class.getName());
@@ -197,7 +197,7 @@ public void testUpgradeInstances() throws Exception {
    Assert.assertEquals(result, 0);
  }

-  @Test (timeout = 180000)
+  @Test
  public void testUpgradeComponents() throws Exception {
    conf.set(YARN_APP_ADMIN_CLIENT_PREFIX + DUMMY_APP_TYPE,
        DummyServiceClient.class.getName());
@@ -209,6 +209,18 @@ public void testUpgradeComponents() throws Exception {
    Assert.assertEquals(result, 0);
  }

+  @Test
+  public void testGetInstances() throws Exception {
+    conf.set(YARN_APP_ADMIN_CLIENT_PREFIX + DUMMY_APP_TYPE,
+        DummyServiceClient.class.getName());
+    cli.setConf(conf);
+    String[] args = {"container", "-list", "app-1",
+        "-components", "comp1,comp2",
+        "-appTypes", DUMMY_APP_TYPE};
+    int result = cli.run(ApplicationCLI.preProcessArgs(args));
+    Assert.assertEquals(result, 0);
+  }
+
  @Test (timeout = 180000)
  public void testEnableFastLaunch() throws Exception {
    fs.getFileSystem().create(new Path(basedir.getAbsolutePath(), "test.jar"))
@@ -313,5 +325,12 @@ public int actionUpgradeComponents(String appName, List<String> components)
        throws IOException, YarnException {
      return 0;
    }
+
+    @Override
+    public String getInstances(String appName, List<String> components,
+        String version, List<String> containerStates)
+        throws IOException, YarnException {
+      return "";
+    }
  }
}
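The testGetInstances case above drives the new query through ApplicationCLI, which suggests a shell invocation along the lines of yarn container -list app-1 -components comp1,comp2 -appTypes yarn-service; only the flags exercised by the test are certain, the rest is inferred. A minimal programmatic sketch of the same call, with the service name and app type as assumptions:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.yarn.client.cli.ApplicationCLI;

// Hypothetical driver mirroring testGetInstances: list component instances of
// service "app-1" restricted to comp1 and comp2.
public class ContainerListSketch {
  public static void main(String[] args) throws Exception {
    String[] cliArgs = {"container", "-list", "app-1",
        "-components", "comp1,comp2",
        "-appTypes", "yarn-service"};
    int rc = ToolRunner.run(new Configuration(), new ApplicationCLI(),
        ApplicationCLI.preProcessArgs(cliArgs));
    System.exit(rc);
  }
}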

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.service.client;

+import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -32,8 +33,12 @@
import org.apache.hadoop.yarn.service.ClientAMProtocol;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.CompInstancesUpgradeRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.CompInstancesUpgradeResponseProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesResponseProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.UpgradeServiceRequestProto;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.UpgradeServiceResponseProto;
+import org.apache.hadoop.yarn.service.MockRunningServiceContext;
+import org.apache.hadoop.yarn.service.ServiceContext;
import org.apache.hadoop.yarn.service.ServiceTestUtils;
import org.apache.hadoop.yarn.service.api.records.Component;
import org.apache.hadoop.yarn.service.api.records.Container;
@@ -41,6 +46,7 @@
import org.apache.hadoop.yarn.service.api.records.ServiceState;
import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
import org.apache.hadoop.yarn.service.exceptions.ErrorStrings;
+import org.apache.hadoop.yarn.service.utils.FilterUtils;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.junit.Assert;
import org.junit.Rule;
@@ -52,6 +58,7 @@
import java.io.IOException;
import java.util.ArrayList;
+import java.util.List;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -122,6 +129,26 @@ public void testActionCompInstanceUpgrade() throws Exception {
    client.stop();
  }

+  @Test
+  public void testGetCompInstances() throws Exception {
+    Service service = createService();
+    MockServiceClient client = MockServiceClient.create(rule, service, true);
+
+    //upgrade the service
+    service.setVersion("v2");
+    client.initiateUpgrade(service);
+
+    //add containers to the component that needs to be upgraded.
+    Component comp = service.getComponents().iterator().next();
+    ContainerId containerId = ContainerId.newContainerId(client.attemptId, 1L);
+    comp.addContainer(new Container().id(containerId.toString()));
+
+    Container[] containers = client.getContainers(service.getName(),
+        Lists.newArrayList("compa"), "v1", null);
+    Assert.assertEquals("num containers", 2, containers.length);
+
+    client.stop();
+  }
+
  private Service createService() throws IOException,
      YarnException {
    Service service = ServiceTestUtils.createExampleApplication();
@@ -137,6 +164,7 @@ private static final class MockServiceClient extends ServiceClient {
    private final ClientAMProtocol amProxy;
    private Object proxyResponse;
    private Service service;
+    private ServiceContext context;

    private MockServiceClient() {
      amProxy = mock(ClientAMProtocol.class);
@@ -147,8 +175,12 @@ private MockServiceClient() {
    static MockServiceClient create(ServiceTestUtils.ServiceFSWatcher rule,
        Service service, boolean enableUpgrade)
-        throws IOException, YarnException {
+        throws Exception {
      MockServiceClient client = new MockServiceClient();
+      ApplicationId applicationId = ApplicationId.newInstance(
+          System.currentTimeMillis(), 1);
+      service.setId(applicationId.toString());
+      client.context = new MockRunningServiceContext(rule, service);

      YarnClient yarnClient = createMockYarnClient();
      ApplicationReport appReport = mock(ApplicationReport.class);
@@ -175,10 +207,28 @@ static MockServiceClient create(ServiceTestUtils.ServiceFSWatcher rule,
          CompInstancesUpgradeRequestProto.class))).thenAnswer(
          (Answer<CompInstancesUpgradeResponseProto>) invocation -> {
            CompInstancesUpgradeResponseProto response =
                CompInstancesUpgradeResponseProto.newBuilder().build();
            client.proxyResponse = response;
            return response;
          });
+      when(client.amProxy.getCompInstances(Matchers.any(
+          GetCompInstancesRequestProto.class))).thenAnswer(
+          (Answer<GetCompInstancesResponseProto>) invocation -> {
+            GetCompInstancesRequestProto req = (GetCompInstancesRequestProto)
+                invocation.getArguments()[0];
+            List<Container> containers = FilterUtils.filterInstances(
+                client.context, req);
+            GetCompInstancesResponseProto response =
+                GetCompInstancesResponseProto.newBuilder().setCompInstances(
+                    ServiceApiUtil.CONTAINER_JSON_SERDE.toJson(
+                        containers.toArray(new Container[containers.size()])))
+                    .build();
+            client.proxyResponse = response;
+            return response;
+          });
+
      client.setFileSystem(rule.getFs());
      client.setYarnClient(yarnClient);
      client.service = service;
View File

@@ -18,19 +18,10 @@

package org.apache.hadoop.yarn.service.component;

-import org.apache.hadoop.registry.client.api.RegistryOperations;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
-import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.client.api.NMClient;
-import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
-import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.service.ServiceContext;
-import org.apache.hadoop.yarn.service.ServiceScheduler;
import org.apache.hadoop.yarn.service.ServiceTestUtils;
import org.apache.hadoop.yarn.service.TestServiceManager;
import org.apache.hadoop.yarn.service.api.records.ComponentState;
@@ -38,23 +29,15 @@
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEvent;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType;
+import org.apache.hadoop.yarn.service.MockRunningServiceContext;
-import org.apache.hadoop.yarn.service.containerlaunch.ContainerLaunchService;
-import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
-import org.mockito.stubbing.Answer;

-import java.io.IOException;
import java.util.Iterator;
-import java.util.Map;

import static org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType.STOP;
-import static org.mockito.Matchers.anyObject;
-import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -63,7 +46,6 @@
 */
public class TestComponent {

-  private static final int WAIT_MS_PER_LOOP = 1000;
  static final Logger LOG = Logger.getLogger(TestComponent.class);

  @Rule
@@ -115,7 +97,7 @@ public void testCheckState() throws Exception {
  @Test
  public void testContainerCompletedWhenUpgrading() throws Exception {
    String serviceName = "testContainerComplete";
-    ServiceContext context = createTestContext(rule, serviceName);
+    MockRunningServiceContext context = createTestContext(rule, serviceName);
    Component comp = context.scheduler.getAllComponents().entrySet().iterator()
        .next().getValue();
@@ -148,7 +130,7 @@ public void testContainerCompletedWhenUpgrading() throws Exception {
        ComponentState.FLEXING, comp.getComponentSpec().getState());

    // new container get allocated
-    assignNewContainer(context.attemptId, 10, context, comp);
+    context.assignNewContainer(context.attemptId, 10, comp);

    // second instance finished upgrading
    ComponentInstance instance2 = instanceIter.next();
@@ -174,7 +156,7 @@ public void testComponentStateUpdatesWithTerminatingComponents() throws
        serviceName);
    TestServiceManager.createDef(serviceName, testService);

-    ServiceContext context = createTestContext(rule, testService);
+    ServiceContext context = new MockRunningServiceContext(rule, testService);

    for (Component comp : context.scheduler.getAllComponents().values()) {
@@ -225,114 +207,11 @@ public void testComponentStateUpdatesWithTerminatingComponents() throws
    return spec;
  }

-  public static ServiceContext createTestContext(
+  public static MockRunningServiceContext createTestContext(
      ServiceTestUtils.ServiceFSWatcher fsWatcher, String serviceName)
      throws Exception {
-    return createTestContext(fsWatcher,
+    return new MockRunningServiceContext(fsWatcher,
        TestServiceManager.createBaseDef(serviceName));
  }
public static ServiceContext createTestContext(
ServiceTestUtils.ServiceFSWatcher fsWatcher, Service serviceDef)
throws Exception {
ServiceContext context = new ServiceContext();
context.service = serviceDef;
context.fs = fsWatcher.getFs();
ContainerLaunchService mockLaunchService = mock(
ContainerLaunchService.class);
context.scheduler = new ServiceScheduler(context) {
@Override protected YarnRegistryViewForProviders
createYarnRegistryOperations(
ServiceContext context, RegistryOperations registryClient) {
return mock(YarnRegistryViewForProviders.class);
}
@Override public NMClientAsync createNMClient() {
NMClientAsync nmClientAsync = super.createNMClient();
NMClient nmClient = mock(NMClient.class);
try {
when(nmClient.getContainerStatus(anyObject(), anyObject()))
.thenAnswer(
(Answer<ContainerStatus>) invocation -> ContainerStatus
.newInstance((ContainerId) invocation.getArguments()[0],
org.apache.hadoop.yarn.api.records.ContainerState
.RUNNING,
"", 0));
} catch (YarnException | IOException e) {
throw new RuntimeException(e);
}
nmClientAsync.setClient(nmClient);
return nmClientAsync;
}
@Override public ContainerLaunchService getContainerLaunchService() {
return mockLaunchService;
}
};
context.scheduler.init(fsWatcher.getConf());
ServiceTestUtils.createServiceManager(context);
doNothing().when(mockLaunchService).
reInitCompInstance(anyObject(), anyObject(), anyObject(), anyObject());
stabilizeComponents(context);
return context;
}
private static void stabilizeComponents(ServiceContext context) {
ApplicationId appId = ApplicationId.fromString(context.service.getId());
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
context.attemptId = attemptId;
Map<String, Component>
componentState = context.scheduler.getAllComponents();
int counter = 0;
for (org.apache.hadoop.yarn.service.api.records.Component componentSpec :
context.service.getComponents()) {
Component component = new org.apache.hadoop.yarn.service.component.
Component(componentSpec, 1L, context);
componentState.put(component.getName(), component);
component.handle(new ComponentEvent(component.getName(),
ComponentEventType.FLEX));
for (int i = 0; i < componentSpec.getNumberOfContainers(); i++) {
counter++;
assignNewContainer(attemptId, counter, context, component);
}
component.handle(new ComponentEvent(component.getName(),
ComponentEventType.CHECK_STABLE));
}
}
private static void assignNewContainer(
ApplicationAttemptId attemptId, long containerNum,
ServiceContext context, Component component) {
Container container = org.apache.hadoop.yarn.api.records.Container
.newInstance(ContainerId.newContainerId(attemptId, containerNum),
NODE_ID, "localhost", null, null,
null);
component.handle(new ComponentEvent(component.getName(),
ComponentEventType.CONTAINER_ALLOCATED)
.setContainer(container).setContainerId(container.getId()));
ComponentInstance instance = context.scheduler.getLiveInstances().get(
container.getId());
ComponentInstanceEvent startEvent = new ComponentInstanceEvent(
container.getId(), ComponentInstanceEventType.START);
instance.handle(startEvent);
ComponentInstanceEvent readyEvent = new ComponentInstanceEvent(
container.getId(), ComponentInstanceEventType.BECOME_READY);
instance.handle(readyEvent);
}
private static final NodeId NODE_ID = NodeId.fromString("localhost:0");
}

View File

@@ -6,9 +6,9 @@
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
- * <p>
+ *
 * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -60,19 +60,20 @@
 */
public class TestComponentInstance {

-  @Rule public ServiceTestUtils.ServiceFSWatcher rule =
+  @Rule
+  public ServiceTestUtils.ServiceFSWatcher rule =
      new ServiceTestUtils.ServiceFSWatcher();

-  @Test public void testContainerUpgrade() throws Exception {
+  @Test
+  public void testContainerUpgrade() throws Exception {
    ServiceContext context = TestComponent.createTestContext(rule,
        "testContainerUpgrade");
-    Component component =
-        context.scheduler.getAllComponents().entrySet().iterator().next()
-            .getValue();
+    Component component = context.scheduler.getAllComponents().entrySet()
+        .iterator().next().getValue();
    upgradeComponent(component);

-    ComponentInstance instance =
-        component.getAllComponentInstances().iterator().next();
+    ComponentInstance instance = component.getAllComponentInstances().iterator()
+        .next();

    ComponentInstanceEvent instanceEvent = new ComponentInstanceEvent(
        instance.getContainer().getId(), ComponentInstanceEventType.UPGRADE);
    instance.handle(instanceEvent);
@@ -82,16 +83,16 @@ public class TestComponentInstance {
        containerSpec.getState());
  }

-  @Test public void testContainerReadyAfterUpgrade() throws Exception {
+  @Test
+  public void testContainerReadyAfterUpgrade() throws Exception {
    ServiceContext context = TestComponent.createTestContext(rule,
        "testContainerStarted");
-    Component component =
-        context.scheduler.getAllComponents().entrySet().iterator().next()
-            .getValue();
+    Component component = context.scheduler.getAllComponents().entrySet()
+        .iterator().next().getValue();
    upgradeComponent(component);

-    ComponentInstance instance =
-        component.getAllComponentInstances().iterator().next();
+    ComponentInstance instance = component.getAllComponentInstances().iterator()
+        .next();

    ComponentInstanceEvent instanceEvent = new ComponentInstanceEvent(
        instance.getContainer().getId(), ComponentInstanceEventType.UPGRADE);
@@ -100,9 +101,8 @@ public class TestComponentInstance {
    instance.handle(new ComponentInstanceEvent(instance.getContainer().getId(),
        ComponentInstanceEventType.BECOME_READY));
    Assert.assertEquals("instance not ready", ContainerState.READY,
-        instance.getCompSpec()
-            .getContainer(instance.getContainer().getId().toString())
-            .getState());
+        instance.getCompSpec().getContainer(
+            instance.getContainer().getId().toString()).getState());
  }

  private void upgradeComponent(Component component) {
@@ -113,9 +113,8 @@ private void upgradeComponent(Component component) {
  private Component createComponent(ServiceScheduler scheduler,
      org.apache.hadoop.yarn.service.api.records.Component.RestartPolicyEnum
-          restartPolicy,
-      int nSucceededInstances, int nFailedInstances, int totalAsk,
+          restartPolicy, int nSucceededInstances, int nFailedInstances,
+      int totalAsk, int componentId) {
int componentId) {
assert (nSucceededInstances + nFailedInstances) <= totalAsk; assert (nSucceededInstances + nFailedInstances) <= totalAsk;
@ -214,7 +213,8 @@ private ComponentInstance createComponentInstance(Component component,
return componentInstance; return componentInstance;
} }
@Test public void testComponentRestartPolicy() { @Test
public void testComponentRestartPolicy() {
Map<String, Component> allComponents = new HashMap<>(); Map<String, Component> allComponents = new HashMap<>();
Service mockService = mock(Service.class); Service mockService = mock(Service.class);

View File

@ -0,0 +1,102 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.service.utils;
import com.google.common.collect.Lists;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
import org.apache.hadoop.yarn.service.ServiceContext;
import org.apache.hadoop.yarn.service.ServiceTestUtils;
import org.apache.hadoop.yarn.service.TestServiceManager;
import org.apache.hadoop.yarn.service.api.records.Container;
import org.apache.hadoop.yarn.service.MockRunningServiceContext;
import org.apache.hadoop.yarn.service.api.records.ContainerState;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import java.util.List;
public class TestFilterUtils {
@Rule
public ServiceTestUtils.ServiceFSWatcher rule =
new ServiceTestUtils.ServiceFSWatcher();
@Test
public void testNoFilter() throws Exception {
GetCompInstancesRequestProto req = GetCompInstancesRequestProto.newBuilder()
.build();
List<Container> containers = FilterUtils.filterInstances(
new MockRunningServiceContext(rule,
TestServiceManager.createBaseDef("service")), req);
Assert.assertEquals("num containers", 4, containers.size());
}
@Test
public void testFilterWithComp() throws Exception {
GetCompInstancesRequestProto req = GetCompInstancesRequestProto.newBuilder()
.addAllComponentNames(Lists.newArrayList("compa")).build();
List<Container> containers = FilterUtils.filterInstances(
new MockRunningServiceContext(rule,
TestServiceManager.createBaseDef("service")), req);
Assert.assertEquals("num containers", 2, containers.size());
}
@Test
public void testFilterWithVersion() throws Exception {
ServiceContext sc = new MockRunningServiceContext(rule,
TestServiceManager.createBaseDef("service"));
GetCompInstancesRequestProto.Builder reqBuilder =
GetCompInstancesRequestProto.newBuilder();
reqBuilder.setVersion("v2");
Assert.assertEquals("num containers", 0,
FilterUtils.filterInstances(sc, reqBuilder.build()).size());
reqBuilder.addAllComponentNames(Lists.newArrayList("compa"))
.setVersion("v1").build();
Assert.assertEquals("num containers", 2,
FilterUtils.filterInstances(sc, reqBuilder.build()).size());
reqBuilder.setVersion("v2").build();
Assert.assertEquals("num containers", 0,
FilterUtils.filterInstances(sc, reqBuilder.build()).size());
}
@Test
public void testFilterWithState() throws Exception {
ServiceContext sc = new MockRunningServiceContext(rule,
TestServiceManager.createBaseDef("service"));
GetCompInstancesRequestProto.Builder reqBuilder =
GetCompInstancesRequestProto.newBuilder();
reqBuilder.addAllContainerStates(Lists.newArrayList(
ContainerState.READY.toString()));
Assert.assertEquals("num containers", 4,
FilterUtils.filterInstances(sc, reqBuilder.build()).size());
reqBuilder.clearContainerStates();
reqBuilder.addAllContainerStates(Lists.newArrayList(
ContainerState.STOPPED.toString()));
Assert.assertEquals("num containers", 0,
FilterUtils.filterInstances(sc, reqBuilder.build()).size());
}
}
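These tests fix the intended semantics of the new filtering: an instance is returned only if it satisfies every filter that is actually set, a multi-valued filter matches when the instance's value is in the supplied list, and empty filters are ignored. FilterUtils itself is added elsewhere in this commit; the sketch below only illustrates logic consistent with these tests, and the ComponentInstance accessors getCompName(), getServiceVersion() and getContainerSpec() are assumptions made for illustration.

package org.apache.hadoop.yarn.service.utils;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

import com.google.common.base.Strings;
import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetCompInstancesRequestProto;
import org.apache.hadoop.yarn.service.ServiceContext;
import org.apache.hadoop.yarn.service.api.records.Container;
import org.apache.hadoop.yarn.service.api.records.ContainerState;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;

public final class FilterUtilsSketch {
  private FilterUtilsSketch() {
  }

  /**
   * Returns the container specs of the live instances that satisfy every
   * filter present in the request. Filters left empty are ignored.
   */
  public static List<Container> filterInstances(ServiceContext context,
      GetCompInstancesRequestProto req) {
    List<ContainerState> requestedStates = req.getContainerStatesList()
        .stream().map(ContainerState::valueOf).collect(Collectors.toList());
    List<Container> result = new ArrayList<>();
    for (ComponentInstance instance :
        context.scheduler.getLiveInstances().values()) {
      // Component-name filter: keep only instances of the listed components.
      if (!req.getComponentNamesList().isEmpty() &&
          !req.getComponentNamesList().contains(instance.getCompName())) {
        continue;
      }
      // Version filter: keep only instances running the requested version.
      if (!Strings.isNullOrEmpty(req.getVersion()) &&
          !req.getVersion().equals(instance.getServiceVersion())) {
        continue;
      }
      // State filter: keep only instances whose container is in one of the
      // requested states.
      Container containerSpec = instance.getContainerSpec();
      if (!requestedStates.isEmpty() &&
          !requestedStates.contains(containerSpec.getState())) {
        continue;
      }
      result.add(containerSpec);
    }
    return result;
  }
}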

View File

@ -105,6 +105,8 @@ public class ApplicationCLI extends YarnCLI {
public static final String UPGRADE_FINALIZE = "finalize"; public static final String UPGRADE_FINALIZE = "finalize";
public static final String COMPONENT_INSTS = "instances"; public static final String COMPONENT_INSTS = "instances";
public static final String COMPONENTS = "components"; public static final String COMPONENTS = "components";
public static final String VERSION = "version";
public static final String STATES = "states";
private static String firstArg = null; private static String firstArg = null;
@ -294,10 +296,39 @@ public int run(String[] args) throws Exception {
opts.addOption(STATUS_CMD, true, opts.addOption(STATUS_CMD, true,
"Prints the status of the container."); "Prints the status of the container.");
opts.addOption(LIST_CMD, true, opts.addOption(LIST_CMD, true,
"List containers for application attempt."); "List containers for application attempt when application " +
"attempt ID is provided. When application name is provided, " +
"then it finds the instances of the application based on app's " +
"own implementation, and -appTypes option must be specified " +
"unless it is the default yarn-service type. With app name, it " +
"supports optional use of -version to filter instances based on " +
"app version, -components to filter instances based on component " +
"names, -states to filter instances based on instance state.");
opts.addOption(HELP_CMD, false, "Displays help for all commands."); opts.addOption(HELP_CMD, false, "Displays help for all commands.");
opts.getOption(STATUS_CMD).setArgName("Container ID"); opts.getOption(STATUS_CMD).setArgName("Container ID");
opts.getOption(LIST_CMD).setArgName("Application Attempt ID"); opts.getOption(LIST_CMD).setArgName("Application Name or Attempt ID");
opts.addOption(APP_TYPE_CMD, true, "Works with -list to " +
"specify the app type when application name is provided.");
opts.getOption(APP_TYPE_CMD).setValueSeparator(',');
opts.getOption(APP_TYPE_CMD).setArgs(Option.UNLIMITED_VALUES);
opts.getOption(APP_TYPE_CMD).setArgName("Types");
opts.addOption(VERSION, true, "Works with -list "
+ "to filter instances based on input application version.");
opts.getOption(VERSION).setArgs(1);
opts.addOption(COMPONENTS, true, "Works with -list to " +
"filter instances based on input comma-separated list of " +
"component names.");
opts.getOption(COMPONENTS).setValueSeparator(',');
opts.getOption(COMPONENTS).setArgs(Option.UNLIMITED_VALUES);
opts.addOption(STATES, true, "Works with -list to " +
"filter instances based on input comma-separated list of " +
"instance states.");
opts.getOption(STATES).setValueSeparator(',');
opts.getOption(STATES).setArgs(Option.UNLIMITED_VALUES);
opts.addOption(SIGNAL_CMD, true, opts.addOption(SIGNAL_CMD, true,
"Signal the container. The available signal commands are " + "Signal the container. The available signal commands are " +
java.util.Arrays.asList(SignalContainerCommand.values()) + java.util.Arrays.asList(SignalContainerCommand.values()) +
@ -426,11 +457,40 @@ public int run(String[] args) throws Exception {
} }
listApplicationAttempts(cliParser.getOptionValue(LIST_CMD)); listApplicationAttempts(cliParser.getOptionValue(LIST_CMD));
} else if (title.equalsIgnoreCase(CONTAINER)) { } else if (title.equalsIgnoreCase(CONTAINER)) {
if (hasAnyOtherCLIOptions(cliParser, opts, LIST_CMD)) { if (hasAnyOtherCLIOptions(cliParser, opts, LIST_CMD, APP_TYPE_CMD,
VERSION, COMPONENTS, STATES)) {
printUsage(title, opts); printUsage(title, opts);
return exitCode; return exitCode;
} }
listContainers(cliParser.getOptionValue(LIST_CMD)); String appAttemptIdOrName = cliParser.getOptionValue(LIST_CMD);
try {
// Try parsing as an attempt id; if it succeeds, it is an application
// attempt id.
ApplicationAttemptId.fromString(appAttemptIdOrName);
listContainers(appAttemptIdOrName);
} catch (IllegalArgumentException e) {
// Not in appAttemptId format, so treat it as an application name. If
// app-type is not provided, assume the default yarn-service type.
AppAdminClient client = AppAdminClient
.createAppAdminClient(getSingleAppTypeFromCLI(cliParser),
getConf());
String version = cliParser.getOptionValue(VERSION);
String[] components = cliParser.getOptionValues(COMPONENTS);
String[] instanceStates = cliParser.getOptionValues(STATES);
try {
sysout.println(client.getInstances(appAttemptIdOrName,
components == null ? null : Arrays.asList(components),
version, instanceStates == null ? null :
Arrays.asList(instanceStates)));
return 0;
} catch (ApplicationNotFoundException exception) {
System.err.println("Application with name '" + appAttemptIdOrName
+ "' doesn't exist in RM or Timeline Server.");
return -1;
} catch (Exception ex) {
System.err.println(ex.getMessage());
return -1;
}
}
} }
} else if (cliParser.hasOption(KILL_CMD)) { } else if (cliParser.hasOption(KILL_CMD)) {
if (hasAnyOtherCLIOptions(cliParser, opts, KILL_CMD)) { if (hasAnyOtherCLIOptions(cliParser, opts, KILL_CMD)) {
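Put together, the new options make instance listing a single CLI call. A hypothetical invocation (the service name, version, component names and state are placeholders; -appTypes may be omitted for the default yarn-service type):

yarn container -list my-service -appTypes yarn-service -version v1 -components compa,compb -states READY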

View File

@ -2280,13 +2280,17 @@ private String createContainerCLIHelpMessage() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream(); ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter pw = new PrintWriter(baos); PrintWriter pw = new PrintWriter(baos);
pw.println("usage: container"); pw.println("usage: container");
pw.println(" -appTypes <Types> Works with -list to specify the app type when application name is provided.");
pw.println(" -components <arg> Works with -list to filter instances based on input comma-separated list of component names.");
pw.println(" -help Displays help for all commands."); pw.println(" -help Displays help for all commands.");
pw.println(" -list <Application Attempt ID> List containers for application attempt."); pw.println(" -list <Application Name or Attempt ID> List containers for application attempt when application attempt ID is provided. When application name is provided, then it finds the instances of the application based on app's own implementation, and -appTypes option must be specified unless it is the default yarn-service type. With app name, it supports optional use of -version to filter instances based on app version, -components to filter instances based on component names, -states to filter instances based on instance state.");
pw.println(" -signal <container ID [signal command]> Signal the container."); pw.println(" -signal <container ID [signal command]> Signal the container.");
pw.println("The available signal commands are "); pw.println("The available signal commands are ");
pw.println(java.util.Arrays.asList(SignalContainerCommand.values())); pw.println(java.util.Arrays.asList(SignalContainerCommand.values()));
pw.println(" Default command is OUTPUT_THREAD_DUMP."); pw.println(" Default command is OUTPUT_THREAD_DUMP.");
pw.println(" -states <arg> Works with -list to filter instances based on input comma-separated list of instance states.");
pw.println(" -status <Container ID> Prints the status of the container."); pw.println(" -status <Container ID> Prints the status of the container.");
pw.println(" -version <arg> Works with -list to filter instances based on input application version. ");
pw.close(); pw.close();
try { try {
return normalize(baos.toString("UTF-8")); return normalize(baos.toString("UTF-8"));

View File

@ -282,4 +282,10 @@ public abstract int actionUpgradeComponents(String appName,
public abstract int actionCleanUp(String appName, String userName) throws public abstract int actionCleanUp(String appName, String userName) throws
IOException, YarnException; IOException, YarnException;
@Public
@Unstable
public abstract String getInstances(String appName,
List<String> components, String version, List<String> containerStates)
throws IOException, YarnException;
} }
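The new abstract method returns the matching instances as a string that the CLI prints verbatim, leaving the wire format to the concrete client. A minimal caller sketch, assuming the default yarn-service client type and using placeholder service, component and state values:

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.client.api.AppAdminClient;

public class GetInstancesExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // "yarn-service" is the default client type; the service, component and
    // state values below are placeholders.
    AppAdminClient client =
        AppAdminClient.createAppAdminClient("yarn-service", conf);
    String instances = client.getInstances("my-service",
        Arrays.asList("compa"), "v1", Arrays.asList("READY"));
    System.out.println(instances);
  }
}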