YARN-2335. Annotate all hadoop-sls APIs as @Private. (Wei Yan via kasha)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1613478 13f79535-47bb-0310-9956-ffa450edef68
Karthik Kambatla 2014-07-25 17:12:22 +00:00
parent 1e553858f9
commit 77363b9d83
21 changed files with 88 additions and 1 deletion
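
Every file in this change follows the same pattern: import the InterfaceAudience.Private and InterfaceStability.Unstable markers from hadoop-common and place @Private and @Unstable on the top-level type (and, for NodeInfo and TaskRunner, on their nested classes as well). A minimal sketch of that pattern, using a hypothetical class name rather than one of the files touched here:

    // Hypothetical example of the annotation pattern applied across hadoop-sls.
    // @Private marks the type as internal to Hadoop (not a public API), and
    // @Unstable means it may change between releases without notice.
    package org.apache.hadoop.yarn.sls.example;  // hypothetical package

    import org.apache.hadoop.classification.InterfaceAudience.Private;
    import org.apache.hadoop.classification.InterfaceStability.Unstable;

    @Private
    @Unstable
    public class ExampleSlsComponent {
      // members of a @Private/@Unstable type carry no compatibility guarantee
    }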

RumenToSLSConverter.java

@@ -21,6 +21,8 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
@@ -42,6 +44,8 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
+@Private
+@Unstable
public class RumenToSLSConverter {
  private static final String EOL = System.getProperty("line.separator");

SLSRunner.java

@@ -32,6 +32,8 @@ import java.util.Iterator;
import java.util.Random;
import java.util.Arrays;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.rumen.JobTraceReader;
import org.apache.hadoop.tools.rumen.LoggedJob;
@@ -66,6 +68,8 @@ import org.apache.log4j.Logger;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.map.ObjectMapper;
+@Private
+@Unstable
public class SLSRunner {
  // RM, Runner
  private ResourceManager rm;

AMSimulator.java

@@ -29,6 +29,8 @@ import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
@@ -70,6 +72,8 @@ import org.apache.hadoop.yarn.sls.SLSRunner;
import org.apache.hadoop.yarn.sls.scheduler.TaskRunner;
import org.apache.hadoop.yarn.sls.utils.SLSUtils;
+@Private
+@Unstable
public abstract class AMSimulator extends TaskRunner.Task {
  // resource manager
  protected ResourceManager rm;

MRAMSimulator.java

@@ -27,6 +27,8 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
@@ -45,6 +47,8 @@ import org.apache.hadoop.yarn.sls.scheduler.ContainerSimulator;
import org.apache.hadoop.yarn.sls.SLSRunner;
import org.apache.log4j.Logger;
+@Private
+@Unstable
public class MRAMSimulator extends AMSimulator {
  /*
  Vocabulary Used:

SLSConfiguration.java

@@ -18,6 +18,11 @@
package org.apache.hadoop.yarn.sls.conf;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+@Private
+@Unstable
public class SLSConfiguration {
  // sls
  public static final String PREFIX = "yarn.sls.";

NMSimulator.java

@@ -27,6 +27,8 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.DelayQueue;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
@@ -54,6 +56,8 @@ import org.apache.hadoop.yarn.sls.scheduler.ContainerSimulator;
import org.apache.hadoop.yarn.sls.scheduler.TaskRunner;
import org.apache.hadoop.yarn.sls.utils.SLSUtils;
+@Private
+@Unstable
public class NMSimulator extends TaskRunner.Task {
  // node resource
  private RMNode node;

NodeInfo.java

@@ -21,6 +21,8 @@ package org.apache.hadoop.yarn.sls.nodemanager;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
@@ -36,6 +38,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode
    .UpdatedContainerInfo;
+@Private
+@Unstable
public class NodeInfo {
  private static int NODE_ID = 0;
@@ -43,6 +47,8 @@ public class NodeInfo {
    return NodeId.newInstance(host, port);
  }
+  @Private
+  @Unstable
  private static class FakeRMNodeImpl implements RMNode {
    private NodeId nodeId;
    private String hostName;

CapacitySchedulerMetrics.java

@@ -18,6 +18,8 @@
package org.apache.hadoop.yarn.sls.scheduler;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+@Private
+@Unstable
public class CapacitySchedulerMetrics extends SchedulerMetrics {
  public CapacitySchedulerMetrics() {

ContainerSimulator.java

@@ -21,9 +21,13 @@ package org.apache.hadoop.yarn.sls.scheduler;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource;
+@Private
+@Unstable
public class ContainerSimulator implements Delayed {
  // id
  private ContainerId id;

FairSchedulerMetrics.java

@ -18,6 +18,8 @@
package org.apache.hadoop.yarn.sls.scheduler; package org.apache.hadoop.yarn.sls.scheduler;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair
.AppSchedulable; .AppSchedulable;
@ -28,6 +30,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair
import com.codahale.metrics.Gauge; import com.codahale.metrics.Gauge;
import org.apache.hadoop.yarn.sls.SLSRunner; import org.apache.hadoop.yarn.sls.SLSRunner;
@Private
@Unstable
public class FairSchedulerMetrics extends SchedulerMetrics { public class FairSchedulerMetrics extends SchedulerMetrics {
private int totalMemoryMB = Integer.MAX_VALUE; private int totalMemoryMB = Integer.MAX_VALUE;

FifoSchedulerMetrics.java

@@ -18,12 +18,16 @@
package org.apache.hadoop.yarn.sls.scheduler;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo
    .FifoScheduler;
import com.codahale.metrics.Gauge;
+@Private
+@Unstable
public class FifoSchedulerMetrics extends SchedulerMetrics {
  public FifoSchedulerMetrics() {

NodeUpdateSchedulerEventWrapper.java

@@ -18,9 +18,13 @@
package org.apache.hadoop.yarn.sls.scheduler;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event
    .NodeUpdateSchedulerEvent;
+@Private
+@Unstable
public class NodeUpdateSchedulerEventWrapper extends NodeUpdateSchedulerEvent {
  public NodeUpdateSchedulerEventWrapper(NodeUpdateSchedulerEvent event) {

RMNodeWrapper.java

@@ -18,6 +18,8 @@
package org.apache.hadoop.yarn.sls.scheduler;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -33,6 +35,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmnode
import java.util.Collections;
import java.util.List;
+@Private
+@Unstable
public class RMNodeWrapper implements RMNode {
  private RMNode node;
  private List<UpdatedContainerInfo> updates;

ResourceSchedulerWrapper.java

@@ -36,6 +36,7 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configurable;
@@ -92,6 +93,8 @@ import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.SlidingWindowReservoir;
import com.codahale.metrics.Timer;
+@Private
+@Unstable
final public class ResourceSchedulerWrapper
    extends AbstractYarnScheduler<SchedulerApplicationAttempt, SchedulerNode>
    implements SchedulerWrapper, ResourceScheduler, Configurable {

SLSCapacityScheduler.java

@@ -17,6 +17,8 @@
 */
package org.apache.hadoop.yarn.sls.scheduler;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.util.ShutdownHookManager;
import org.apache.hadoop.yarn.sls.SLSRunner;
import org.apache.hadoop.yarn.sls.conf.SLSConfiguration;
@@ -100,6 +102,8 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
+@Private
+@Unstable
public class SLSCapacityScheduler extends CapacityScheduler implements
    SchedulerWrapper,Configurable {
  private static final String EOL = System.getProperty("line.separator");

SchedulerMetrics.java

@@ -21,6 +21,8 @@ package org.apache.hadoop.yarn.sls.scheduler;
import java.util.HashSet;
import java.util.Set;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler
    .ResourceScheduler;
@@ -30,6 +32,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
+@Private
+@Unstable
public abstract class SchedulerMetrics {
  protected ResourceScheduler scheduler;
  protected Set<String> trackedQueues;

SchedulerWrapper.java

@@ -19,11 +19,15 @@ package org.apache.hadoop.yarn.sls.scheduler;
import java.util.Set;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import com.codahale.metrics.MetricRegistry;
+@Private
+@Unstable
public interface SchedulerWrapper {
  public MetricRegistry getMetrics();

TaskRunner.java

@@ -25,9 +25,15 @@ import java.util.concurrent.Delayed;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.exceptions.YarnException;
+@Private
+@Unstable
public class TaskRunner {
+  @Private
+  @Unstable
  public abstract static class Task implements Runnable, Delayed {
    private long start;
    private long end;

SLSUtils.java

@@ -17,6 +17,8 @@
 */
package org.apache.hadoop.yarn.sls.utils;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.rumen.JobTraceReader;
@@ -36,6 +38,8 @@ import java.util.Map;
import java.util.List;
import java.util.Iterator;
+@Private
+@Unstable
public class SLSUtils {
  public static String[] getRackHostName(String hostname) {

SLSWebApp.java

@@ -30,6 +30,8 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event
    .SchedulerEventType;
import org.mortbay.jetty.Handler;
@@ -49,6 +51,8 @@ import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import org.mortbay.jetty.handler.ResourceHandler;
+@Private
+@Unstable
public class SLSWebApp extends HttpServlet {
  private static final long serialVersionUID = 1905162041950251407L;
  private transient Server server;

CHANGES.txt

@@ -439,6 +439,8 @@ Release 2.5.0 - UNRELEASED
    YARN-2319. Made the MiniKdc instance start/close before/after the class of
    TestRMWebServicesDelegationTokens. (Wenwu Peng via zjshen)
+    YARN-2335. Annotate all hadoop-sls APIs as @Private. (Wei Yan via kasha)
Release 2.4.1 - 2014-06-23
  INCOMPATIBLE CHANGES