Fix javac, javadoc and checkstyle issues before branch-2 merge.

Varun Saxena 2017-10-17 23:35:42 +05:30
parent 8b98dba7ee
commit 56e0873e22
55 changed files with 239 additions and 83 deletions

View File

@@ -141,7 +141,6 @@
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.security.AMRMTokenSecretManager;
-import org.apache.hadoop.yarn.server.timelineservice.security.TimelineV2DelegationTokenSecretManagerService.TimelineV2DelegationTokenSecretManager;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.util.Clock;
 import org.apache.hadoop.yarn.util.ControlledClock;
@@ -776,7 +775,7 @@ public void testUpdateCollectorInfo() throws Exception {
 new Text("renewer"), null);
 ident.setSequenceNumber(1);
 Token<TimelineDelegationTokenIdentifier> collectorToken =
-new Token<TimelineDelegationTokenIdentifier> (ident.getBytes(),
+new Token<TimelineDelegationTokenIdentifier>(ident.getBytes(),
 new byte[0], TimelineDelegationTokenIdentifier.KIND_NAME,
 new Text(localAddr));
 org.apache.hadoop.yarn.api.records.Token token =
@@ -824,7 +823,7 @@ protected ApplicationMasterProtocol createSchedulerProxy() {
 // new token.
 ident.setSequenceNumber(100);
 Token<TimelineDelegationTokenIdentifier> collectorToken1 =
-new Token<TimelineDelegationTokenIdentifier> (ident.getBytes(),
+new Token<TimelineDelegationTokenIdentifier>(ident.getBytes(),
 new byte[0], TimelineDelegationTokenIdentifier.KIND_NAME,
 new Text(localAddr));
 token = org.apache.hadoop.yarn.api.records.Token.newInstance(
@@ -3579,15 +3578,15 @@ public void completeContainer(ContainerId containerId) {
 }
 }
-private static class MockSchedulerForTimelineCollector
+private final static class MockSchedulerForTimelineCollector
 implements ApplicationMasterProtocol {
-CollectorInfo collectorInfo;
+private CollectorInfo collectorInfo;
-public MockSchedulerForTimelineCollector(CollectorInfo info) {
+private MockSchedulerForTimelineCollector(CollectorInfo info) {
 this.collectorInfo = info;
 }
-void updateCollectorInfo(CollectorInfo info) {
+private void updateCollectorInfo(CollectorInfo info) {
 collectorInfo = info;
 }

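The Token changes above fix checkstyle's MethodParamPad rule: no blank between a constructor (or method) name and its opening parenthesis. A minimal self-contained sketch of what the rule flags, with a hypothetical generic type standing in for Token:

class PadDemo<T> {
  private final byte[] id;

  PadDemo(byte[] id) {
    this.id = id;
  }

  static PadDemo<String> make(byte[] id) {
    // return new PadDemo<String> (id);  // flagged: space before '('
    return new PadDemo<String>(id);      // compliant
  }
}
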
View File

@@ -62,7 +62,7 @@ public class TaskAttemptUnsuccessfulCompletionEvent implements HistoryEvent {
 private long startTime;
 private static final Counters EMPTY_COUNTERS = new Counters();
-/**
+/**
 * Create an event to record the unsuccessful completion of attempts.
 * @param id Attempt ID
 * @param taskType Type of the task
@@ -232,7 +232,7 @@ public long getStartTime() {
 public String getRackName() {
 return rackName == null ? null : rackName.toString();
 }
+/** Gets the error string. */
 public String getError() { return error.toString(); }
 /**

View File

@@ -21,20 +21,21 @@
 /**
 * Constants for simple entity writers.
 */
-interface SimpleEntityWriterConstants {
+final class SimpleEntityWriterConstants {
+private SimpleEntityWriterConstants() {}
 // constants for mtype = 1
-String KBS_SENT = "kbs sent";
-int KBS_SENT_DEFAULT = 1;
-String TEST_TIMES = "testtimes";
-int TEST_TIMES_DEFAULT = 100;
-String TIMELINE_SERVICE_PERFORMANCE_RUN_ID =
+static final String KBS_SENT = "kbs sent";
+static final int KBS_SENT_DEFAULT = 1;
+static final String TEST_TIMES = "testtimes";
+static final int TEST_TIMES_DEFAULT = 100;
+static final String TIMELINE_SERVICE_PERFORMANCE_RUN_ID =
 "timeline.server.performance.run.id";
 /**
 * To ensure that the compression really gets exercised, generate a
 * random alphanumeric fixed length payload.
 */
-char[] ALPHA_NUMS = new char[] {'a', 'b', 'c', 'd', 'e', 'f',
+static final char[] ALPHA_NUMS = new char[] {'a', 'b', 'c', 'd', 'e', 'f',
 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',
 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D',
 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',

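This hunk replaces the constant-interface antipattern (Effective Java, Item 22) with a non-instantiable holder class: classes implementing a constant interface inherit the constant names into their own public API, while a final class with a private constructor forces explicit, qualified references. A minimal sketch of the pattern, with hypothetical names:

final class DemoConstants {
  // Private constructor: the class is a namespace, never an instance.
  private DemoConstants() {}

  // In a class (unlike an interface) the modifiers must be spelled out.
  static final String KEY = "demo.key";
  static final int DEFAULT_VALUE = 42;
}

class DemoReader {
  int read() {
    // Callers now qualify every reference, as the writers below do.
    return Integer.getInteger(DemoConstants.KEY, DemoConstants.DEFAULT_VALUE);
  }
}
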
View File

@@ -39,8 +39,7 @@
 */
 class SimpleEntityWriterV1
 extends org.apache.hadoop.mapreduce.Mapper
-<IntWritable, IntWritable, Writable, Writable>
-implements SimpleEntityWriterConstants {
+<IntWritable, IntWritable, Writable, Writable> {
 private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV1.class);
 public void map(IntWritable key, IntWritable val, Context context)
@@ -48,10 +47,12 @@ public void map(IntWritable key, IntWritable val, Context context)
 TimelineClient tlc = TimelineClient.createTimelineClient();
 Configuration conf = context.getConfiguration();
-final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
+final int kbs = conf.getInt(SimpleEntityWriterConstants.KBS_SENT,
+SimpleEntityWriterConstants.KBS_SENT_DEFAULT);
 long totalTime = 0;
-final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
+final int testtimes = conf.getInt(SimpleEntityWriterConstants.TEST_TIMES,
+SimpleEntityWriterConstants.TEST_TIMES_DEFAULT);
 final Random rand = new Random();
 final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
 final char[] payLoad = new char[kbs * 1024];
@@ -60,8 +61,8 @@ public void map(IntWritable key, IntWritable val, Context context)
 // Generate a fixed length random payload
 for (int xx = 0; xx < kbs * 1024; xx++) {
 int alphaNumIdx =
-rand.nextInt(ALPHA_NUMS.length);
-payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
+rand.nextInt(SimpleEntityWriterConstants.ALPHA_NUMS.length);
+payLoad[xx] = SimpleEntityWriterConstants.ALPHA_NUMS[alphaNumIdx];
 }
 String entId = taskAttemptId + "_" + Integer.toString(i);
 final TimelineEntity entity = new TimelineEntity();

View File

@@ -40,8 +40,7 @@
 * Adds simple entities with random string payload, events, metrics, and
 * configuration.
 */
-class SimpleEntityWriterV2 extends EntityWriterV2
-implements SimpleEntityWriterConstants {
+class SimpleEntityWriterV2 extends EntityWriterV2 {
 private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV2.class);
 protected void writeEntities(Configuration tlConf,
@@ -49,7 +48,8 @@ protected void writeEntities(Configuration tlConf,
 Configuration conf = context.getConfiguration();
 // simulate the app id with the task id
 int taskId = context.getTaskAttemptID().getTaskID().getId();
-long timestamp = conf.getLong(TIMELINE_SERVICE_PERFORMANCE_RUN_ID, 0);
+long timestamp = conf.getLong(
+SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID, 0);
 ApplicationId appId = ApplicationId.newInstance(timestamp, taskId);
 // create the app level timeline collector
@@ -66,10 +66,12 @@ protected void writeEntities(Configuration tlConf,
 tlContext.setFlowRunId(timestamp);
 tlContext.setUserId(context.getUser());
-final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
+final int kbs = conf.getInt(SimpleEntityWriterConstants.KBS_SENT,
+SimpleEntityWriterConstants.KBS_SENT_DEFAULT);
 long totalTime = 0;
-final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
+final int testtimes = conf.getInt(SimpleEntityWriterConstants.TEST_TIMES,
+SimpleEntityWriterConstants.TEST_TIMES_DEFAULT);
 final Random rand = new Random();
 final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
 final char[] payLoad = new char[kbs * 1024];
@@ -78,8 +80,8 @@ protected void writeEntities(Configuration tlConf,
 // Generate a fixed length random payload
 for (int xx = 0; xx < kbs * 1024; xx++) {
 int alphaNumIdx =
-rand.nextInt(ALPHA_NUMS.length);
-payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
+rand.nextInt(SimpleEntityWriterConstants.ALPHA_NUMS.length);
+payLoad[xx] = SimpleEntityWriterConstants.ALPHA_NUMS[alphaNumIdx];
 }
 String entId = taskAttemptId + "_" + Integer.toString(i);
 final TimelineEntity entity = new TimelineEntity();
final TimelineEntity entity = new TimelineEntity();

View File

@@ -27,6 +27,9 @@
 import org.junit.Test;
+/**
+ * Tests {@link TimelineMetric} operations such as aggregation.
+ */
 public class TestTimelineMetric {
 @Test
@@ -86,15 +89,4 @@ private static TimelineMetric getSingleValueMetric(String id,
 m.setValues(metricValues);
 return m;
 }
-private static TimelineMetric getTimeSeriesMetric(String id,
-TimelineMetricOperation op, Map<Long, Number> metricValues) {
-TimelineMetric m = new TimelineMetric();
-m.setId(id);
-m.setType(Type.TIME_SERIES);
-m.setRealtimeAggregationOp(op);
-m.setValues(metricValues);
-return m;
-}
 }

View File

@@ -24,7 +24,6 @@
 import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.CollectorInfo;
-import org.apache.hadoop.yarn.api.records.Token;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.client.api.impl.TimelineV2ClientImpl;
 import org.apache.hadoop.yarn.exceptions.YarnException;

View File

@@ -131,8 +131,8 @@ protected void serviceInit(Configuration conf) throws Exception {
 }
 }
-private static final ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR
-= new ConnectionConfigurator() {
+private static final ConnectionConfigurator
+DEFAULT_TIMEOUT_CONN_CONFIGURATOR = new ConnectionConfigurator() {
 @Override
 public HttpURLConnection configure(HttpURLConnection conn)
 throws IOException {

View File

@@ -3218,7 +3218,7 @@
 <name>yarn.app.attempt.diagnostics.limit.kc</name>
 <value>64</value>
 </property>
 <property>
 <description>
 Flag to enable cross-origin (CORS) support for timeline service v1.x or

View File

@@ -34,7 +34,9 @@
 import java.util.Iterator;
 import java.util.Map;
+/**
+ * Tests timeline service API records.
+ */
 public class TestTimelineServiceRecords {
 private static final Log LOG =
 LogFactory.getLog(TestTimelineServiceRecords.class);

View File

@@ -426,15 +426,14 @@ protected TimelineConnector createTimelineConnector() {
 new TimelineConnector(true, authUgi, doAsUser, token) {
 @Override
 public TimelineClientRetryOp
-createRetryOpForOperateDelegationToken(
+createRetryOpForOperateDelegationToken(
 final PrivilegedExceptionAction<?> action)
 throws IOException {
 TimelineClientRetryOpForOperateDelegationToken op =
 spy(new TimelineClientRetryOpForOperateDelegationToken(
 UserGroupInformation.getCurrentUser(), action));
-doThrow(
-new SocketTimeoutException("Test socketTimeoutException"))
-.when(op).run();
+doThrow(new SocketTimeoutException(
+"Test socketTimeoutException")).when(op).run();
 return op;
 }
 };

View File

@@ -49,6 +49,9 @@
 import org.junit.Test;
 import org.junit.rules.TestName;
+/**
+ * Tests {@link TimelineV2ClientImpl}.
+ */
 public class TestTimelineClientV2Impl {
 private static final Log LOG =
 LogFactory.getLog(TestTimelineClientV2Impl.class);

View File

@@ -26,6 +26,9 @@
 import org.junit.Assert;
 import org.junit.Test;
+/**
+ * Tests utility methods in {@link TimelineServiceHelper}.
+ */
 public class TestTimelineServiceHelper {
 @Test

View File

@@ -22,6 +22,9 @@
 import org.apache.hadoop.ipc.ProtocolInfo;
 import org.apache.hadoop.yarn.proto.CollectorNodemanagerProtocol.CollectorNodemanagerProtocolService;
+/**
+ * Collector nodemanager protocol service implementation.
+ */
 @Private
 @Unstable
 @ProtocolInfo(

View File

@@ -45,6 +45,9 @@
 import com.google.protobuf.ServiceException;
+/**
+ * Client implementation of {@link CollectorNodemanagerProtocol}.
+ */
 public class CollectorNodemanagerProtocolPBClientImpl implements
 CollectorNodemanagerProtocol, Closeable {

View File

@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Contains protocol client implementations used to communicate with different
+ * YARN server components.
+ */
+package org.apache.hadoop.yarn.server.api.impl.pb.client;

View File

@@ -36,6 +36,9 @@
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
+/**
+ * Service implementation of CollectorNodemanagerProtocol.
+ */
 public class CollectorNodemanagerProtocolPBServiceImpl implements
 CollectorNodemanagerProtocolPB {

View File

@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Contains interfaces which define the protocols to communicate with different
+ * YARN server components.
+ */
+package org.apache.hadoop.yarn.server.api;

View File

@@ -21,6 +21,11 @@
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.util.Records;
+/**
+ * GetTimelineCollectorContextRequest invoked by collector to get the context
+ * information from Nodemanager. The context information includes user, flow and
+ * flow run ID and is associated with every incoming put-entity request.
+ */
 public abstract class GetTimelineCollectorContextRequest {
 public static GetTimelineCollectorContextRequest newInstance(

View File

@@ -20,6 +20,12 @@
 import org.apache.hadoop.yarn.util.Records;
+/**
+ * GetTimelineCollectorContextResponse sent in response of
+ * GetTimelineCollectorContextRequest invoked by collector. The response
+ * includes user, flow name, flow version and flow run ID. This is then
+ * associated by collector with every incoming put-entity request.
+ */
 public abstract class GetTimelineCollectorContextResponse {
 public static GetTimelineCollectorContextResponse newInstance(

View File

@@ -26,6 +26,12 @@
 import org.apache.hadoop.yarn.server.api.records.AppCollectorData;
 import org.apache.hadoop.yarn.util.Records;
+/**
+ * Request used to report new collector info from timeline collector manager
+ * whenever a collector is launched.
+ * The request contains a list of collector data, with each collector data
+ * object represented by {@link AppCollectorData}.
+ */
 @Private
 public abstract class ReportNewCollectorInfoRequest {

View File

@@ -20,6 +20,9 @@
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.yarn.util.Records;
+/**
+ * Response to {@link ReportNewCollectorInfoRequest}.
+ */
 public abstract class ReportNewCollectorInfoResponse {
 @Private

View File

@@ -26,6 +26,10 @@
 import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextRequestProto;
 import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextRequest;
+/**
+ * Protocol buffer based implementation of
+ * {@link GetTimelineCollectorContextRequest}.
+ */
 public class GetTimelineCollectorContextRequestPBImpl extends
 GetTimelineCollectorContextRequest {

View File

@@ -23,6 +23,10 @@
 import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.GetTimelineCollectorContextResponseProto;
 import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextResponse;
+/**
+ * Protocol buffer based implementation of
+ * {@link GetTimelineCollectorContextResponse}.
+ */
 public class GetTimelineCollectorContextResponsePBImpl extends
 GetTimelineCollectorContextResponse {

View File

@@ -27,6 +27,10 @@
 import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.ReportNewCollectorInfoRequestProtoOrBuilder;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReportNewCollectorInfoRequest;
+/**
+ * Protocol buffer based implementation of
+ * {@link ReportNewCollectorInfoRequest}.
+ */
 public class ReportNewCollectorInfoRequestPBImpl extends
 ReportNewCollectorInfoRequest {

View File

@@ -24,6 +24,10 @@
 import com.google.protobuf.TextFormat;
+/**
+ * Protocol buffer based implementation of
+ * {@link ReportNewCollectorInfoResponse}.
+ */
 @Private
 @Unstable
 public class ReportNewCollectorInfoResponsePBImpl extends

View File

@@ -1,4 +1,4 @@
-/**
+/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
@@ -16,8 +16,7 @@
 * limitations under the License.
 */
-package org.apache.hadoop.yarn.server.timelineservice.collector;
-public class TestAppLevelTimelineCollector {
-}
+/**
+ * Contains protocol record implementations for YARN server protocols.
+ */
+package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;

View File

@@ -24,7 +24,10 @@
 import org.apache.hadoop.yarn.api.records.Token;
 import org.apache.hadoop.yarn.util.Records;
+/**
+ * Collector data associated with an app collector. Includes app Id, collector
+ * address, RM identifier, version and collector token.
+ */
 @Private
 @InterfaceStability.Unstable
 public abstract class AppCollectorData {

View File

@@ -30,6 +30,9 @@
 import com.google.protobuf.TextFormat;
+/**
+ * Protocol Buffer based implementation of {@link AppCollectorData}.
+ */
 @Private
 @Unstable
 public class AppCollectorDataPBImpl extends AppCollectorData {

View File

@@ -53,7 +53,8 @@ public class TimelineAuthenticationFilterInitializer extends FilterInitializer {
 /**
 * The configuration prefix of timeline HTTP authentication.
 */
-public static final String PREFIX = "yarn.timeline-service.http-authentication.";
+public static final String PREFIX =
+"yarn.timeline-service.http-authentication.";
 @VisibleForTesting
 Map<String, String> filterConfig;
@@ -104,7 +105,7 @@ protected Map<String, String> getFilterConfig() {
 }
 /**
-* Initializes {@link TimelineAuthenticationFilter}
+* Initializes {@link TimelineAuthenticationFilter}.
 * <p>
 * Propagates to {@link TimelineAuthenticationFilter} configuration all YARN
 * configuration properties prefixed with {@value #PREFIX}

View File

@@ -84,6 +84,9 @@
 import org.junit.Assert;
 import org.junit.Test;
+/**
+ * Tests RPC communication for different YARN server protocols.
+ */
 public class TestRPC {
 private static final String EXCEPTION_MSG = "test error";
@@ -320,7 +323,8 @@ private void test(String rpcClass) throws Exception {
 Assert.assertEquals(ContainerState.RUNNING, statuses.get(0).getState());
 }
-public class DummyContainerManager implements ContainerManagementProtocol {
+private static class DummyContainerManager
+implements ContainerManagementProtocol {
 private List<ContainerStatus> statuses = new ArrayList<ContainerStatus>();
@@ -448,7 +452,7 @@ public static Token newContainerToken(NodeId nodeId, byte[] password,
 // A dummy implementation for CollectorNodemanagerProtocol for test purpose,
 // it only can accept one appID, collectorAddr pair or throw exceptions
-public class DummyNMCollectorService
+private static class DummyNMCollectorService
 implements CollectorNodemanagerProtocol {
 @Override

View File

@@ -27,7 +27,9 @@
 import org.junit.Test;
 import org.mockito.Mockito;
+/**
+ * Tests {@link TimelineAuthenticationFilterInitializer}.
+ */
 public class TestTimelineAuthenticationFilterInitializer {
 @Test

View File

@@ -429,6 +429,7 @@ private void recoverApplication(ContainerManagerApplicationProto p)
 app.handle(new ApplicationInitEvent(appId, acls, logAggregationContext));
 }
+@SuppressWarnings("unchecked")
 private void recoverContainer(RecoveredContainerState rcs)
 throws IOException {
 StartContainerRequest req = rcs.getStartRequest();
@@ -1735,6 +1736,7 @@ public RestartContainerResponse restartContainer(ContainerId containerId)
 * @param reInitLaunchContext Target Launch Context.
 * @throws YarnException Yarn Exception.
 */
+@SuppressWarnings("unchecked")
 public void reInitializeContainer(ContainerId containerId,
 ContainerLaunchContext reInitLaunchContext, boolean autoCommit)
 throws YarnException {
@@ -1762,6 +1764,7 @@ public void reInitializeContainer(ContainerId containerId,
 * @return Rollback Response.
 * @throws YarnException Yarn Exception.
 */
+@SuppressWarnings("unchecked")
 @Override
 public RollbackResponse rollbackLastReInitialization(ContainerId containerId)
 throws YarnException {

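The @SuppressWarnings("unchecked") annotations added here silence the javac warning produced by YARN's event-dispatch pattern, where Dispatcher#getEventHandler() returns a raw EventHandler whose handle(...) is then invoked with a typed event. A minimal self-contained sketch of the warning (Event, EventHandler and DispatchDemo below are simplified stand-ins, not the NM's actual types):

interface Event {}

interface EventHandler<T extends Event> {
  void handle(T event);
}

class DispatchDemo {
  // Raw-typed handler, as Dispatcher#getEventHandler() returns in branch-2.
  @SuppressWarnings("rawtypes")
  private EventHandler handler;

  // Without the annotation, javac reports an "unchecked call to
  // handle(T) as a member of the raw type EventHandler" below.
  @SuppressWarnings("unchecked")
  void fire(Event event) {
    handler.handle(event);
  }
}
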
View File

@@ -85,6 +85,9 @@ public class ContainersMonitorImpl extends AbstractService implements
 private static final long UNKNOWN_MEMORY_LIMIT = -1L;
 private int nodeCpuPercentageForYARN;
+/**
+ * Identifies the type of container metric to be published.
+ */
 @Private
 public static enum ContainerMetric {
 CPU, MEMORY

View File

@@ -33,21 +33,22 @@
 @InterfaceStability.Unstable
 public class NMPolicyProvider extends PolicyProvider {
-private static final Service[] nodeManagerServices =
+private static final Service[] NODE_MANAGER_SERVICES =
 new Service[] {
-new Service(
-YarnConfiguration.YARN_SECURITY_SERVICE_AUTHORIZATION_CONTAINER_MANAGEMENT_PROTOCOL,
-ContainerManagementProtocolPB.class),
-new Service(YarnConfiguration.YARN_SECURITY_SERVICE_AUTHORIZATION_RESOURCE_LOCALIZER,
-LocalizationProtocolPB.class),
-new Service(YarnConfiguration.
-YARN_SECURITY_SERVICE_AUTHORIZATION_COLLECTOR_NODEMANAGER_PROTOCOL,
+new Service(YarnConfiguration.
+YARN_SECURITY_SERVICE_AUTHORIZATION_CONTAINER_MANAGEMENT_PROTOCOL,
+ContainerManagementProtocolPB.class),
+new Service(YarnConfiguration.
+YARN_SECURITY_SERVICE_AUTHORIZATION_RESOURCE_LOCALIZER,
+LocalizationProtocolPB.class),
+new Service(YarnConfiguration.
+YARN_SECURITY_SERVICE_AUTHORIZATION_COLLECTOR_NODEMANAGER_PROTOCOL,
 CollectorNodemanagerProtocolPB.class)
-};
+};
 @Override
 public Service[] getServices() {
-return nodeManagerServices;
+return NODE_MANAGER_SERVICES;
 }
 }

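The rename from nodeManagerServices to NODE_MANAGER_SERVICES satisfies checkstyle's ConstantName rule: static final fields are named in UPPER_SNAKE_CASE. A minimal sketch with hypothetical names:

class NamingDemo {
  // Flagged by ConstantName: a constant named like an instance field.
  // private static final String[] supportedProtocols = {"a", "b"};

  // Compliant: UPPER_SNAKE_CASE for static final fields.
  private static final String[] SUPPORTED_PROTOCOLS = {"a", "b"};

  String[] list() {
    return SUPPORTED_PROTOCOLS.clone(); // defensive copy of the mutable array
  }
}
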
View File

@@ -18,6 +18,9 @@
 package org.apache.hadoop.yarn.server.nodemanager.timelineservice;
+/**
+ * Tests publishing of entities from NM to ATSv2.
+ */
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.mockito.Mockito.mock;
@@ -43,6 +46,9 @@
 import org.junit.Assert;
 import org.junit.Test;
+/**
+ * Tests {@link NMTimelinePublisher}.
+ */
 public class TestNMTimelinePublisher {
 private static final String MEMORY_ID = "MEMORY";
 private static final String CPU_ID = "CPU";

View File

@@ -45,7 +45,6 @@
 import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
-import org.apache.hadoop.yarn.api.records.CollectorInfo;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;

View File

@@ -55,7 +55,6 @@
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.server.api.records.AppCollectorData;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt

View File

@@ -75,6 +75,9 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
+/**
+ * Tests publishing of entities and metrics from RM to ATSv2.
+ */
 public class TestSystemMetricsPublisherForV2 {
 /**

View File

@@ -67,13 +67,13 @@ public static void setup() throws IOException {
 }
 private static void writeFile(String content, File file) throws IOException {
-FileOutputStream outputStream = new FileOutputStream(file);
-FileChannel fc = outputStream.getChannel();
+FileOutputStream outputStream = new FileOutputStream(file);
+FileChannel fc = outputStream.getChannel();
-ByteBuffer buffer =
-ByteBuffer.wrap(content.getBytes(StandardCharsets.UTF_8));
-fc.write(buffer);
-outputStream.close();
+ByteBuffer buffer =
+ByteBuffer.wrap(content.getBytes(StandardCharsets.UTF_8));
+fc.write(buffer);
+outputStream.close();
 }
 @AfterClass
@AfterClass

View File

@@ -58,6 +58,9 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
+/**
+ * Tests timeline client integration with collector service.
+ */
 public class TestTimelineServiceClientIntegration {
 private static final String ROOT_DIR = new File("target",
 TestTimelineServiceClientIntegration.class.getSimpleName()).

View File

@@ -64,7 +64,7 @@
 import com.sun.jersey.api.client.GenericType;
 /**
-* Test TimelineReder Web Service REST API's.
+* Test TimelineReader Web Service REST APIs using HBase storage.
 */
 public class TestTimelineReaderWebServicesHBaseStorage
 extends AbstractTimelineReaderHBaseTestBase {

View File

@@ -30,6 +30,10 @@
 import com.google.common.collect.Iterables;
+/**
+ * Tests the encoding and decoding of separators. Typically used to separate
+ * different segments in HBase row key.
+ */
 public class TestSeparator {
 private static String villain = "Dr. Heinz Doofenshmirtz";

View File

@@ -36,7 +36,7 @@ public class TimelineReaderAuthenticationFilterInitializer extends
 * <p>
 * Propagates to {@link AuthenticationWithProxyUserFilter} configuration all
 * YARN configuration properties prefixed with
-* {@value TimelineAuthenticationFilterInitializer#PREFIX}.
+* {@link TimelineAuthenticationFilterInitializer#PREFIX}.
 *
 * @param container
 * The filter container

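The {@value} to {@link} swap above is a javadoc fix: {@value Class#FIELD} asks the doclet to inline the referenced constant's value and fails the build when that reference cannot be resolved as a constant from the documented location, while {@link} only needs a linkable symbol. A self-contained sketch with hypothetical names:

final class DocConstants {
  private DocConstants() {}
  static final String KEY = "demo.key";
}

final class DocDemo {
  private DocDemo() {}

  /**
   * Inlined into the generated HTML: the prefix is {@value DocConstants#KEY}.
   * Linked, not inlined: see {@link DocConstants#KEY}.
   */
  static String prefix() {
    return DocConstants.KEY;
  }
}
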
View File

@@ -125,7 +125,7 @@ TimelineEntity getEntity(TimelineReaderContext context,
 * <li><b>flowRunId</b> - Context flow run id.</li>
 * <li><b>appId</b> - Context app id.</li>
 * </ul>
-* Although entityIdPrefix and entityId are also part of context,
+* Although entityIdPrefix and entityId are also part of context,
 * it has no meaning for getEntities.<br>
 * Fields in context which are mandatory depends on entity type. Entity
 * type is always mandatory. In addition to entity type, below is the list
@@ -161,7 +161,7 @@ TimelineEntity getEntity(TimelineReaderContext context,
 * {@link TimelineDataToRetrieve} for details.
 * @return A set of <cite>TimelineEntity</cite> instances of the given entity
 * type in the given context scope which matches the given predicates
-* ordered by enitityIdPrefix(for generic entities only).
+* ordered by entityIdPrefix (for generic entities only).
 * Each entity will only contain
 * the metadata(id, type , idPrefix and created time) plus the given
 * fields to retrieve.

View File

@@ -51,6 +51,9 @@
 import org.junit.Before;
 import org.junit.Test;
+/**
+ * Tests node level timeline collector manager.
+ */
 public class TestNMTimelineCollectorManager {
 private NodeTimelineCollectorManager collectorManager;

View File

@@ -49,6 +49,10 @@
 import org.junit.After;
 import org.junit.Test;
+/**
+ * Tests the NodeManager auxiliary service started for node level timeline
+ * collector.
+ */
 public class TestPerNodeTimelineCollectorsAuxService {
 private ApplicationAttemptId appAttemptId;
 private PerNodeTimelineCollectorsAuxService auxService;

View File

@@ -46,6 +46,9 @@
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+/**
+ * Tests functionality of timeline collector.
+ */
 public class TestTimelineCollector {
 private TimelineEntities generateTestEntities(int groups, int entities) {

View File

@@ -28,6 +28,9 @@
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
 import org.junit.Test;
+/**
+ * Tests {@link TimelineReaderServer} start and stop.
+ */
 public class TestTimelineReaderServer {
 @Test(timeout = 60000)

View File

@@ -26,6 +26,9 @@
 import org.junit.Test;
+/**
+ * Tests helper methods in {@link TimelineReaderUtils}.
+ */
 public class TestTimelineReaderUtils {
 @Test

View File

@@ -59,6 +59,9 @@
 import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
 import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
+/**
+ * Test TimelineReader Web Service REST APIs using filesystem storage.
+ */
 public class TestTimelineReaderWebServices {
 private static final String ROOT_DIR = new File("target",

View File

@@ -35,6 +35,9 @@
 import com.google.common.collect.Sets;
+/**
+ * Tests helper methods in {@link TimelineReaderWebServicesUtils}.
+ */
 public class TestTimelineReaderWebServicesUtils {
 private static void verifyFilterList(String expr, TimelineFilterList list,
 TimelineFilterList expectedList) throws Exception {

View File

@@ -24,6 +24,9 @@
 import org.junit.Test;
+/**
+ * Tests Timeline UIDs' encoding and decoding.
+ */
 public class TestTimelineUIDConverter {
 @Test

View File

@@ -59,6 +59,9 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
+/**
+ * Tests filesystem implementation for timelineservice reader.
+ */
 public class TestFileSystemTimelineReaderImpl {
 private static final String ROOT_DIR = new File("target",

View File

@@ -42,6 +42,9 @@
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+/**
+ * Tests filesystem implementation for timelineservice writer.
+ */
 public class TestFileSystemTimelineWriterImpl {
 @Rule
 public TemporaryFolder tmpFolder = new TemporaryFolder();