Revert "YARN-8270 Adding JMX Metrics for Timeline Collector and Reader. Contributed by Sushil Ks."

This reverts commit 5b72aa04e1.
Rohith Sharma K S 2019-01-28 10:55:12 +05:30
parent 6092d913b1
commit 6e059c7930
7 changed files with 52 additions and 500 deletions
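For orientation, the pattern this revert strips out of both web services, shown as a minimal hedged sketch reconstructed from the removed hunks below (simplified and condensed, not an exact excerpt of either file): each handler timed its work, tracked a success flag, and published the latency to a MutableQuantiles-backed singleton in a finally block.

  // Sketch of the removed YARN-8270 instrumentation (collector PUT path, simplified).
  private static final PerNodeAggTimelineCollectorMetrics METRICS =
      PerNodeAggTimelineCollectorMetrics.getInstance();

  public Response putEntities(TimelineEntities entities /* ... other params ... */) {
    long startTime = Time.monotonicNow();   // monotonic clock, suitable for latency
    boolean succeeded = false;
    try {
      // ... validate the caller, resolve the app-level collector, write entities ...
      succeeded = true;
      return Response.ok().build();
    } finally {
      long latency = Time.monotonicNow() - startTime;
      METRICS.addPutEntitiesLatency(latency, succeeded);  // success vs. failure quantiles
    }
  }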

TimelineCollectorWebService.java

@@ -41,7 +41,6 @@ import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity;
@@ -54,7 +53,6 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.api.records.timelineservice.UserEntity;
import org.apache.hadoop.yarn.server.timelineservice.metrics.PerNodeAggTimelineCollectorMetrics;
import org.apache.hadoop.yarn.webapp.ForbiddenException;
import org.apache.hadoop.yarn.webapp.NotFoundException;
@@ -79,8 +77,6 @@ public class TimelineCollectorWebService {
LoggerFactory.getLogger(TimelineCollectorWebService.class);
private @Context ServletContext context;
private static final PerNodeAggTimelineCollectorMetrics METRICS =
PerNodeAggTimelineCollectorMetrics.getInstance();
/**
* Gives information about timeline collector.
@@ -155,15 +151,12 @@ public class TimelineCollectorWebService {
TimelineEntities entities) {
init(res);
UserGroupInformation callerUgi = getUser(req);
boolean isAsync = async != null && async.trim().equalsIgnoreCase("true");
if (callerUgi == null) {
String msg = "The owner of the posted timeline entities is not set";
LOG.error(msg);
throw new ForbiddenException(msg);
}
long startTime = Time.monotonicNow();
boolean succeeded = false;
try {
ApplicationId appID = parseApplicationId(appId);
if (appID == null) {
@@ -178,6 +171,7 @@ public class TimelineCollectorWebService {
throw new NotFoundException("Application: "+ appId + " is not found");
}
boolean isAsync = async != null && async.trim().equalsIgnoreCase("true");
if (isAsync) {
collector.putEntitiesAsync(processTimelineEntities(entities, appId,
Boolean.valueOf(isSubAppEntities)), callerUgi);
@@ -186,7 +180,6 @@ public class TimelineCollectorWebService {
Boolean.valueOf(isSubAppEntities)), callerUgi);
}
succeeded = true;
return Response.ok().build();
} catch (NotFoundException | ForbiddenException e) {
throw new WebApplicationException(e,
@@ -195,13 +188,6 @@ public class TimelineCollectorWebService {
LOG.error("Error putting entities", e);
throw new WebApplicationException(e,
Response.Status.INTERNAL_SERVER_ERROR);
} finally {
long latency = Time.monotonicNow() - startTime;
if (isAsync) {
METRICS.addAsyncPutEntitiesLatency(latency, succeeded);
} else {
METRICS.addPutEntitiesLatency(latency, succeeded);
}
}
}

PerNodeAggTimelineCollectorMetrics.java (deleted)

@@ -1,117 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.metrics;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MutableQuantiles;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.apache.hadoop.metrics2.lib.Interns.info;
/**
* Metrics class for TimelineCollectorWebService
* running on each NM.
*/
@Metrics(about = "Aggregated metrics of TimelineCollector's running on each NM",
context = "timelineservice")
final public class PerNodeAggTimelineCollectorMetrics {
private static final MetricsInfo METRICS_INFO =
info("PerNodeAggTimelineCollectorMetrics",
"Aggregated Metrics for TimelineCollector's running on each NM");
private static AtomicBoolean isInitialized = new AtomicBoolean(false);
private static PerNodeAggTimelineCollectorMetrics
instance = null;
@Metric(about = "PUT entities failure latency", valueName = "latency")
private MutableQuantiles putEntitiesFailureLatency;
@Metric(about = "PUT entities success latency", valueName = "latency")
private MutableQuantiles putEntitiesSuccessLatency;
@Metric(about = "async PUT entities failure latency", valueName = "latency")
private MutableQuantiles asyncPutEntitiesFailureLatency;
@Metric(about = "async PUT entities success latency", valueName = "latency")
private MutableQuantiles asyncPutEntitiesSuccessLatency;
private PerNodeAggTimelineCollectorMetrics() {
}
public static PerNodeAggTimelineCollectorMetrics getInstance() {
if (!isInitialized.get()) {
synchronized (PerNodeAggTimelineCollectorMetrics.class) {
if (instance == null) {
instance =
DefaultMetricsSystem.initialize("TimelineService").register(
METRICS_INFO.name(), METRICS_INFO.description(),
new PerNodeAggTimelineCollectorMetrics());
isInitialized.set(true);
}
}
}
return instance;
}
public synchronized static void destroy() {
isInitialized.set(false);
instance = null;
}
@VisibleForTesting
public MutableQuantiles getPutEntitiesSuccessLatency() {
return putEntitiesSuccessLatency;
}
@VisibleForTesting
public MutableQuantiles getPutEntitiesFailureLatency() {
return putEntitiesFailureLatency;
}
@VisibleForTesting
public MutableQuantiles getAsyncPutEntitiesSuccessLatency() {
return asyncPutEntitiesSuccessLatency;
}
@VisibleForTesting
public MutableQuantiles getAsyncPutEntitiesFailureLatency() {
return asyncPutEntitiesFailureLatency;
}
public void addPutEntitiesLatency(
long durationMs, boolean succeeded) {
if (succeeded) {
putEntitiesSuccessLatency.add(durationMs);
} else {
putEntitiesFailureLatency.add(durationMs);
}
}
public void addAsyncPutEntitiesLatency(
long durationMs, boolean succeeded) {
if (succeeded) {
asyncPutEntitiesSuccessLatency.add(durationMs);
} else {
asyncPutEntitiesFailureLatency.add(durationMs);
}
}
}

TimelineReaderMetrics.java (deleted)

@@ -1,113 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.metrics;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.metrics2.MetricsInfo;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MutableQuantiles;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.apache.hadoop.metrics2.lib.Interns.info;
/**
* Metrics class for TimelineReader.
*/
@Metrics(about = "Metrics for timeline reader", context = "timelineservice")
final public class TimelineReaderMetrics {
private final static MetricsInfo METRICS_INFO = info("TimelineReaderMetrics",
"Metrics for TimelineReader");
private static AtomicBoolean isInitialized = new AtomicBoolean(false);
private static TimelineReaderMetrics instance = null;
@Metric(about = "GET entities failure latency", valueName = "latency")
private MutableQuantiles getEntitiesFailureLatency;
@Metric(about = "GET entities success latency", valueName = "latency")
private MutableQuantiles getEntitiesSuccessLatency;
@Metric(about = "GET entity types failure latency", valueName = "latency")
private MutableQuantiles getEntityTypesFailureLatency;
@Metric(about = "GET entity types success latency", valueName = "latency")
private MutableQuantiles getEntityTypesSuccessLatency;
private TimelineReaderMetrics() {
}
public static TimelineReaderMetrics getInstance() {
if (!isInitialized.get()) {
synchronized (TimelineReaderMetrics.class) {
if (instance == null) {
instance =
DefaultMetricsSystem.initialize("TimelineService").register(
METRICS_INFO.name(), METRICS_INFO.description(),
new TimelineReaderMetrics());
isInitialized.set(true);
}
}
}
return instance;
}
public synchronized static void destroy() {
isInitialized.set(false);
instance = null;
}
@VisibleForTesting
public MutableQuantiles getGetEntitiesSuccessLatency() {
return getEntitiesSuccessLatency;
}
@VisibleForTesting
public MutableQuantiles getGetEntitiesFailureLatency() {
return getEntitiesFailureLatency;
}
@VisibleForTesting
public MutableQuantiles getGetEntityTypesSuccessLatency() {
return getEntityTypesSuccessLatency;
}
@VisibleForTesting
public MutableQuantiles getGetEntityTypesFailureLatency() {
return getEntityTypesFailureLatency;
}
public void addGetEntitiesLatency(
long durationMs, boolean succeeded) {
if (succeeded) {
getEntitiesSuccessLatency.add(durationMs);
} else {
getEntitiesFailureLatency.add(durationMs);
}
}
public void addGetEntityTypesLatency(
long durationMs, boolean succeeded) {
if (succeeded) {
getEntityTypesSuccessLatency.add(durationMs);
} else {
getEntityTypesFailureLatency.add(durationMs);
}
}
}
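Both metrics classes were consumed the same way; as a hedged illustration (condensed from the TimelineReaderWebServices hunks further down, not an exact excerpt), a reader handler recorded its GET latency like this:

  // Condensed sketch of the reader-side usage removed below (not an exact excerpt).
  TimelineReaderMetrics metrics = TimelineReaderMetrics.getInstance();
  long startTime = Time.monotonicNow();
  boolean succeeded = false;
  try {
    // ... run the timeline query and access checks ...
    succeeded = true;
  } finally {
    // Routes the sample to getEntitiesSuccessLatency or getEntitiesFailureLatency.
    metrics.addGetEntitiesLatency(Time.monotonicNow() - startTime, succeeded);
  }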

package-info.java (org.apache.hadoop.yarn.server.timelineservice.metrics, deleted)

@@ -1,28 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Package org.apache.hadoop.server.timelineservice contains classes to be used
* across timeline reader and collector.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
package org.apache.hadoop.yarn.server.timelineservice.metrics;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

TimelineReaderWebServices.java

@@ -52,7 +52,6 @@ import org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.metrics.TimelineReaderMetrics;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.apache.hadoop.yarn.webapp.BadRequestException;
@@ -78,8 +77,6 @@ public class TimelineReaderWebServices {
private static final String QUERY_STRING_SEP = "?";
private static final String RANGE_DELIMITER = "-";
private static final String DATE_PATTERN = "yyyyMMdd";
private static final TimelineReaderMetrics METRICS =
TimelineReaderMetrics.getInstance();
@VisibleForTesting
static final ThreadLocal<DateFormat> DATE_FORMAT =
@@ -326,7 +323,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<TimelineEntity> entities = null;
@@ -347,19 +343,16 @@ public class TimelineReaderWebServices {
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
checkAccessForGenericEntities(entities, callerUGI, entityType);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime,
"createdTime start/end or limit or flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entities == null) {
entities = Collections.emptySet();
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entities;
}
@@ -610,7 +603,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<TimelineEntity> entities = null;
@@ -628,19 +620,16 @@ public class TimelineReaderWebServices {
fields, metricsLimit, metricsTimeStart, metricsTimeEnd));
checkAccessForGenericEntities(entities, callerUGI, entityType);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime,
"createdTime start/end or limit or flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entities == null) {
entities = Collections.emptySet();
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entities;
}
@@ -708,7 +697,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
TimelineEntity entity = null;
@@ -723,21 +711,18 @@ public class TimelineReaderWebServices {
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
checkAccessForGenericEntity(entity, callerUGI);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entity == null) {
LOG.info("Processed URL " + url + " but entity not found" + " (Took " +
(Time.monotonicNow() - startTime) + " ms.)");
(endTime - startTime) + " ms.)");
throw new NotFoundException("Timeline entity with uid: " + uId +
"is not found");
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entity;
}
@@ -904,7 +889,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
TimelineEntity entity = null;
@@ -917,21 +901,18 @@ public class TimelineReaderWebServices {
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
checkAccessForGenericEntity(entity, callerUGI);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entity == null) {
LOG.info("Processed URL " + url + " but entity not found" + " (Took " +
(Time.monotonicNow() - startTime) + " ms.)");
(endTime - startTime) + " ms.)");
throw new NotFoundException("Timeline entity {id: " + entityId +
", type: " + entityType + " } is not found");
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entity;
}
@@ -974,7 +955,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
TimelineEntity entity = null;
@@ -990,20 +970,17 @@ public class TimelineReaderWebServices {
entity = timelineReaderManager.getEntity(context,
TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
null, metricsToRetrieve, null, null, null, null));
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entity == null) {
LOG.info("Processed URL " + url + " but flowrun not found (Took " +
(Time.monotonicNow() - startTime) + " ms.)");
(endTime - startTime) + " ms.)");
throw new NotFoundException("Flowrun with uid: " + uId + "is not found");
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entity;
}
@@ -1092,7 +1069,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
TimelineEntity entity = null;
@@ -1107,23 +1083,20 @@ public class TimelineReaderWebServices {
TimelineReaderWebServicesUtils
.createTimelineDataToRetrieve(null, metricsToRetrieve, null, null,
null, null));
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entity == null) {
LOG.info("Processed URL " + url + " but flowrun not found (Took " +
(Time.monotonicNow() - startTime) + " ms.)");
(endTime - startTime) + " ms.)");
throw new NotFoundException("Flow run {flow name: " +
TimelineReaderWebServicesUtils.parseStr(flowName) + ", run id: " +
TimelineReaderWebServicesUtils.parseLongStr(flowRunId) +
" } is not found");
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entity;
}
@@ -1188,7 +1161,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<TimelineEntity> entities = null;
@@ -1207,19 +1179,16 @@ public class TimelineReaderWebServices {
null, null, null, fromId),
TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
null, metricsToRetrieve, fields, null, null, null));
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime,
"createdTime start/end or limit or fromId");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entities == null) {
entities = Collections.emptySet();
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entities;
}
@@ -1347,7 +1316,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<TimelineEntity> entities = null;
@@ -1367,19 +1335,16 @@ public class TimelineReaderWebServices {
TimelineReaderWebServicesUtils
.createTimelineDataToRetrieve(null, metricsToRetrieve, fields,
null, null, null));
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime,
"createdTime start/end or limit or fromId");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entities == null) {
entities = Collections.emptySet();
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entities;
}
@@ -1490,7 +1455,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<TimelineEntity> entities = null;
@@ -1506,21 +1470,18 @@ public class TimelineReaderWebServices {
TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null, null),
entityFilters, TimelineReaderWebServicesUtils.
createTimelineDataToRetrieve(null, null, null, null, null, null));
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "limit");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entities == null) {
entities = Collections.emptySet();
} else {
checkAccess(timelineReaderManager, callerUGI, entities,
FlowActivityEntity.USER_INFO_KEY, true);
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entities;
}
@@ -1588,7 +1549,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
TimelineEntity entity = null;
@@ -1604,20 +1564,17 @@ public class TimelineReaderWebServices {
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
checkAccessForAppEntity(entity, callerUGI);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entity == null) {
LOG.info("Processed URL " + url + " but app not found" + " (Took " +
(Time.monotonicNow() - startTime) + " ms.)");
(endTime - startTime) + " ms.)");
throw new NotFoundException("App with uid " + uId + " not found");
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entity;
}
@@ -1766,7 +1723,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
TimelineEntity entity = null;
@@ -1779,20 +1735,17 @@ public class TimelineReaderWebServices {
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
checkAccessForAppEntity(entity, callerUGI);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entity == null) {
LOG.info("Processed URL " + url + " but app not found" + " (Took " +
(Time.monotonicNow() - startTime) + " ms.)");
(endTime - startTime) + " ms.)");
throw new NotFoundException("App " + appId + " not found");
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entity;
}
@@ -1903,7 +1856,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<TimelineEntity> entities = null;
@@ -1924,19 +1876,16 @@ public class TimelineReaderWebServices {
TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime,
"createdTime start/end or limit or flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entities == null) {
entities = Collections.emptySet();
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entities;
}
@@ -3313,7 +3262,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<String> results = null;
@@ -3322,15 +3270,12 @@ public class TimelineReaderWebServices {
TimelineReaderWebServicesUtils.createTimelineReaderContext(
clusterId, userId, flowName, flowRunId, appId,
null, null, null));
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "flowrunid");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntityTypesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return results;
}
@@ -3398,7 +3343,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user " +
TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<TimelineEntity> entities = null;
@@ -3415,19 +3359,16 @@ public class TimelineReaderWebServices {
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
checkAccessForSubAppEntities(entities,callerUGI);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime,
"createdTime start/end or limit");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info("Processed URL " + url +
" (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entities == null) {
entities = Collections.emptySet();
}
LOG.info("Processed URL " + url +
" (Took " + (endTime - startTime) + " ms.)");
return entities;
}
@@ -3473,7 +3414,6 @@ public class TimelineReaderWebServices {
LOG.info("Received URL " + url + " from user "
+ TimelineReaderWebServicesUtils.getUserName(callerUGI));
long startTime = Time.monotonicNow();
boolean succeeded = false;
init(res);
TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
Set<TimelineEntity> entities = null;
@@ -3487,19 +3427,15 @@ public class TimelineReaderWebServices {
confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
metricsTimeStart, metricsTimeEnd));
checkAccessForSubAppEntities(entities,callerUGI);
succeeded = true;
} catch (Exception e) {
handleException(e, url, startTime, "");
} finally {
long latency = Time.monotonicNow() - startTime;
METRICS.addGetEntitiesLatency(latency, succeeded);
LOG.info(
"Processed URL " + url + " (Took " + latency + " ms.)");
}
long endTime = Time.monotonicNow();
if (entities == null) {
entities = Collections.emptySet();
}
LOG.info(
"Processed URL " + url + " (Took " + (endTime - startTime) + " ms.)");
return entities;
}

TestPerNodeAggTimelineCollectorMetrics.java (deleted)

@@ -1,56 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.collector;
import org.apache.hadoop.yarn.server.timelineservice.metrics.PerNodeAggTimelineCollectorMetrics;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Test PerNodeAggTimelineCollectorMetrics.
*/
public class TestPerNodeAggTimelineCollectorMetrics {
private PerNodeAggTimelineCollectorMetrics metrics;
@Test
public void testTimelineCollectorMetrics() {
Assert.assertNotNull(metrics);
Assert.assertEquals(10,
metrics.getPutEntitiesSuccessLatency().getInterval());
Assert.assertEquals(10,
metrics.getPutEntitiesFailureLatency().getInterval());
Assert.assertEquals(10,
metrics.getAsyncPutEntitiesSuccessLatency().getInterval());
Assert.assertEquals(10,
metrics.getAsyncPutEntitiesFailureLatency().getInterval());
}
@Before
public void setup() {
metrics = PerNodeAggTimelineCollectorMetrics.getInstance();
}
@After
public void tearDown() {
PerNodeAggTimelineCollectorMetrics.destroy();
}
}

TestTimelineReaderMetrics.java (deleted)

@@ -1,56 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader;
import org.apache.hadoop.yarn.server.timelineservice.metrics.TimelineReaderMetrics;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Test TimelineReaderMetrics.
*/
public class TestTimelineReaderMetrics {
private TimelineReaderMetrics metrics;
@Test
public void testTimelineReaderMetrics() {
Assert.assertNotNull(metrics);
Assert.assertEquals(10,
metrics.getGetEntitiesSuccessLatency().getInterval());
Assert.assertEquals(10,
metrics.getGetEntitiesFailureLatency().getInterval());
Assert.assertEquals(10,
metrics.getGetEntityTypesSuccessLatency().getInterval());
Assert.assertEquals(10,
metrics.getGetEntityTypesFailureLatency().getInterval());
}
@Before
public void setup() {
metrics = TimelineReaderMetrics.getInstance();
}
@After
public void tearDown() {
TimelineReaderMetrics.destroy();
}
}