diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
index b33a0f0352..5111da899a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
@@ -41,6 +41,7 @@
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationAttemptEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity;
@@ -54,6 +55,7 @@
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
 import org.apache.hadoop.yarn.api.records.timelineservice.UserEntity;
+import org.apache.hadoop.yarn.server.timelineservice.metrics.PerNodeAggTimelineCollectorMetrics;
 import org.apache.hadoop.yarn.webapp.ForbiddenException;
 import org.apache.hadoop.yarn.webapp.NotFoundException;
@@ -78,6 +80,8 @@ public class TimelineCollectorWebService {
       LoggerFactory.getLogger(TimelineCollectorWebService.class);

   private @Context ServletContext context;
+  private static final PerNodeAggTimelineCollectorMetrics METRICS =
+      PerNodeAggTimelineCollectorMetrics.getInstance();

   /**
    * Gives information about timeline collector.
@@ -152,12 +156,15 @@ public Response putEntities(
       TimelineEntities entities) {
     init(res);
     UserGroupInformation callerUgi = getUser(req);
+    boolean isAsync = async != null && async.trim().equalsIgnoreCase("true");
     if (callerUgi == null) {
       String msg = "The owner of the posted timeline entities is not set";
       LOG.error(msg);
       throw new ForbiddenException(msg);
     }
+    long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     try {
       ApplicationId appID = parseApplicationId(appId);
       if (appID == null) {
@@ -172,7 +179,6 @@ public Response putEntities(
         throw new NotFoundException("Application: "+ appId + " is not found");
       }
-      boolean isAsync = async != null && async.trim().equalsIgnoreCase("true");
       if (isAsync) {
         collector.putEntitiesAsync(processTimelineEntities(entities, appId,
             Boolean.valueOf(isSubAppEntities)), callerUgi);
@@ -181,6 +187,7 @@ public Response putEntities(
             Boolean.valueOf(isSubAppEntities)), callerUgi);
       }
+      succeeded = true;
       return Response.ok().build();
     } catch (NotFoundException | ForbiddenException e) {
       throw new WebApplicationException(e,
@@ -189,6 +196,13 @@ public Response putEntities(
       LOG.error("Error putting entities", e);
       throw new WebApplicationException(e,
           Response.Status.INTERNAL_SERVER_ERROR);
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      if (isAsync) {
+        METRICS.addAsyncPutEntitiesLatency(latency, succeeded);
+      } else {
+        METRICS.addPutEntitiesLatency(latency, succeeded);
+      }
     }
   }
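Reviewer note: the putEntities change above hoists the isAsync computation ahead of the try block (it is needed in the finally block), takes a monotonic start timestamp, flips a success flag only after the write returns, and records the elapsed time in a finally block so that successful, failed, and rejected requests are all measured. A minimal, self-contained sketch of that idiom; the LatencyRecorder type is illustrative, standing in for PerNodeAggTimelineCollectorMetrics:

```java
import org.apache.hadoop.util.Time;

/** Minimal sketch of the try/finally latency pattern used in this patch. */
public final class LatencyPatternSketch {

  /** Hypothetical stand-in for the metrics singleton. */
  interface LatencyRecorder {
    void add(long durationMs, boolean succeeded);
  }

  static void handlePut(Runnable write, LatencyRecorder recorder) {
    long startTime = Time.monotonicNow(); // monotonic: immune to clock changes
    boolean succeeded = false;
    try {
      write.run();      // the actual putEntities work; may throw
      succeeded = true; // only reached when the write did not throw
    } finally {
      // Runs on both success and failure, so every request is counted.
      recorder.add(Time.monotonicNow() - startTime, succeeded);
    }
  }
}
```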
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/PerNodeAggTimelineCollectorMetrics.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/PerNodeAggTimelineCollectorMetrics.java
new file mode 100644
index 0000000000..0da925835a
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/PerNodeAggTimelineCollectorMetrics.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.metrics;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.metrics2.MetricsInfo;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MutableQuantiles;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.apache.hadoop.metrics2.lib.Interns.info;
+
+/**
+ * Metrics class for TimelineCollectorWebService
+ * running on each NM.
+ */
+@Metrics(about = "Aggregated metrics of TimelineCollectors running on each NM",
+    context = "timelineservice")
+final public class PerNodeAggTimelineCollectorMetrics {
+
+  private static final MetricsInfo METRICS_INFO =
+      info("PerNodeAggTimelineCollectorMetrics",
+          "Aggregated Metrics for TimelineCollectors running on each NM");
+  private static AtomicBoolean isInitialized = new AtomicBoolean(false);
+  private static PerNodeAggTimelineCollectorMetrics
+      instance = null;
+
+  @Metric(about = "PUT entities failure latency", valueName = "latency")
+  private MutableQuantiles putEntitiesFailureLatency;
+  @Metric(about = "PUT entities success latency", valueName = "latency")
+  private MutableQuantiles putEntitiesSuccessLatency;
+
+  @Metric(about = "async PUT entities failure latency", valueName = "latency")
+  private MutableQuantiles asyncPutEntitiesFailureLatency;
+  @Metric(about = "async PUT entities success latency", valueName = "latency")
+  private MutableQuantiles asyncPutEntitiesSuccessLatency;
+
+  private PerNodeAggTimelineCollectorMetrics() {
+  }
+
+  public static PerNodeAggTimelineCollectorMetrics getInstance() {
+    if (!isInitialized.get()) {
+      synchronized (PerNodeAggTimelineCollectorMetrics.class) {
+        if (instance == null) {
+          instance =
+              DefaultMetricsSystem.initialize("TimelineService").register(
+                  METRICS_INFO.name(), METRICS_INFO.description(),
+                  new PerNodeAggTimelineCollectorMetrics());
+          isInitialized.set(true);
+        }
+      }
+    }
+    return instance;
+  }
+
+  public synchronized static void destroy() {
+    isInitialized.set(false);
+    instance = null;
+  }
+
+  @VisibleForTesting
+  public MutableQuantiles getPutEntitiesSuccessLatency() {
+    return putEntitiesSuccessLatency;
+  }
+
+  @VisibleForTesting
+  public MutableQuantiles getPutEntitiesFailureLatency() {
+    return putEntitiesFailureLatency;
+  }
+
+  @VisibleForTesting
+  public MutableQuantiles getAsyncPutEntitiesSuccessLatency() {
+    return asyncPutEntitiesSuccessLatency;
+  }
+
+  @VisibleForTesting
+  public MutableQuantiles getAsyncPutEntitiesFailureLatency() {
+    return asyncPutEntitiesFailureLatency;
+  }
+
+  public void addPutEntitiesLatency(
+      long durationMs, boolean succeeded) {
+    if (succeeded) {
+      putEntitiesSuccessLatency.add(durationMs);
+    } else {
+      putEntitiesFailureLatency.add(durationMs);
+    }
+  }
+
+  public void addAsyncPutEntitiesLatency(
+      long durationMs, boolean succeeded) {
+    if (succeeded) {
+      asyncPutEntitiesSuccessLatency.add(durationMs);
+    } else {
+      asyncPutEntitiesFailureLatency.add(durationMs);
+    }
+  }
+}
\ No newline at end of file
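Reviewer note: this class (and TimelineReaderMetrics below, which follows the identical pattern) never assigns its MutableQuantiles fields directly; MetricsSystem.register() runs the metrics2 annotation processor, which instantiates one MutableQuantiles per @Metric field. A stripped-down sketch of the same registration flow, assuming only hadoop-common's metrics2 on the classpath (DemoMetrics and its names are hypothetical, not part of this patch):

```java
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MutableQuantiles;

/** Toy metrics source following the same registration pattern. */
@Metrics(about = "Demo quantile metrics", context = "demo")
final class DemoMetrics {

  @Metric(about = "demo op latency", valueName = "latency")
  private MutableQuantiles opLatency; // injected by register(), never assigned here

  static DemoMetrics create() {
    // register() runs the annotation processor, which instantiates each
    // @Metric MutableQuantiles field with the configured rollover interval.
    return DefaultMetricsSystem.initialize("Demo")
        .register("DemoMetrics", "Demo metrics", new DemoMetrics());
  }

  void record(long durationMs) {
    opLatency.add(durationMs); // folds the sample into the current window
  }
}
```

Each MutableQuantiles publishes estimated 50th/75th/90th/95th/99th percentiles over a rolling window; the tests at the end of this patch assert the default 10-second rollover interval via getInterval().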
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/TimelineReaderMetrics.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/TimelineReaderMetrics.java
new file mode 100644
index 0000000000..3131748d99
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/TimelineReaderMetrics.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.metrics;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.metrics2.MetricsInfo;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MutableQuantiles;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.apache.hadoop.metrics2.lib.Interns.info;
+
+/**
+ * Metrics class for TimelineReader.
+ */
+@Metrics(about = "Metrics for timeline reader", context = "timelineservice")
+final public class TimelineReaderMetrics {
+
+  private final static MetricsInfo METRICS_INFO = info("TimelineReaderMetrics",
+      "Metrics for TimelineReader");
+  private static AtomicBoolean isInitialized = new AtomicBoolean(false);
+  private static TimelineReaderMetrics instance = null;
+
+  @Metric(about = "GET entities failure latency", valueName = "latency")
+  private MutableQuantiles getEntitiesFailureLatency;
+  @Metric(about = "GET entities success latency", valueName = "latency")
+  private MutableQuantiles getEntitiesSuccessLatency;
+
+  @Metric(about = "GET entity types failure latency", valueName = "latency")
+  private MutableQuantiles getEntityTypesFailureLatency;
+  @Metric(about = "GET entity types success latency", valueName = "latency")
+  private MutableQuantiles getEntityTypesSuccessLatency;
+
+  private TimelineReaderMetrics() {
+  }
+
+  public static TimelineReaderMetrics getInstance() {
+    if (!isInitialized.get()) {
+      synchronized (TimelineReaderMetrics.class) {
+        if (instance == null) {
+          instance =
+              DefaultMetricsSystem.initialize("TimelineService").register(
+                  METRICS_INFO.name(), METRICS_INFO.description(),
+                  new TimelineReaderMetrics());
+          isInitialized.set(true);
+        }
+      }
+    }
+    return instance;
+  }
+
+  public synchronized static void destroy() {
+    isInitialized.set(false);
+    instance = null;
+  }
+
+  @VisibleForTesting
+  public MutableQuantiles getGetEntitiesSuccessLatency() {
+    return getEntitiesSuccessLatency;
+  }
+
+  @VisibleForTesting
+  public MutableQuantiles getGetEntitiesFailureLatency() {
+    return getEntitiesFailureLatency;
+  }
+
+  @VisibleForTesting
+  public MutableQuantiles getGetEntityTypesSuccessLatency() {
+    return getEntityTypesSuccessLatency;
+  }
+
+  @VisibleForTesting
+  public MutableQuantiles getGetEntityTypesFailureLatency() {
+    return getEntityTypesFailureLatency;
+  }
+
+  public void addGetEntitiesLatency(
+      long durationMs, boolean succeeded) {
+    if (succeeded) {
+      getEntitiesSuccessLatency.add(durationMs);
+    } else {
+      getEntitiesFailureLatency.add(durationMs);
+    }
+  }
+
+  public void addGetEntityTypesLatency(
+      long durationMs, boolean succeeded) {
+    if (succeeded) {
+      getEntityTypesSuccessLatency.add(durationMs);
+    } else {
+      getEntityTypesFailureLatency.add(durationMs);
+    }
+  }
+}
\ No newline at end of file
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/package-info.java
new file mode 100644
index 0000000000..4cf201cfb3
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/metrics/package-info.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Package org.apache.hadoop.yarn.server.timelineservice.metrics contains
+ * classes to be used across timeline reader and collector.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.yarn.server.timelineservice.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
\ No newline at end of file
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
index 3a4ea2e99e..db48355920 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
@@ -52,6 +52,7 @@
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.timelineservice.metrics.TimelineReaderMetrics;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
@@ -77,6 +78,8 @@ public class TimelineReaderWebServices {
   private static final String QUERY_STRING_SEP = "?";
   private static final String RANGE_DELIMITER = "-";
   private static final String DATE_PATTERN = "yyyyMMdd";
+  private static final TimelineReaderMetrics METRICS =
+      TimelineReaderMetrics.getInstance();

   @VisibleForTesting
   static final ThreadLocal<DateFormat> DATE_FORMAT =
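Reviewer note: every reader handler below gains the same four-line finally block. That stays close to the surrounding code style, but if the duplication is a concern, a small helper along these lines could centralize it (TimedCall is a hypothetical sketch, not part of this patch):

```java
import java.util.concurrent.Callable;
import java.util.function.BiConsumer;

import org.apache.hadoop.util.Time;

/** Hypothetical wrapper for the repeated measure-in-finally idiom. */
final class TimedCall {

  private TimedCall() {
  }

  static <T> T time(Callable<T> body,
      BiConsumer<Long, Boolean> metric) throws Exception {
    long startTime = Time.monotonicNow();
    boolean succeeded = false;
    try {
      T result = body.call();  // the actual read against the backend
      succeeded = true;
      return result;
    } finally {
      // Record latency for both outcomes, exactly as the handlers below do.
      metric.accept(Time.monotonicNow() - startTime, succeeded);
    }
  }
}
```

A call site would then read TimedCall.time(() -> timelineReaderManager.getEntities(...), METRICS::addGetEntitiesLatency), since addGetEntitiesLatency(long, boolean) adapts to BiConsumer&lt;Long, Boolean&gt;.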
LOG.info("Received URL " + url + " from user " + TimelineReaderWebServicesUtils.getUserName(callerUGI)); long startTime = Time.monotonicNow(); + boolean succeeded = false; init(res); TimelineReaderManager timelineReaderManager = getTimelineReaderManager(); Set entities = null; @@ -343,16 +347,19 @@ public Set getEntities( confsToRetrieve, metricsToRetrieve, fields, metricsLimit, metricsTimeStart, metricsTimeEnd)); checkAccessForGenericEntities(entities, callerUGI, entityType); + succeeded = true; } catch (Exception e) { handleException(e, url, startTime, "createdTime start/end or limit or flowrunid"); + } finally { + long latency = Time.monotonicNow() - startTime; + METRICS.addGetEntitiesLatency(latency, succeeded); + LOG.info("Processed URL " + url + + " (Took " + latency + " ms.)"); } - long endTime = Time.monotonicNow(); if (entities == null) { entities = Collections.emptySet(); } - LOG.info("Processed URL " + url + - " (Took " + (endTime - startTime) + " ms.)"); return entities; } @@ -603,6 +610,7 @@ public Set getEntities( LOG.info("Received URL " + url + " from user " + TimelineReaderWebServicesUtils.getUserName(callerUGI)); long startTime = Time.monotonicNow(); + boolean succeeded = false; init(res); TimelineReaderManager timelineReaderManager = getTimelineReaderManager(); Set entities = null; @@ -620,16 +628,19 @@ public Set getEntities( fields, metricsLimit, metricsTimeStart, metricsTimeEnd)); checkAccessForGenericEntities(entities, callerUGI, entityType); + succeeded = true; } catch (Exception e) { handleException(e, url, startTime, "createdTime start/end or limit or flowrunid"); + } finally { + long latency = Time.monotonicNow() - startTime; + METRICS.addGetEntitiesLatency(latency, succeeded); + LOG.info("Processed URL " + url + + " (Took " + latency + " ms.)"); } - long endTime = Time.monotonicNow(); if (entities == null) { entities = Collections.emptySet(); } - LOG.info("Processed URL " + url + - " (Took " + (endTime - startTime) + " ms.)"); return entities; } @@ -697,6 +708,7 @@ public TimelineEntity getEntity( LOG.info("Received URL " + url + " from user " + TimelineReaderWebServicesUtils.getUserName(callerUGI)); long startTime = Time.monotonicNow(); + boolean succeeded = false; init(res); TimelineReaderManager timelineReaderManager = getTimelineReaderManager(); TimelineEntity entity = null; @@ -711,18 +723,21 @@ public TimelineEntity getEntity( confsToRetrieve, metricsToRetrieve, fields, metricsLimit, metricsTimeStart, metricsTimeEnd)); checkAccessForGenericEntity(entity, callerUGI); + succeeded = true; } catch (Exception e) { handleException(e, url, startTime, "flowrunid"); + } finally { + long latency = Time.monotonicNow() - startTime; + METRICS.addGetEntitiesLatency(latency, succeeded); + LOG.info("Processed URL " + url + + " (Took " + latency + " ms.)"); } - long endTime = Time.monotonicNow(); if (entity == null) { LOG.info("Processed URL " + url + " but entity not found" + " (Took " + - (endTime - startTime) + " ms.)"); + (Time.monotonicNow() - startTime) + " ms.)"); throw new NotFoundException("Timeline entity with uid: " + uId + "is not found"); } - LOG.info("Processed URL " + url + - " (Took " + (endTime - startTime) + " ms.)"); return entity; } @@ -889,6 +904,7 @@ public TimelineEntity getEntity( LOG.info("Received URL " + url + " from user " + TimelineReaderWebServicesUtils.getUserName(callerUGI)); long startTime = Time.monotonicNow(); + boolean succeeded = false; init(res); TimelineReaderManager timelineReaderManager = getTimelineReaderManager(); TimelineEntity 
@@ -901,18 +917,21 @@ public TimelineEntity getEntity(
           confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
           metricsTimeStart, metricsTimeEnd));
       checkAccessForGenericEntity(entity, callerUGI);
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "flowrunid");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entity == null) {
       LOG.info("Processed URL " + url + " but entity not found" + " (Took " +
-          (endTime - startTime) + " ms.)");
+          (Time.monotonicNow() - startTime) + " ms.)");
       throw new NotFoundException("Timeline entity {id: " + entityId +
           ", type: " + entityType + " } is not found");
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entity;
   }
@@ -955,6 +974,7 @@ public TimelineEntity getFlowRun(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     TimelineEntity entity = null;
@@ -970,17 +990,20 @@ public TimelineEntity getFlowRun(
       entity = timelineReaderManager.getEntity(context,
           TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
           null, metricsToRetrieve, null, null, null, null));
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "flowrunid");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entity == null) {
       LOG.info("Processed URL " + url + " but flowrun not found (Took " +
-          (endTime - startTime) + " ms.)");
+          (Time.monotonicNow() - startTime) + " ms.)");
       throw new NotFoundException("Flowrun with uid: " + uId + "is not found");
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entity;
   }
@@ -1069,6 +1092,7 @@ public TimelineEntity getFlowRun(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     TimelineEntity entity = null;
@@ -1083,20 +1107,23 @@ public TimelineEntity getFlowRun(
           TimelineReaderWebServicesUtils
               .createTimelineDataToRetrieve(null, metricsToRetrieve, null,
               null, null, null));
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "flowrunid");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entity == null) {
       LOG.info("Processed URL " + url + " but flowrun not found (Took " +
-          (endTime - startTime) + " ms.)");
+          (Time.monotonicNow() - startTime) + " ms.)");
       throw new NotFoundException("Flow run {flow name: " +
           TimelineReaderWebServicesUtils.parseStr(flowName) +
           ", run id: " +
           TimelineReaderWebServicesUtils.parseLongStr(flowRunId) +
           " } is not found");
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entity;
   }
@@ -1161,6 +1188,7 @@ public Set<TimelineEntity> getFlowRuns(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     Set<TimelineEntity> entities = null;
@@ -1179,16 +1207,19 @@ public Set<TimelineEntity> getFlowRuns(
               null, null, null, fromId),
           TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
               null, metricsToRetrieve, fields, null, null, null));
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime,
           "createdTime start/end or limit or fromId");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entities == null) {
       entities = Collections.emptySet();
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entities;
   }
@@ -1316,6 +1347,7 @@ public Set<TimelineEntity> getFlowRuns(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     Set<TimelineEntity> entities = null;
@@ -1335,16 +1367,19 @@ public Set<TimelineEntity> getFlowRuns(
           TimelineReaderWebServicesUtils
               .createTimelineDataToRetrieve(null, metricsToRetrieve,
               fields, null, null, null));
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime,
           "createdTime start/end or limit or fromId");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entities == null) {
       entities = Collections.emptySet();
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entities;
   }
@@ -1455,6 +1490,7 @@ public Set<TimelineEntity> getFlows(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     Set<TimelineEntity> entities = null;
@@ -1470,18 +1506,21 @@ public Set<TimelineEntity> getFlows(
           TimelineEntityType.YARN_FLOW_ACTIVITY.toString(), null, null),
           entityFilters, TimelineReaderWebServicesUtils.
              createTimelineDataToRetrieve(null, null, null, null, null, null));
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "limit");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entities == null) {
       entities = Collections.emptySet();
     } else {
       checkAccess(timelineReaderManager, callerUGI, entities,
           FlowActivityEntity.USER_INFO_KEY, true);
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entities;
   }
@@ -1549,6 +1588,7 @@ public TimelineEntity getApp(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     TimelineEntity entity = null;
@@ -1564,17 +1604,20 @@ public TimelineEntity getApp(
           confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
           metricsTimeStart, metricsTimeEnd));
       checkAccessForAppEntity(entity, callerUGI);
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "flowrunid");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entity == null) {
       LOG.info("Processed URL " + url + " but app not found" + " (Took " +
-          (endTime - startTime) + " ms.)");
+          (Time.monotonicNow() - startTime) + " ms.)");
       throw new NotFoundException("App with uid " + uId + " not found");
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entity;
   }
@@ -1723,6 +1766,7 @@ public TimelineEntity getApp(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     TimelineEntity entity = null;
@@ -1735,17 +1779,20 @@ public TimelineEntity getApp(
           confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
           metricsTimeStart, metricsTimeEnd));
       checkAccessForAppEntity(entity, callerUGI);
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "flowrunid");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entity == null) {
       LOG.info("Processed URL " + url + " but app not found" + " (Took " +
-          (endTime - startTime) + " ms.)");
+          (Time.monotonicNow() - startTime) + " ms.)");
       throw new NotFoundException("App " + appId + " not found");
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entity;
   }
@@ -1856,6 +1903,7 @@ public Set<TimelineEntity> getFlowRunApps(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     Set<TimelineEntity> entities = null;
@@ -1876,16 +1924,19 @@ public Set<TimelineEntity> getFlowRunApps(
           TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
               confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
              metricsTimeStart, metricsTimeEnd));
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime,
           "createdTime start/end or limit or flowrunid");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entities == null) {
       entities = Collections.emptySet();
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entities;
   }
@@ -3262,6 +3313,7 @@ public Set<String> getEntityTypes(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     Set<String> results = null;
@@ -3270,12 +3322,15 @@ public Set<String> getEntityTypes(
           TimelineReaderWebServicesUtils.createTimelineReaderContext(
           clusterId, userId, flowName, flowRunId, appId,
           null, null, null));
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "flowrunid");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntityTypesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return results;
   }
@@ -3343,6 +3398,7 @@ public Set<TimelineEntity> getSubAppEntities(
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     Set<TimelineEntity> entities = null;
@@ -3359,16 +3415,19 @@ public Set<TimelineEntity> getSubAppEntities(
           confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
           metricsTimeStart, metricsTimeEnd));
       checkAccessForSubAppEntities(entities,callerUGI);
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "createdTime start/end or limit");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info("Processed URL " + url +
+          " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entities == null) {
       entities = Collections.emptySet();
     }
-    LOG.info("Processed URL " + url +
-        " (Took " + (endTime - startTime) + " ms.)");
     return entities;
   }
@@ -3414,6 +3473,7 @@ public Set<TimelineEntity> getSubAppEntities(@Context HttpServletRequest req,
     LOG.info("Received URL " + url + " from user " +
         TimelineReaderWebServicesUtils.getUserName(callerUGI));
     long startTime = Time.monotonicNow();
+    boolean succeeded = false;
     init(res);
     TimelineReaderManager timelineReaderManager = getTimelineReaderManager();
     Set<TimelineEntity> entities = null;
@@ -3427,15 +3487,19 @@ public Set<TimelineEntity> getSubAppEntities(@Context HttpServletRequest req,
           confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
           metricsTimeStart, metricsTimeEnd));
       checkAccessForSubAppEntities(entities,callerUGI);
+      succeeded = true;
     } catch (Exception e) {
       handleException(e, url, startTime, "");
+    } finally {
+      long latency = Time.monotonicNow() - startTime;
+      METRICS.addGetEntitiesLatency(latency, succeeded);
+      LOG.info(
+          "Processed URL " + url + " (Took " + latency + " ms.)");
     }
-    long endTime = Time.monotonicNow();
     if (entities == null) {
       entities = Collections.emptySet();
     }
-    LOG.info(
-        "Processed URL " + url + " (Took " + (endTime - startTime) + " ms.)");
+
     return entities;
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeAggTimelineCollectorMetrics.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeAggTimelineCollectorMetrics.java
new file mode 100644
index 0000000000..c9ff303763
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeAggTimelineCollectorMetrics.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.collector;
+
+import org.apache.hadoop.yarn.server.timelineservice.metrics.PerNodeAggTimelineCollectorMetrics;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test PerNodeAggTimelineCollectorMetrics.
+ */
+public class TestPerNodeAggTimelineCollectorMetrics {
+
+  private PerNodeAggTimelineCollectorMetrics metrics;
+
+  @Test
+  public void testTimelineCollectorMetrics() {
+    Assert.assertNotNull(metrics);
+    Assert.assertEquals(10,
+        metrics.getPutEntitiesSuccessLatency().getInterval());
+    Assert.assertEquals(10,
+        metrics.getPutEntitiesFailureLatency().getInterval());
+    Assert.assertEquals(10,
+        metrics.getAsyncPutEntitiesSuccessLatency().getInterval());
+    Assert.assertEquals(10,
+        metrics.getAsyncPutEntitiesFailureLatency().getInterval());
+  }
+
+  @Before
+  public void setup() {
+    metrics = PerNodeAggTimelineCollectorMetrics.getInstance();
+  }
+
+  @After
+  public void tearDown() {
+    PerNodeAggTimelineCollectorMetrics.destroy();
+  }
+}
\ No newline at end of file
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderMetrics.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderMetrics.java
new file mode 100644
index 0000000000..fa74689d9b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderMetrics.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import org.apache.hadoop.yarn.server.timelineservice.metrics.TimelineReaderMetrics;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test TimelineReaderMetrics.
+ */
+public class TestTimelineReaderMetrics {
+
+  private TimelineReaderMetrics metrics;
+
+  @Test
+  public void testTimelineReaderMetrics() {
+    Assert.assertNotNull(metrics);
+    Assert.assertEquals(10,
+        metrics.getGetEntitiesSuccessLatency().getInterval());
+    Assert.assertEquals(10,
+        metrics.getGetEntitiesFailureLatency().getInterval());
+    Assert.assertEquals(10,
+        metrics.getGetEntityTypesSuccessLatency().getInterval());
+    Assert.assertEquals(10,
+        metrics.getGetEntityTypesFailureLatency().getInterval());
+  }
+
+  @Before
+  public void setup() {
+    metrics = TimelineReaderMetrics.getInstance();
+  }
+
+  @After
+  public void tearDown() {
+    TimelineReaderMetrics.destroy();
+  }
+}
\ No newline at end of file
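Reviewer note: both tests pin the quantile rollover interval at 10 seconds, the metrics2 default for @Metric quantiles, and reset the singleton in tearDown() so later getInstance() calls in the same JVM start from a clean state. A compact sketch of that lifecycle outside JUnit, using only methods introduced by this patch (the 5 ms sample is illustrative):

```java
import org.apache.hadoop.yarn.server.timelineservice.metrics.TimelineReaderMetrics;

/** Sketch of the singleton lifecycle the tests above rely on. */
public final class MetricsLifecycleSketch {

  public static void main(String[] args) {
    TimelineReaderMetrics metrics = TimelineReaderMetrics.getInstance();

    // Record one fake successful read taking 5 ms; the sample is folded
    // into the current quantile window.
    metrics.addGetEntitiesLatency(5, true);

    // getInterval() reports the rollover interval, 10 seconds by default.
    System.out.println(metrics.getGetEntitiesSuccessLatency().getInterval());

    // Reset the singleton, as the tests' tearDown() does, so state from
    // one run does not leak into the next within the same JVM.
    TimelineReaderMetrics.destroy();
  }
}
```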