Usage examples for the org.apache.hadoop.metrics2 MetricsRecord#context() method.
String context();
From source file: com.chocolatefactory.newrelic.plugins.hadoop.NewRelicSink.java
@Override public void putMetrics(MetricsRecord record) { HashMap<String, Float> summaryMetrics = new HashMap<String, Float>(); // Create new one for each record Request request = new Request(context); for (MetricsTag tag : record.tags()) { if ((tag.value() == null) || tag.value().isEmpty()) continue; else if (useInsights) insightsMetrics.put(tag.name().toLowerCase(), tag.value()); }/*w ww. ja va 2s. co m*/ for (AbstractMetric metric : record.metrics()) { if ((metric.value() == null) || (metric.name() == null) || metric.name().isEmpty() || metric.value().toString().isEmpty()) { // NOT skipping "imax" and "imin" metrics, though they are constant and rather large // || metric.name().contains("_imin_") || metric.name().contains("_imax_")) { continue; } String metricName, metricType; String metricHashCode = record.context() + "_" + metric.name(); Float metricValue = metric.value().floatValue(); if (metricNames.containsKey(metricHashCode)) { metricName = metricNames.get(metricHashCode)[0]; metricType = metricNames.get(metricHashCode)[1]; } else { metricName = getMetricName(metric); metricType = getMetricType(metric); metricNames.put(metricHashCode, new String[] { metricName, metricType }); // Get groupings for new metrics only if (debugEnabled && getGroupings) { addMetricGroup(getMetricBaseName(record, categoryName), metricType); addMetricGroup(getMetricBaseName(record, categoryName + div + deltaName), metricType); } } if (useInsights) { insightsMetrics.put(metricName, metricValue); } // Debug // logger.info("metric name: " + metricName); // logger.info("metric type: " + metricType); // logger.info("metric value: " + metricValue); // logger.info("metric hashcode: " + metricHashCode); // If old metric value exists, use it to compute delta. If not, delta is metric value. // In any case, set oldValue to use for next delta. 
Float oldMetricValue = (float) 0; if (oldMetricValues.containsKey(metricHashCode)) { oldMetricValue = oldMetricValues.get(metricHashCode); // logger.info("metric OLD value: " + oldMetricValue); } Float deltaMetricValue = metricValue - oldMetricValue; // logger.info("delta value: " + deltaMetricValue); if (deltaMetricValue < 0.0) { // logger.info("delta is less than 0"); deltaMetricValue = (float) 0; } if (metricValue > 0) { oldMetricValues.put(metricHashCode + "", metricValue); // logger.info("putting value to OLD: " + metricValue); } addMetric(request, getMetricBaseName(record, categoryName) + div + metricName, metric.name(), metricType, metricValue); addMetric(request, getMetricBaseName(record, categoryName + div + deltaName) + div + metricName, metric.name(), metricType, deltaMetricValue); // If this is a metric to be included in summary metrics... include it! if (record.name().equalsIgnoreCase(hadoopProcType) && NewRelicMetrics.HadoopOverviewMetrics.contains(metricType)) { if (!summaryMetrics.containsKey(metricType)) { summaryMetrics.put(metricType, deltaMetricValue); // logger.info("putting NEW summary metric: " + deltaMetricValue); } else { Float newValue = summaryMetrics.get(metricType) + deltaMetricValue; summaryMetrics.put(metricType, newValue); // logger.info("putting UPDATED summary metric: " + newValue); } // Summary metrics are also included in the 2 top graphs in the "Overview" dashboard addMetric(request, getMetricBaseName(record, categoryName + div + overviewName) + div + metricName, metric.name(), metricType, metricValue); addMetric(request, getMetricBaseName(record, categoryName + div + overviewName + "_" + deltaName) + div + metricName, metric.name(), metricType, deltaMetricValue); } } // Get summary metrics, reset each one after output. 
if (!summaryMetrics.isEmpty()) { for (Entry<String, Float> summaryMetric : summaryMetrics.entrySet()) { addMetric(request, categoryName + div + overviewName + div + "total " + summaryMetric.getKey(), summaryMetric.getKey(), summaryMetric.getKey(), summaryMetric.getValue()); } } if (debugEnabled) { logger.info("Debug is enabled on New Relic Hadoop Extension. Metrics will not be sent."); if (getGroupings) { logger.info("Outputting metric groupings from the current Metrics Record."); for (Map.Entry<String, Integer> grouping : metricGroupings.entrySet()) { logger.info(grouping.getKey() + " : " + grouping.getValue()); } } } else { request.deliver(); if (useInsights) { insightsService.submitToInsights(hadoopProcType + "Event", insightsMetrics); } } }
From source file: com.chocolatefactory.newrelic.plugins.hadoop.NewRelicSink.java
/**
 * Builds the base metric path for a record: an optional prefix, the record's
 * context, and — when it differs (case-insensitively) from the context and is
 * non-empty — the record's name, all joined with the configured divider.
 *
 * @param record       the metrics record supplying context and name
 * @param metricPrefix optional leading path segment; may be empty
 * @return the assembled base name for metrics from this record
 */
public String getMetricBaseName(MetricsRecord record, String metricPrefix) {
    // Fixed: removed dead `= ""` initialization that was always overwritten.
    String metricBaseName;
    if (!metricPrefix.isEmpty()) {
        metricBaseName = metricPrefix + div + record.context();
    } else {
        metricBaseName = record.context();
    }
    // Append the record name only when it adds information beyond the context.
    if (!record.context().equalsIgnoreCase(record.name()) && !record.name().isEmpty()) {
        metricBaseName = metricBaseName + div + record.name();
    }
    return metricBaseName;
}