Example usage for org.apache.hadoop.metrics2 MetricsTag.value()

List of usage examples for the org.apache.hadoop.metrics2 MetricsTag.value() method

Introduction

On this page you can find example usages of the org.apache.hadoop.metrics2 MetricsTag.value() method.

Prototype

String value()
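
A minimal sketch of how a sink typically reads tag values from a MetricsRecord (the TagValuePrinter class below is hypothetical, not taken from the examples that follow):

import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;

public class TagValuePrinter {
    // Print every non-empty tag value carried by a metrics record.
    public static void printTags(MetricsRecord record) {
        for (MetricsTag tag : record.tags()) {
            String value = tag.value(); // value() may return null or an empty string
            if (value != null && !value.isEmpty()) {
                System.out.println(tag.name() + " = " + value);
            }
        }
    }
}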


Usage

From source file:com.chocolatefactory.newrelic.plugins.hadoop.NewRelicSink.java

@Override
public void putMetrics(MetricsRecord record) {
    HashMap<String, Float> summaryMetrics = new HashMap<String, Float>(); // Create new one for each record

    Request request = new Request(context);
    for (MetricsTag tag : record.tags()) {
        if ((tag.value() == null) || tag.value().isEmpty())
            continue;
        else if (useInsights)
            insightsMetrics.put(tag.name().toLowerCase(), tag.value());
    }

    for (AbstractMetric metric : record.metrics()) {

        if ((metric.value() == null) || (metric.name() == null) || metric.name().isEmpty()
                || metric.value().toString().isEmpty()) {
            // NOT skipping "imax" and "imin" metrics, though they are constant and rather large
            // || metric.name().contains("_imin_") || metric.name().contains("_imax_")) {
            continue;
        }

        String metricName, metricType;
        String metricHashCode = record.context() + "_" + metric.name();
        Float metricValue = metric.value().floatValue();
        if (metricNames.containsKey(metricHashCode)) {
            metricName = metricNames.get(metricHashCode)[0];
            metricType = metricNames.get(metricHashCode)[1];
        } else {
            metricName = getMetricName(metric);
            metricType = getMetricType(metric);
            metricNames.put(metricHashCode, new String[] { metricName, metricType });

            // Get groupings for new metrics only
            if (debugEnabled && getGroupings) {
                addMetricGroup(getMetricBaseName(record, categoryName), metricType);
                addMetricGroup(getMetricBaseName(record, categoryName + div + deltaName), metricType);
            }
        }

        if (useInsights) {
            insightsMetrics.put(metricName, metricValue);
        }

        // Debug
        // logger.info("metric name: " + metricName);
        // logger.info("metric type: " + metricType);
        // logger.info("metric value: " + metricValue);
        // logger.info("metric hashcode: " + metricHashCode);
        // If old metric value exists, use it to compute delta. If not, delta is metric value.
        // In any case, set oldValue to use for next delta.
        Float oldMetricValue = (float) 0;
        if (oldMetricValues.containsKey(metricHashCode)) {
            oldMetricValue = oldMetricValues.get(metricHashCode);
            // logger.info("metric OLD value: " + oldMetricValue);
        }

        Float deltaMetricValue = metricValue - oldMetricValue;
        // logger.info("delta value: " + deltaMetricValue);
        if (deltaMetricValue < 0.0) {
            // logger.info("delta is less than 0");
            deltaMetricValue = (float) 0;
        }

        if (metricValue > 0) {
            oldMetricValues.put(metricHashCode + "", metricValue);
            // logger.info("putting value to OLD: " + metricValue);
        }

        addMetric(request, getMetricBaseName(record, categoryName) + div + metricName, metric.name(),
                metricType, metricValue);
        addMetric(request, getMetricBaseName(record, categoryName + div + deltaName) + div + metricName,
                metric.name(), metricType, deltaMetricValue);

        // If this is a metric to be included in summary metrics... include it!
        if (record.name().equalsIgnoreCase(hadoopProcType)
                && NewRelicMetrics.HadoopOverviewMetrics.contains(metricType)) {
            if (!summaryMetrics.containsKey(metricType)) {
                summaryMetrics.put(metricType, deltaMetricValue);
                // logger.info("putting NEW summary metric: " + deltaMetricValue);
            } else {
                Float newValue = summaryMetrics.get(metricType) + deltaMetricValue;
                summaryMetrics.put(metricType, newValue);
                // logger.info("putting UPDATED summary metric: " + newValue);
            }

            // Summary metrics are also included in the 2 top graphs in the "Overview" dashboard
            addMetric(request, getMetricBaseName(record, categoryName + div + overviewName) + div + metricName,
                    metric.name(), metricType, metricValue);
            addMetric(request, getMetricBaseName(record, categoryName + div + overviewName + "_" + deltaName)
                    + div + metricName, metric.name(), metricType, deltaMetricValue);
        }
    }

    // Get summary metrics, reset each one after output.
    if (!summaryMetrics.isEmpty()) {
        for (Entry<String, Float> summaryMetric : summaryMetrics.entrySet()) {
            addMetric(request, categoryName + div + overviewName + div + "total " + summaryMetric.getKey(),
                    summaryMetric.getKey(), summaryMetric.getKey(), summaryMetric.getValue());
        }
    }

    if (debugEnabled) {
        logger.info("Debug is enabled on New Relic Hadoop Extension. Metrics will not be sent.");
        if (getGroupings) {
            logger.info("Outputting metric groupings from the current Metrics Record.");
            for (Map.Entry<String, Integer> grouping : metricGroupings.entrySet()) {
                logger.info(grouping.getKey() + " : " + grouping.getValue());
            }
        }
    } else {
        request.deliver();
        if (useInsights) {
            insightsService.submitToInsights(hadoopProcType + "Event", insightsMetrics);
        }
    }
}
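
The NewRelicSink above caches the previous value of each metric in oldMetricValues so that each putMetrics() call can report a delta. A standalone sketch of that pattern (DeltaTracker and its method name are hypothetical):

import java.util.HashMap;
import java.util.Map;

public class DeltaTracker {
    private final Map<String, Float> oldValues = new HashMap<String, Float>();

    // Return the change since the last reported value for this key, clamped
    // at zero to mirror the sink's handling of counter resets, and remember
    // the current value for the next call (as the sink does for values > 0).
    public float delta(String metricKey, float currentValue) {
        Float previous = oldValues.get(metricKey);
        float d = currentValue - (previous == null ? 0f : previous);
        if (d < 0f) {
            d = 0f;
        }
        if (currentValue > 0f) {
            oldValues.put(metricKey, currentValue);
        }
        return d;
    }
}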

From source file:io.warp10.sensision.hadoop.SensisionSink.java

License:Apache License

@Override
public void putMetrics(MetricsRecord record) {
    Map<String, String> labels = new HashMap<String, String>();
    for (MetricsTag tag : record.tags()) {
        if (null != tag.value()) {
            labels.put(tag.name(), tag.value());
        }
    }

    StringBuilder sb = new StringBuilder();

    for (AbstractMetric metric : record.metrics()) {
        sb.setLength(0);
        sb.append(this.prefix);
        sb.append(metric.name());
        String cls = sb.toString();
        Sensision.set(cls, labels, record.timestamp() * Sensision.TIME_UNITS_PER_MS, null, null, null,
                metric.value(), ttl);
    }
}

From source file:org.apache.phoenix.trace.PhoenixMetricsSink.java

License:Apache License

/**
 * Add a new metric record to be written.
 *
 * @param record
 */
@Override
public void putMetrics(MetricsRecord record) {
    // it's not a tracing record, we are done. This could also be handled by filters, but safer
    // to do it here, in case it gets misconfigured
    if (!record.name().startsWith(TracingUtils.METRIC_SOURCE_KEY)) {
        return;
    }

    // don't initialize until we actually have something to write
    lazyInitialize();

    String stmt = "UPSERT INTO " + table + " (";
    // drop it into the queue of things that should be written
    List<String> keys = new ArrayList<String>();
    List<Object> values = new ArrayList<Object>();
    // we need to keep variable values in a separate set since they may have spaces, which
    // causes the parser to barf. Instead, we need to add them after the statement is prepared
    List<String> variableValues = new ArrayList<String>(record.tags().size());
    keys.add(TRACE.columnName);
    values.add(Long.parseLong(record.name().substring(TracingUtils.METRIC_SOURCE_KEY.length())));

    keys.add(DESCRIPTION.columnName);
    values.add(VARIABLE_VALUE);
    variableValues.add(record.description());

    // add each of the metrics
    for (AbstractMetric metric : record.metrics()) {
        // name of the metric is also the column name to which we write
        keys.add(MetricInfo.getColumnName(metric.name()));
        values.add(metric.value());
    }

    // get the tags out so we can set them later (otherwise, need to be a single value)
    int annotationCount = 0;
    int tagCount = 0;
    for (MetricsTag tag : record.tags()) {
        if (tag.name().equals(ANNOTATION.traceName)) {
            addDynamicEntry(keys, values, variableValues, ANNOTATION_FAMILY, tag, ANNOTATION, annotationCount);
            annotationCount++;
        } else if (tag.name().equals(TAG.traceName)) {
            addDynamicEntry(keys, values, variableValues, TAG_FAMILY, tag, TAG, tagCount);
            tagCount++;
        } else if (tag.name().equals(HOSTNAME.traceName)) {
            keys.add(HOSTNAME.columnName);
            values.add(VARIABLE_VALUE);
            variableValues.add(tag.value());
        } else if (tag.name().equals("Context")) {
            // ignored
        } else {
            LOG.error("Got an unexpected tag: " + tag);
        }
    }

    // add the tag count, now that we know it
    keys.add(TAG_COUNT);
    // ignore the hostname in the tags, if we know it
    values.add(tagCount);

    keys.add(ANNOTATION_COUNT);
    values.add(annotationCount);

    // compile the statement together
    stmt += COMMAS.join(keys);
    stmt += ") VALUES (" + COMMAS.join(values) + ")";

    if (LOG.isTraceEnabled()) {
        LOG.trace("Logging metrics to phoenix table via: " + stmt);
        LOG.trace("With tags: " + variableValues);
    }
    try {
        PreparedStatement ps = conn.prepareStatement(stmt);
        // add everything that wouldn't/may not parse
        int index = 1;
        for (String tag : variableValues) {
            ps.setString(index++, tag);
        }
        // Not going through the standard route of using statement.execute() as that code path
        // is blocked if the metadata hasn't been upgraded to the new minor release.
        MutationPlan plan = ps.unwrap(PhoenixPreparedStatement.class).compileMutation(stmt);
        MutationState state = conn.unwrap(PhoenixConnection.class).getMutationState();
        MutationState newState = plan.execute();
        state.join(newState);
    } catch (SQLException e) {
        LOG.error("Could not write metric: \n" + record + " to prepared statement:\n" + stmt, e);
    }
}
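
The UPSERT above is assembled in two passes: most values are joined directly into the SQL text, while values that may contain spaces are added as placeholders and bound afterwards. A simplified sketch of that two-pass assembly (the trace_metrics table and column names are hypothetical, and unlike the original this sketch goes through the standard PreparedStatement.execute() path):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class TraceUpsertSketch {
    public static void write(Connection conn, long traceId, String description) throws SQLException {
        List<String> keys = new ArrayList<String>();
        List<Object> values = new ArrayList<Object>();
        List<String> variableValues = new ArrayList<String>();

        keys.add("trace_id");
        values.add(traceId);             // numeric literal, safe to inline
        keys.add("description");
        values.add("?");                 // placeholder, bound below
        variableValues.add(description); // may contain spaces

        StringBuilder stmt = new StringBuilder("UPSERT INTO trace_metrics (");
        for (int i = 0; i < keys.size(); i++) {
            if (i > 0) stmt.append(", ");
            stmt.append(keys.get(i));
        }
        stmt.append(") VALUES (");
        for (int i = 0; i < values.size(); i++) {
            if (i > 0) stmt.append(", ");
            stmt.append(values.get(i));
        }
        stmt.append(")");

        PreparedStatement ps = conn.prepareStatement(stmt.toString());
        try {
            int index = 1;
            for (String v : variableValues) {
                ps.setString(index++, v);
            }
            ps.execute();
        } finally {
            ps.close();
        }
    }
}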

From source file:org.apache.phoenix.trace.PhoenixMetricsSink.java

License:Apache License

private void addDynamicEntry(List<String> keys, List<Object> values, List<String> variableValues, String family,
        MetricsTag tag, MetricInfo metric, int count) {
    // <family><.dynColumn><count> <VARCHAR>
    keys.add(getDynamicColumnName(family, metric.columnName, count) + " VARCHAR");

    // build the annotation value
    String val = tag.description() + " - " + tag.value();
    values.add(VARIABLE_VALUE);
    variableValues.add(val);
}
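
The "<family><.dynColumn><count> <VARCHAR>" comment suggests how the dynamic column names are formed. A hypothetical reconstruction of getDynamicColumnName based only on that comment (the real Phoenix implementation may differ):

public class DynamicColumnSketch {
    // e.g. family "a", column "annotation", count 2 -> "a.annotation2"
    static String getDynamicColumnName(String family, String column, int count) {
        return family + "." + column + count;
    }

    public static void main(String[] args) {
        System.out.println(getDynamicColumnName("a", "annotation", 2) + " VARCHAR");
    }
}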

From source file:org.apache.phoenix.trace.PhoenixMetricsWriterTest.java

License:Apache License

@Test
public void testTranslation() throws Exception {
    // hook up a sink we can test
    MetricsWriter mockSink = Mockito.mock(MetricsWriter.class);

    // writer that will translate to the sink (specific to hadoop version used)
    PhoenixMetricsSink writer = new PhoenixMetricsSink();
    writer.setWriterForTesting(mockSink);

    // create a simple metrics record
    final long traceid = 987654;
    MetricsInfo info = new ExposedMetricsInfoImpl(TracingCompat.getTraceMetricName(traceid),
            "Some generic trace");
    // setup some metrics for the span
    long spanid = 10;
    AbstractMetric span = new ExposedMetricCounterLong(
            new ExposedMetricsInfoImpl(MetricInfo.SPAN.traceName, ""), spanid);
    long parentid = 11;
    AbstractMetric parent = new ExposedMetricCounterLong(
            new ExposedMetricsInfoImpl(MetricInfo.PARENT.traceName, ""), parentid);
    long startTime = 12;
    AbstractMetric start = new ExposedMetricCounterLong(
            new ExposedMetricsInfoImpl(MetricInfo.START.traceName, ""), startTime);
    long endTime = 13;
    AbstractMetric end = new ExposedMetricCounterLong(new ExposedMetricsInfoImpl(MetricInfo.END.traceName, ""),
            endTime);
    final List<AbstractMetric> metrics = Lists.newArrayList(span, parent, start, end);

    // create an annotation as well
    String annotation = "test annotation for a span";
    MetricsTag tag = new MetricsTag(new ExposedMetricsInfoImpl(MetricInfo.ANNOTATION.traceName, "0"),
            annotation);
    String hostnameValue = "host-name.value";
    MetricsTag hostname = new MetricsTag(new ExposedMetricsInfoImpl(MetricInfo.HOSTNAME.traceName, ""),
            hostnameValue);
    final List<MetricsTag> tags = Lists.newArrayList(hostname, tag);

    MetricsRecord record = new ExposedMetricsRecordImpl(info, System.currentTimeMillis(), tags, metrics);

    // setup the mocking/validation for the sink
    Mockito.doAnswer(new Answer<Void>() {

        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            PhoenixMetricsRecord record = (PhoenixMetricsRecord) invocation.getArguments()[0];
            //validate that we got the right fields in the record
            assertEquals("phoenix.987654", record.name());
            assertEquals("Some generic trace", record.description());
            int count = 0;
            for (PhoenixAbstractMetric metric : record.metrics()) {
                count++;
                //find the matching metric in the list
                boolean found = false;
                for (AbstractMetric expected : metrics) {
                    if (expected.name().equals(metric.getName())) {
                        found = true;
                        // make sure the rest of the info matches
                        assertEquals("Metric value mismatch", expected.value(), metric.value());
                    }
                }
                assertTrue("Didn't find an expected metric to match " + metric, found);
            }
            assertEquals("Number of metrics is received is wrong", metrics.size(), count);

            count = 0;
            for (PhoenixMetricTag tag : record.tags()) {
                count++;
                // find the matching metric in the list
                boolean found = false;
                for (MetricsTag expected : tags) {
                    if (expected.name().equals(tag.name())) {
                        found = true;
                        // make sure the rest of the info matches
                        assertEquals("Tag value mismatch", expected.value(), tag.value());
                        assertEquals("Tag description mismatch", expected.description(), tag.description());
                    }
                }
                assertTrue("Didn't find an expected metric to match " + tag, found);
            }
            assertEquals("Number of tags is received is wrong", tags.size(), count);
            return null;
        }

    }).when(mockSink).addMetrics(Mockito.any(PhoenixMetricsRecord.class));

    // actually do the update
    writer.putMetrics(record);
    writer.flush();

    Mockito.verify(mockSink).addMetrics(Mockito.any(PhoenixMetricsRecord.class));
    Mockito.verify(mockSink).flush();
}
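
The test builds MetricsTag instances through a local ExposedMetricsInfoImpl helper. Hadoop's own org.apache.hadoop.metrics2.lib.Interns can construct a tag without a custom MetricsInfo implementation; a minimal sketch (the example values are made up):

import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.lib.Interns;

public class InternedTagExample {
    public static void main(String[] args) {
        // Interns.tag(name, description, value) returns a MetricsTag backed by interned info.
        MetricsTag hostname = Interns.tag("Hostname", "host of the span", "host-name.value");
        System.out.println(hostname.name() + " = " + hostname.value());
    }
}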

From source file:org.apache.phoenix.trace.PhoenixTraceReaderIT.java

License:Apache License

/**
 * @param records
 * @param trace
 */
private void validateTrace(List<MetricsRecord> records, TraceHolder trace) {
    // drop each span into a sorted list so we get the expected ordering
    Iterator<SpanInfo> spanIter = trace.spans.iterator();
    for (MetricsRecord record : records) {
        SpanInfo spanInfo = spanIter.next();
        LOG.info("Checking span:\n" + spanInfo);
        Iterator<AbstractMetric> metricIter = record.metrics().iterator();
        assertEquals("Got an unexpected span id", metricIter.next().value(), spanInfo.id);
        long parentId = (Long) metricIter.next().value();
        if (parentId == Span.ROOT_SPAN_ID) {
            assertNull("Got a parent, but it was a root span!", spanInfo.parent);
        } else {
            assertEquals("Got an unexpected parent span id", parentId, spanInfo.parent.id);
        }
        assertEquals("Got an unexpected start time", metricIter.next().value(), spanInfo.start);
        assertEquals("Got an unexpected end time", metricIter.next().value(), spanInfo.end);

        Iterator<MetricsTag> tags = record.tags().iterator();

        int annotationCount = 0;
        while (tags.hasNext()) {
            // hostname is a tag, so we differentiate it
            MetricsTag tag = tags.next();
            if (tag.name().equals(MetricInfo.HOSTNAME.traceName)) {
                assertEquals("Didn't store correct hostname value", tag.value(), spanInfo.hostname);
            } else {
                int count = annotationCount++;
                assertEquals("Didn't get expected annotation", count + " - " + tag.value(),
                        spanInfo.annotations.get(count));
            }
        }
        assertEquals("Didn't get expected number of annotations", annotationCount, spanInfo.annotationCount);
    }
}