Example usage for com.amazonaws.services.cloudwatch.model MetricDatum MetricDatum

List of usage examples for com.amazonaws.services.cloudwatch.model MetricDatum MetricDatum

Introduction

On this page you can find example usage for com.amazonaws.services.cloudwatch.model MetricDatum MetricDatum.

Prototype

MetricDatum

Source Link

Usage

From source file:aws.example.cloudwatch.PutMetricData.java

License:Open Source License

public static void main(String[] args) {
    // Publishes a single PAGES_VISITED data point (Unit: None) to the
    // SITE/TRAFFIC namespace, tagged with a UNIQUE_PAGES=URLS dimension.
    final String USAGE = "To run this example, supply a data point:\n" + "Ex: PutMetricData <data_point>\n";

    if (args.length != 1) {
        System.out.println(USAGE);
        System.exit(1);
    }

    Double data_point = Double.parseDouble(args[0]);

    final AmazonCloudWatch cw = AmazonCloudWatchClientBuilder.defaultClient();

    Dimension dimension = new Dimension().withName("UNIQUE_PAGES").withValue("URLS");

    MetricDatum datum = new MetricDatum().withMetricName("PAGES_VISITED").withUnit(StandardUnit.None)
            .withValue(data_point).withDimensions(dimension);

    PutMetricDataRequest request = new PutMetricDataRequest().withNamespace("SITE/TRAFFIC")
            .withMetricData(datum);

    // The PutMetricDataResult carries no useful payload; success is indicated
    // by the call not throwing, so the previously-unused local was removed.
    cw.putMetricData(request);

    System.out.printf("Successfully put data point %f%n", data_point);
}

From source file:be.dataminded.nifi.plugins.PutCloudWatchCountMetricAndAlarm.java

License:Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    // Sums a counted element across the merged JSON records in the FlowFile,
    // publishes COUNT_/DIFF_ CloudWatch metrics per table, and (re)creates a
    // static alarm on the DIFF_ metric. Routes to REL_FAILURE on any error.
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    long totalTableCount = 0;
    long sumCount = 0;
    String tableName = "";
    String schemaName = "";
    String source = "";
    String tenantName = "";

    try (InputStream inputStream = session.read(flowFile)) {

        StringWriter writer = new StringWriter();
        IOUtils.copy(inputStream, writer, "UTF-8");
        String flowFileContent = writer.toString();

        // The MergeContent controller will be configured to append the JSON content with commas
        // We have to surround this list with square brackets to become a valid JSON Array
        String jsonContent = "[" + flowFileContent + "]";

        JSONArray jsonArray = new JSONArray(jsonContent);

        // Sum the configured element across every merged record.
        String sumKey = context.getProperty(NAME_ELEMENT_TO_SUM).getValue();
        for (int i = 0; i < jsonArray.length(); i++) {
            sumCount += jsonArray.getJSONObject(i).getLong(sumKey);
        }

        // Table metadata is identical across records; read it from the first one.
        JSONObject firstElement = jsonArray.getJSONObject(0);
        totalTableCount = firstElement.getLong(context.getProperty(NAME_ELEMENT_TOTAL_COUNT).getValue());
        tableName = firstElement.getString(TABLE_NAME);
        schemaName = firstElement.getString(SCHEMA_NAME);
        source = firstElement.getString(SOURCE_NAME);
        tenantName = firstElement.getString(TENANT_NAME);

    } catch (IOException e) {
        logger.error("Something went wrong when trying to read the flowFile body: " + e.getMessage());
        // BUG FIX: previously fell through and published metrics built from
        // empty/zero values; route to failure and stop instead.
        session.transfer(flowFile, REL_FAILURE);
        return;
    } catch (org.json.JSONException e) {
        logger.error("Something went wrong when trying to parse the JSON body of the flowFile: " + e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
        return;
    } catch (Exception e) {
        logger.error("something else went wrong in body processing of this FlowFile: " + e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
        // BUG FIX: without this return the FlowFile was transferred a second
        // time (to REL_SUCCESS or REL_FAILURE) further below.
        return;
    }

    try {

        String environment = context.getProperty(ENVIRONMENT).getValue();
        String alarmPrefix = context.getProperty(NAME_PREFIX_ALARM).getValue();

        Map<String, Long> metrics = new HashMap<>();
        // first metric: this is the total count of the records that were exported
        metrics.put("COUNT_", sumCount);
        // second metric: this is the difference between the records exported
        // and the total amount of records counted in the DB, should always be 0 !!!
        // we take a margin into account because we can't be sure there won't be any deletes
        // between counting and executing the queries
        long diff = Math.abs(totalTableCount - sumCount);
        // BUG FIX: (diff / totalTableCount) was long integer division, which
        // truncated to 0 whenever diff < totalTableCount, so the DIFF_ metric
        // was always 0 and its alarm could never fire. Also guard against a
        // zero total count, which previously threw ArithmeticException.
        long diffPermille = totalTableCount == 0 ? 0
                : Math.round(((double) diff / totalTableCount) * 1000);
        metrics.put("DIFF_", diffPermille);

        ArrayList<Dimension> dimensions = new ArrayList<>();
        dimensions.add(new Dimension().withName("tableName").withValue(tableName));
        dimensions.add(new Dimension().withName("tenantName").withValue(tenantName));
        dimensions.add(new Dimension().withName("sourceName").withValue(source));
        dimensions.add(new Dimension().withName("schemaName").withValue(schemaName));
        dimensions.add(new Dimension().withName("environment").withValue(environment));

        for (Map.Entry<String, Long> metric : metrics.entrySet()) {
            MetricDatum datum = new MetricDatum();
            datum.setMetricName(metric.getKey() + tableName);
            datum.setValue((double) metric.getValue());
            datum.setUnit("Count");
            datum.setDimensions(dimensions);

            final PutMetricDataRequest metricDataRequest = new PutMetricDataRequest().withNamespace("NIFI")
                    .withMetricData(datum);

            putMetricData(metricDataRequest);
        }

        // the alarm we create is a static one that will check if the diff is zero
        String comparisonOperator = context.getProperty(ALARM_COMPARISON_OPERATOR).getValue();
        String alarmStatistic = context.getProperty(ALARM_STATISTIC).getValue();
        String alarmPeriod = context.getProperty(ALARM_PERIOD).getValue();
        String alarmEvaluatePeriods = context.getProperty(ALARM_EVALUATE_PERIODS).getValue();
        String alarmAction = context.getProperty(ALARM_ACTION).getValue();

        PutMetricAlarmRequest putMetricAlarmRequest = new PutMetricAlarmRequest()
                .withMetricName("DIFF_" + tableName)
                .withAlarmName(environment + "_" + alarmPrefix + "_" + "DIFF_" + tableName)
                .withDimensions(dimensions).withComparisonOperator(comparisonOperator).withNamespace("NIFI")
                .withStatistic(alarmStatistic).withPeriod(Integer.parseInt(alarmPeriod))
                .withEvaluationPeriods(Integer.parseInt(alarmEvaluatePeriods)).withThreshold((double) 0)
                //.withTreatMissingData("notBreaching") // aws java SDK has to be upgraded for this
                .withAlarmDescription("The daily Count Alarm for table " + tableName).withActionsEnabled(true)
                .withAlarmActions(alarmAction);
        putAlarmData(putMetricAlarmRequest);

        session.transfer(flowFile, REL_SUCCESS);
        getLogger().info("Successfully published cloudwatch metric for {}", new Object[] { flowFile });
    } catch (final Exception e) {
        getLogger().error("Failed to publish cloudwatch metric for {} due to {}", new Object[] { flowFile, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }

}

From source file:cloudwatch.src.main.java.aws.example.cloudwatch.PutMetricData.java

License:Open Source License

public static void main(String[] args) {
    // Publishes a single PAGES_VISITED data point (Unit: None) to the
    // SITE/TRAFFIC namespace, tagged with a UNIQUE_PAGES=URLS dimension.
    final String USAGE = "To run this example, supply a data point value\n"
            + "Ex: PutMetricData <data-point-value>\n";

    if (args.length != 1) {
        System.out.println(USAGE);
        System.exit(1);
    }

    Double dataPointValue = Double.parseDouble(args[0]);

    final AmazonCloudWatch cloudWatch = AmazonCloudWatchClientBuilder.defaultClient();

    Dimension dimension = new Dimension().withName("UNIQUE_PAGES").withValue("URLS");

    MetricDatum metricDatum = new MetricDatum().withMetricName("PAGES_VISITED").withUnit(StandardUnit.None)
            .withValue(dataPointValue).withDimensions(dimension);

    PutMetricDataRequest request = new PutMetricDataRequest().withNamespace("SITE/TRAFFIC")
            .withMetricData(metricDatum);

    // The PutMetricDataResult carries no useful payload; success is indicated
    // by the call not throwing, so the previously-unused local was removed.
    cloudWatch.putMetricData(request);

    System.out.printf("Successfully put data point %f%n", dataPointValue);
}

From source file:com.amazon.kinesis.streaming.agent.metrics.AccumulatingMetricsScope.java

License:Open Source License

@Override
protected void realAddData(String name, double value, StandardUnit unit) {
    // Fold one sample into the running StatisticSet kept per metric name.
    MetricDatum existing = data.get(name);
    if (existing == null) {
        // First sample for this name: seed the statistics with the single value.
        StatisticSet seed = new StatisticSet().withMaximum(value).withMinimum(value)
                .withSampleCount(1.0).withSum(value);
        data.put(name, new MetricDatum().withMetricName(name).withUnit(unit).withStatisticValues(seed));
        return;
    }
    // A metric name must keep one unit for its whole accumulation window.
    if (!existing.getUnit().equals(unit.name())) {
        throw new IllegalArgumentException("Cannot add to existing metric with different unit");
    }
    StatisticSet stats = existing.getStatisticValues();
    stats.setMaximum(Math.max(value, stats.getMaximum()));
    stats.setMinimum(Math.min(value, stats.getMinimum()));
    stats.setSampleCount(stats.getSampleCount() + 1);
    stats.setSum(stats.getSum() + value);
}

From source file:com.amediamanager.metrics.MetricAspect.java

License:Apache License

protected MetricDatum newDatum(String service, String operation, long startTime) {
    // Base datum tagged with the service/operation dimensions and the call's start time.
    Dimension svcDimension = new Dimension().withName("Svc").withValue(service);
    Dimension opDimension = new Dimension().withName("Operation").withValue(operation);
    return new MetricDatum()
            .withDimensions(svcDimension, opDimension)
            .withTimestamp(new Date(startTime));
}

From source file:com.basistech.metrics.CloudWatchReporter.java

License:Open Source License

@Override
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
        SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters,
        SortedMap<String, Timer> timers) {
    // Translate each Dropwizard metric into a CloudWatch MetricDatum and
    // publish the whole batch in one PutMetricData call.
    Collection<MetricDatum> batch = new ArrayList<>();

    for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
        Object raw = entry.getValue().getValue();
        if (!(raw instanceof Number)) {
            continue; // non-numeric gauges cannot be represented in CloudWatch
        }
        double val = ((Number) raw).doubleValue();
        if (LOG.isDebugEnabled()) {
            LOG.debug("gauge {} val {}", entry.getKey(), val);
        }
        batch.add(new MetricDatum().withMetricName(entry.getKey()).withValue(val).withDimensions(dimensions));
    }

    for (Map.Entry<String, Counter> entry : counters.entrySet()) {
        long count = entry.getValue().getCount();
        if (LOG.isDebugEnabled()) {
            LOG.debug("counter {} val {}", entry.getKey(), count);
        }
        batch.add(new MetricDatum().withMetricName(entry.getKey()).withValue((double) count)
                .withDimensions(dimensions));
    }

    for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
        reportHistogram(batch, entry);
    }

    for (Map.Entry<String, Timer> entry : timers.entrySet()) {
        reportTimer(entry.getKey(), batch, entry);
    }

    if (batch.isEmpty()) {
        return; // nothing to send this reporting period
    }

    PutMetricDataRequest put = new PutMetricDataRequest();
    put.setNamespace(namespace);
    put.setMetricData(batch);
    try {
        client.putMetricData(put);
    } catch (Throwable t) {
        // Best-effort reporting: a failed publish must not kill the reporter thread.
        LOG.error("Failed to put metrics", t);
    }
}

From source file:com.basistech.metrics.CloudWatchReporter.java

License:Open Source License

private void reportTimer(String key, Collection<MetricDatum> data, Map.Entry<String, Timer> met) {
    // Publish a timer either as individual aggregate datums or as one StatisticSet.
    Timer timer = met.getValue();
    Snapshot snapshot = timer.getSnapshot();

    if (reportAggregates) {
        reportAggregate(key, data, "count", null, timer.getCount());
        reportAggregate(key, data, "rate", "1minute", timer.getOneMinuteRate());
        reportAggregate(key, data, "rate", "5minute", timer.getFiveMinuteRate());
        reportAggregate(key, data, "rate", "15minute", timer.getFifteenMinuteRate());
        reportAggregate(key, data, "rate", "mean", timer.getMeanRate());
        reportSnapshot(data, snapshot, key);
        return;
    }

    // An empty snapshot carries no information; don't bother Amazon with it.
    if (snapshot.size() == 0) {
        return;
    }

    double total = 0;
    for (double sample : snapshot.getValues()) {
        total += sample;
    }

    // Dropwizard timers record nanoseconds, which is not one of Amazon's units;
    // convert the extremes and the sum to microseconds.
    StatisticSet stats = new StatisticSet()
            .withMaximum((double) TimeUnit.NANOSECONDS.toMicros(snapshot.getMax()))
            .withMinimum((double) TimeUnit.NANOSECONDS.toMicros(snapshot.getMin()))
            .withSum((double) TimeUnit.NANOSECONDS.toMicros((long) total))
            .withSampleCount((double) snapshot.getValues().length);
    if (LOG.isDebugEnabled()) {
        LOG.debug("timer {}: {}", met.getKey(), stats);
    }
    data.add(new MetricDatum().withMetricName(met.getKey()).withDimensions(dimensions)
            .withStatisticValues(stats).withUnit(StandardUnit.Microseconds));
}

From source file:com.basistech.metrics.CloudWatchReporter.java

License:Open Source License

private void reportAggregate(String key, Collection<MetricDatum> data, String valDimName, String valDimValue,
        double value) {
    // Copy the shared dimensions and tag on the dimension identifying this aggregate.
    Collection<Dimension> tagged = new ArrayList<>(dimensions);
    tagged.add(new Dimension().withName(valDimName).withValue(valDimValue));
    data.add(new MetricDatum().withMetricName(key).withDimensions(tagged).withValue(value));
}

From source file:com.basistech.metrics.CloudWatchReporter.java

License:Open Source License

private void reportHistogram(Collection<MetricDatum> data, Map.Entry<String, Histogram> meh) {
    // Publish a histogram either via the aggregate path or as one StatisticSet.
    Snapshot snapshot = meh.getValue().getSnapshot();

    if (reportAggregates) {
        reportSnapshot(data, snapshot, meh.getKey());
        return;
    }

    // An empty snapshot carries no information; don't bother Amazon with it.
    if (snapshot.size() == 0) {
        return;
    }

    double total = 0;
    for (double sample : snapshot.getValues()) {
        total += sample;
    }

    StatisticSet stats = new StatisticSet().withMaximum((double) snapshot.getMax())
            .withMinimum((double) snapshot.getMin()).withSum(total)
            .withSampleCount((double) snapshot.getValues().length);
    if (LOG.isDebugEnabled()) {
        LOG.debug("histogram {}: {}", meh.getKey(), stats);
    }
    data.add(new MetricDatum().withMetricName(meh.getKey()).withDimensions(dimensions)
            .withStatisticValues(stats));
}

From source file:com.blacklocus.metrics.DemuxedKey.java

License:Apache License

Iterable<MetricDatum> newDatums(String type, Function<MetricDatum, MetricDatum> datumSpecification) {
    // Every emitted dimension set is prefixed with the (non-permutable) metric-type dimension.
    Dimension typeDimension = new Dimension().withName(CloudWatchReporter.METRIC_TYPE_DIMENSION)
            .withValue(type);
    PermutableChain<Dimension> allDimensions = new PermutableChain<Dimension>(typeDimension, false,
            dimensionChain);

    List<MetricDatum> results = new ArrayList<MetricDatum>();
    for (Iterable<String> segments : nameChain) {
        String metricName = StringUtils.join(segments, " ");
        if (StringUtils.isBlank(metricName)) {
            // When all name segments are permutable, one combination omits every
            // segment; that empty name is expected but cannot be submitted.
            continue;
        }
        for (Iterable<Dimension> dimensionSet : allDimensions) {
            MetricDatum base = new MetricDatum().withMetricName(metricName)
                    .withDimensions(Lists.newArrayList(dimensionSet));
            results.add(datumSpecification.apply(base));
        }
    }
    return results;
}