Example usage for com.amazonaws.services.cloudwatch.model MetricDatum setUnit

List of usage examples for com.amazonaws.services.cloudwatch.model MetricDatum setUnit

Introduction

On this page you can find example usage for com.amazonaws.services.cloudwatch.model MetricDatum setUnit.

Prototype


public void setUnit(StandardUnit unit) 

Source Link

Document

When you are using a Put operation, this defines what unit you want to use when storing the metric.

Usage

From source file:be.dataminded.nifi.plugins.PutCloudWatchCountMetricAndAlarm.java

License:Apache License

/**
 * Reads the merged JSON body of the incoming FlowFile, publishes two CloudWatch metrics
 * (COUNT_&lt;table&gt;: the summed record count, DIFF_&lt;table&gt;: the per-mille deviation from the
 * expected total), and creates/updates a static CloudWatch alarm on the DIFF_ metric.
 * The FlowFile is routed to REL_SUCCESS on success and REL_FAILURE on any error.
 */
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    long totalTableCount = 0;
    long sumCount = 0;
    String tableName = "";
    String schemaName = "";
    String source = "";
    String tenantName = "";

    try (InputStream inputStream = session.read(flowFile)) {

        StringWriter writer = new StringWriter();
        IOUtils.copy(inputStream, writer, "UTF-8");
        String flowFileContent = writer.toString();

        // The MergeContent controller will be configured to append the JSON content with commas.
        // We have to surround this list with square brackets to become a valid JSON Array.
        String jsonContent = "[" + flowFileContent + "]";

        JSONArray jsonArray = new JSONArray(jsonContent);

        ArrayList<Long> counts = new ArrayList<>();
        // Sum the configured count element over every merged JSON object.
        for (Object element : jsonArray) {
            JSONObject o = (JSONObject) element;
            counts.add(o.getLong(context.getProperty(NAME_ELEMENT_TO_SUM).getValue()));
        }
        sumCount = counts.stream().mapToLong(Long::longValue).sum();

        // All merged objects describe the same table; metadata is taken from the first one.
        JSONObject firstElement = (JSONObject) jsonArray.get(0);
        totalTableCount = firstElement.getLong(context.getProperty(NAME_ELEMENT_TOTAL_COUNT).getValue());
        tableName = firstElement.getString(TABLE_NAME);
        schemaName = firstElement.getString(SCHEMA_NAME);
        source = firstElement.getString(SOURCE_NAME);
        tenantName = firstElement.getString(TENANT_NAME);

    } catch (IOException e) {
        // BUG FIX: previously this only logged and fell through, publishing metrics built
        // from empty defaults and leaving the FlowFile untransferred. Route to failure and stop.
        logger.error("Something went wrong when trying to read the flowFile body: " + e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
        return;
    } catch (org.json.JSONException e) {
        logger.error("Something went wrong when trying to parse the JSON body of the flowFile: "
                + e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
        return;
    } catch (Exception e) {
        logger.error("something else went wrong in body processing of this FlowFile: " + e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    try {

        String environment = context.getProperty(ENVIRONMENT).getValue();
        String alarmPrefix = context.getProperty(NAME_PREFIX_ALARM).getValue();

        Map<String, Long> metrics = new HashMap<>();
        // first metric: this is the total count of the records that were exported
        metrics.put("COUNT_", sumCount);
        // second metric: this is the difference between the records exported
        // and the total amount of records counted in the DB, should always be 0 !!!
        // we take a margin into account because we can't be sure there won't be any deletes
        // between counting and executing the queries
        long diff = Math.abs(totalTableCount - sumCount);
        // BUG FIX: the original used long integer division (diff / totalTableCount), which
        // truncates to 0 for any diff smaller than totalTableCount and throws
        // ArithmeticException when totalTableCount is 0. Use floating-point division and
        // guard the zero case explicitly (no expected records: 0 if nothing was exported,
        // otherwise full deviation).
        long diffProcent;
        if (totalTableCount == 0) {
            diffProcent = (diff == 0) ? 0L : 1000L;
        } else {
            diffProcent = Math.round(((double) diff / totalTableCount) * 1000);
        }
        metrics.put("DIFF_", diffProcent);

        ArrayList<Dimension> dimensions = new ArrayList<>();
        dimensions.add(new Dimension().withName("tableName").withValue(tableName));
        dimensions.add(new Dimension().withName("tenantName").withValue(tenantName));
        dimensions.add(new Dimension().withName("sourceName").withValue(source));
        dimensions.add(new Dimension().withName("schemaName").withValue(schemaName));
        dimensions.add(new Dimension().withName("environment").withValue(environment));

        for (Map.Entry<String, Long> metric : metrics.entrySet()) {
            MetricDatum datum = new MetricDatum();
            datum.setMetricName(metric.getKey() + tableName);
            datum.setValue((double) metric.getValue());
            datum.setUnit("Count");
            datum.setDimensions(dimensions);

            final PutMetricDataRequest metricDataRequest = new PutMetricDataRequest().withNamespace("NIFI")
                    .withMetricData(datum);

            putMetricData(metricDataRequest);
        }

        // the alarm we create is a static one that will check if the diff is zero
        String comparisonOperator = context.getProperty(ALARM_COMPARISON_OPERATOR).getValue();
        String alarmStatistic = context.getProperty(ALARM_STATISTIC).getValue();
        String alarmPeriod = context.getProperty(ALARM_PERIOD).getValue();
        String alarmEvaluatePeriods = context.getProperty(ALARM_EVALUATE_PERIODS).getValue();
        String alarmAction = context.getProperty(ALARM_ACTION).getValue();

        PutMetricAlarmRequest putMetricAlarmRequest = new PutMetricAlarmRequest()
                .withMetricName("DIFF_" + tableName)
                .withAlarmName(environment + "_" + alarmPrefix + "_" + "DIFF_" + tableName)
                .withDimensions(dimensions).withComparisonOperator(comparisonOperator).withNamespace("NIFI")
                .withStatistic(alarmStatistic).withPeriod(Integer.parseInt(alarmPeriod))
                .withEvaluationPeriods(Integer.parseInt(alarmEvaluatePeriods)).withThreshold((double) 0)
                //.withTreatMissingData("notBreaching") // aws java SDK has to be upgraded for this
                .withAlarmDescription("The daily Count Alarm for table " + tableName).withActionsEnabled(true)
                .withAlarmActions(alarmAction);
        putAlarmData(putMetricAlarmRequest);

        session.transfer(flowFile, REL_SUCCESS);
        getLogger().info("Successfully published cloudwatch metric for {}", new Object[] { flowFile });
    } catch (final Exception e) {
        getLogger().error("Failed to publish cloudwatch metric for {} due to {}", new Object[] { flowFile, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }

}

From source file:com.netflix.bdp.s3mper.alert.impl.CloudWatchAlertDispatcher.java

License:Apache License

/**
 * Publishes a single data point of 1 (unit: Count) to the configured
 * consistency metric in the configured CloudWatch namespace.
 */
private void sendCloudWatchConsistencyAlert() {
    MetricDatum consistencyDatum = new MetricDatum()
            .withMetricName(cloudWatchConsistencyMetric)
            .withUnit(StandardUnit.Count)
            .withValue(1.0);

    PutMetricDataRequest putRequest = new PutMetricDataRequest()
            .withNamespace(namespace)
            .withMetricData(Collections.singleton(consistencyDatum));

    cloudWatch.putMetricData(putRequest);
}

From source file:com.netflix.bdp.s3mper.alert.impl.CloudWatchAlertDispatcher.java

License:Apache License

/**
 * Publishes a single data point of 1 (unit: Count) to the configured
 * timeout metric in the configured CloudWatch namespace.
 */
private void sendCloudWatchTimeoutAlert() {
    MetricDatum timeoutDatum = new MetricDatum()
            .withMetricName(cloudWatchTimeoutMetric)
            .withUnit(StandardUnit.Count)
            .withValue(1.0);

    PutMetricDataRequest putRequest = new PutMetricDataRequest()
            .withNamespace(namespace)
            .withMetricData(Collections.singleton(timeoutDatum));

    cloudWatch.putMetricData(putRequest);
}

From source file:com.nextdoor.bender.monitoring.cw.CloudwatchReporter.java

License:Apache License

/**
 * Converts bender-internal {@code Stat} objects into CloudWatch metrics and publishes them
 * under the configured namespace, timestamped with the invocation time.
 *
 * @param stats        the stats to publish, one CloudWatch metric each
 * @param invokeTimeMs timestamp (epoch millis) applied to every datum
 * @param tags         tags shared by all metrics, merged with each stat's own tags
 */
@Override
public void write(ArrayList<Stat> stats, long invokeTimeMs, Set<Tag> tags) {
    // Idiom fix: construct the Date directly from the timestamp instead of
    // new Date() followed by setTime().
    Date dt = new Date(invokeTimeMs);

    Collection<Dimension> parentDims = tagsToDimensions(tags);
    List<MetricDatum> metrics = new ArrayList<MetricDatum>();

    /*
     * Create CW metric objects from bender internal Stat objects
     */
    for (Stat stat : stats) {
        /*
         * Dimension are CW's version of metric tags. A conversion must be done.
         */
        Collection<Dimension> metricDims = tagsToDimensions(stat.getTags());
        metricDims.addAll(parentDims);

        MetricDatum metric = new MetricDatum();
        metric.setMetricName(stat.getName());
        // TODO: add units to Stat object SYSTEMS-870
        metric.setUnit(StandardUnit.None);
        metric.setTimestamp(dt);
        metric.setDimensions(metricDims);
        metric.setValue((double) stat.getValue());

        metrics.add(metric);
    }

    /*
     * Not very well documented in java docs but CW only allows 20 metrics at a time.
     */
    List<List<MetricDatum>> chunks = ListUtils.partition(metrics, 20);
    for (List<MetricDatum> chunk : chunks) {
        PutMetricDataRequest req = new PutMetricDataRequest();
        req.withMetricData(chunk);
        req.setNamespace(namespace);

        this.client.putMetricData(req);
    }
}

From source file:org.apache.nifi.processors.aws.cloudwatch.PutCloudWatchMetric.java

License:Apache License

/**
 * Builds a CloudWatch MetricDatum from the processor's (expression-language-evaluated)
 * properties and publishes it. A single value is used when the VALUE property is set;
 * otherwise a StatisticSet (min/max/sample count/sum) is built. Dynamic properties are
 * attached as dimensions. Routes to REL_SUCCESS on success, penalizes and routes to
 * REL_FAILURE on any error.
 */
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    MetricDatum datum = new MetricDatum();

    try {
        datum.setMetricName(context.getProperty(METRIC_NAME).evaluateAttributeExpressions(flowFile).getValue());

        // A direct value takes precedence; otherwise fall back to an aggregated StatisticSet.
        final String rawValue = context.getProperty(VALUE).evaluateAttributeExpressions(flowFile).getValue();
        if (rawValue == null) {
            final StatisticSet statistics = new StatisticSet()
                    .withMaximum(Double.parseDouble(
                            context.getProperty(MAXIMUM).evaluateAttributeExpressions(flowFile).getValue()))
                    .withMinimum(Double.parseDouble(
                            context.getProperty(MINIMUM).evaluateAttributeExpressions(flowFile).getValue()))
                    .withSampleCount(Double.parseDouble(
                            context.getProperty(SAMPLECOUNT).evaluateAttributeExpressions(flowFile).getValue()))
                    .withSum(Double.parseDouble(
                            context.getProperty(SUM).evaluateAttributeExpressions(flowFile).getValue()));
            datum.setStatisticValues(statistics);
        } else {
            datum.setValue(Double.parseDouble(rawValue));
        }

        final String rawTimestamp = context.getProperty(TIMESTAMP).evaluateAttributeExpressions(flowFile)
                .getValue();
        if (rawTimestamp != null) {
            datum.setTimestamp(new Date(Long.parseLong(rawTimestamp)));
        }

        final String rawUnit = context.getProperty(UNIT).evaluateAttributeExpressions(flowFile).getValue();
        if (rawUnit != null) {
            datum.setUnit(rawUnit);
        }

        // Every dynamic (user-defined) property with a non-blank value becomes a dimension.
        if (!dynamicPropertyNames.isEmpty()) {
            final List<Dimension> dimensionList = new ArrayList<>(dynamicPropertyNames.size());
            for (String dynamicName : dynamicPropertyNames) {
                final String dynamicValue = context.getProperty(dynamicName)
                        .evaluateAttributeExpressions(flowFile).getValue();
                if (StringUtils.isNotBlank(dynamicValue)) {
                    dimensionList.add(new Dimension().withName(dynamicName).withValue(dynamicValue));
                }
            }
            datum.withDimensions(dimensionList);
        }

        final PutMetricDataRequest metricDataRequest = new PutMetricDataRequest()
                .withNamespace(context.getProperty(NAMESPACE).evaluateAttributeExpressions(flowFile).getValue())
                .withMetricData(datum);

        putMetricData(metricDataRequest);
        session.transfer(flowFile, REL_SUCCESS);
        getLogger().info("Successfully published cloudwatch metric for {}", new Object[] { flowFile });
    } catch (final Exception e) {
        getLogger().error("Failed to publish cloudwatch metric for {} due to {}", new Object[] { flowFile, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }

}