Example usage for com.amazonaws.services.cloudwatch.model MetricDatum setDimensions

List of usage examples for com.amazonaws.services.cloudwatch.model MetricDatum setDimensions

Introduction

In this page you can find the example usage for com.amazonaws.services.cloudwatch.model MetricDatum setDimensions.

Prototype


public void setDimensions(java.util.Collection<Dimension> dimensions) 

Source Link

Document

The dimensions associated with the metric.

Usage

From source file:be.dataminded.nifi.plugins.PutCloudWatchCountMetricAndAlarm.java

License: Apache License

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    long totalTableCount = 0;
    long sumCount = 0;
    String tableName = "";
    String schemaName = "";
    String source = "";
    String tenantName = "";

    try (InputStream inputStream = session.read(flowFile)) {

        StringWriter writer = new StringWriter();
        IOUtils.copy(inputStream, writer, "UTF-8");
        String flowFileContent = writer.toString();

        // The MergeContent controller will be configured to append the JSON content with commas.
        // We have to surround this list with square brackets to become a valid JSON Array.
        String jsonContent = "[" + flowFileContent + "]";

        JSONArray jsonArray = new JSONArray(jsonContent);

        ArrayList<Long> counts = new ArrayList<>();
        // Sum the configured count element over every object in the merged array.
        for (Object element : jsonArray) {
            JSONObject o = (JSONObject) element;
            counts.add(o.getLong(context.getProperty(NAME_ELEMENT_TO_SUM).getValue()));
        }
        sumCount = counts.stream().mapToLong(Long::longValue).sum();

        // All merged fragments describe the same table, so the metadata can be
        // taken from the first element.
        JSONObject firstElement = (JSONObject) jsonArray.get(0);
        totalTableCount = firstElement.getLong(context.getProperty(NAME_ELEMENT_TOTAL_COUNT).getValue());
        tableName = firstElement.getString(TABLE_NAME);
        schemaName = firstElement.getString(SCHEMA_NAME);
        source = firstElement.getString(SOURCE_NAME);
        tenantName = firstElement.getString(TENANT_NAME);

    } catch (IOException e) {
        logger.error("Something went wrong when trying to read the flowFile body: " + e.getMessage());
        // BUG FIX: previously execution fell through and published zero-valued
        // metrics on REL_SUCCESS; route to failure and stop instead.
        session.transfer(flowFile, REL_FAILURE);
        return;
    } catch (org.json.JSONException e) {
        logger.error("Something went wrong when trying to parse the JSON body of the flowFile: " + e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
        return;
    } catch (Exception e) {
        logger.error("something else went wrong in body processing of this FlowFile: " + e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
        // BUG FIX: without this return the FlowFile was transferred a second
        // time (to REL_SUCCESS or REL_FAILURE) further down, which is illegal.
        return;
    }

    try {

        String environment = context.getProperty(ENVIRONMENT).getValue();
        String alarmPrefix = context.getProperty(NAME_PREFIX_ALARM).getValue();

        Map<String, Long> metrics = new HashMap<>();
        // first metric: this is the total count of the records that were exported
        metrics.put("COUNT_", sumCount);
        // second metric: this is the difference between the records exported
        // and the total amount of records counted in the DB, should always be 0 !!!
        // we take a margin into account because we can't be sure there won't be any deletes
        // between counting and executing the queries
        long diff = Math.abs(totalTableCount - sumCount);
        // BUG FIX: the original computed (diff / totalTableCount) * 1000 in pure
        // long arithmetic, so the ratio truncated to 0 for any diff smaller than
        // the table count. Compute in floating point, and guard against a zero
        // table count to avoid an ArithmeticException.
        double diffProcent = totalTableCount == 0 ? 0 : Math.round((diff * 1000.0) / totalTableCount);
        metrics.put("DIFF_", (long) diffProcent);

        ArrayList<Dimension> dimensions = new ArrayList<>();
        dimensions.add(new Dimension().withName("tableName").withValue(tableName));
        dimensions.add(new Dimension().withName("tenantName").withValue(tenantName));
        dimensions.add(new Dimension().withName("sourceName").withValue(source));
        dimensions.add(new Dimension().withName("schemaName").withValue(schemaName));
        dimensions.add(new Dimension().withName("environment").withValue(environment));

        // Publish one CloudWatch datum per metric, all sharing the same dimensions.
        for (Map.Entry<String, Long> metric : metrics.entrySet()) {
            MetricDatum datum = new MetricDatum();
            datum.setMetricName(metric.getKey() + tableName);
            datum.setValue((double) metric.getValue());
            datum.setUnit("Count");
            datum.setDimensions(dimensions);

            final PutMetricDataRequest metricDataRequest = new PutMetricDataRequest().withNamespace("NIFI")
                    .withMetricData(datum);

            putMetricData(metricDataRequest);
        }

        // the alarm we create is a static one that will check if the diff is zero
        String comparisonOperator = context.getProperty(ALARM_COMPARISON_OPERATOR).getValue();
        String alarmStatistic = context.getProperty(ALARM_STATISTIC).getValue();
        String alarmPeriod = context.getProperty(ALARM_PERIOD).getValue();
        String alarmEvaluatePeriods = context.getProperty(ALARM_EVALUATE_PERIODS).getValue();
        String alarmAction = context.getProperty(ALARM_ACTION).getValue();

        PutMetricAlarmRequest putMetricAlarmRequest = new PutMetricAlarmRequest()
                .withMetricName("DIFF_" + tableName)
                .withAlarmName(environment + "_" + alarmPrefix + "_" + "DIFF_" + tableName)
                .withDimensions(dimensions).withComparisonOperator(comparisonOperator).withNamespace("NIFI")
                .withStatistic(alarmStatistic).withPeriod(Integer.parseInt(alarmPeriod))
                .withEvaluationPeriods(Integer.parseInt(alarmEvaluatePeriods)).withThreshold((double) 0)
                //.withTreatMissingData("notBreaching") // aws java SDK has to be upgraded for this
                .withAlarmDescription("The daily Count Alarm for table " + tableName).withActionsEnabled(true)
                .withAlarmActions(alarmAction);
        putAlarmData(putMetricAlarmRequest);

        session.transfer(flowFile, REL_SUCCESS);
        getLogger().info("Successfully published cloudwatch metric for {}", new Object[] { flowFile });
    } catch (final Exception e) {
        getLogger().error("Failed to publish cloudwatch metric for {} due to {}", new Object[] { flowFile, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }

}

From source file:com.amazon.kinesis.streaming.agent.metrics.CWMetricsScope.java

License: Open Source License

@Override
protected void realCommit() {
    // Nothing to publish when no metrics were recorded in this scope.
    if (data.isEmpty()) {
        return;
    }
    List<MetricDatumWithKey<CWMetricKey>> keyedData = new ArrayList<MetricDatumWithKey<CWMetricKey>>();
    for (MetricDatum metricDatum : data.values()) {
        // Every datum emitted by this scope carries the scope-level dimensions.
        metricDatum.setDimensions(getDimensions());
        keyedData.add(new MetricDatumWithKey<CWMetricKey>(new CWMetricKey(metricDatum), metricDatum));
    }
    publisher.enqueue(keyedData);
}

From source file:com.nextdoor.bender.monitoring.cw.CloudwatchReporter.java

License: Apache License

@Override
public void write(ArrayList<Stat> stats, long invokeTimeMs, Set<Tag> tags) {
    // All datums in this batch share the invocation timestamp.
    Date timestamp = new Date();
    timestamp.setTime(invokeTimeMs);

    Collection<Dimension> sharedDimensions = tagsToDimensions(tags);
    List<MetricDatum> data = new ArrayList<MetricDatum>();

    /*
     * Convert each internal Stat into a CloudWatch MetricDatum. Dimensions are
     * CloudWatch's version of metric tags, so each Stat's own tags are converted
     * and combined with the reporter-level tags.
     */
    for (Stat stat : stats) {
        Collection<Dimension> dims = tagsToDimensions(stat.getTags());
        dims.addAll(sharedDimensions);

        MetricDatum datum = new MetricDatum().withMetricName(stat.getName())
                // TODO: add units to Stat object SYSTEMS-870
                .withUnit(StandardUnit.None).withTimestamp(timestamp)
                .withValue((double) stat.getValue());
        datum.setDimensions(dims);
        data.add(datum);
    }

    /*
     * Not very well documented in java docs but CW only allows 20 metrics at a time,
     * so the datums are pushed in chunks of at most 20.
     */
    for (List<MetricDatum> chunk : ListUtils.partition(data, 20)) {
        PutMetricDataRequest request = new PutMetricDataRequest();
        request.setNamespace(namespace);
        request.withMetricData(chunk);
        this.client.putMetricData(request);
    }
}

From source file:com.pinterest.arcee.autoscaling.AwsAlarmManager.java

License: Apache License

@Override
public void putMetricsToAlarm(String groupName, String metricName, Collection<MetricDatumBean> metricDataPoints)
        throws Exception {
    if (metricDataPoints.isEmpty()) {
        LOG.debug(
                String.format("There are no metric data for metric %s, for group %s.", metricName, groupName));
        return;
    }

    // The dimension is identical for every data point in this call; build the
    // list once instead of once per datum (the AWS SDK setter copies the
    // collection into an internal list, so sharing it across datums is safe).
    List<Dimension> dimensions = Arrays.asList(getDimention(groupName));

    List<MetricDatum> metricData = new ArrayList<>();
    for (MetricDatumBean metricDataPoint : metricDataPoints) {
        MetricDatum metricDatum = new MetricDatum();
        metricDatum.setMetricName(metricName);
        metricDatum.setTimestamp(new Date(metricDataPoint.getTimestamp()));
        metricDatum.setValue(metricDataPoint.getValue());
        metricDatum.setDimensions(dimensions);
        metricData.add(metricDatum);
        // CloudWatch limits the number of datums per request; flush a full
        // batch before continuing to accumulate.
        if (metricData.size() == MAX_AWS_METRIC_RECORDS) {
            sendMetricsInternal(metricData, groupName);
            metricData.clear();
        }
    }

    // Flush any remaining partial batch.
    if (!metricData.isEmpty()) {
        sendMetricsInternal(metricData, groupName);
    }
}