Example usage for org.joda.time.format ISOPeriodFormat standard

List of usage examples for org.joda.time.format ISOPeriodFormat standard

Introduction

In this page you can find the example usage for org.joda.time.format ISOPeriodFormat standard.

Prototype

public static PeriodFormatter standard() 

Source Link

Document

The standard ISO format - PyYmMwWdDThHmMsS.

Usage

From source file: colossal.pipe.BaseOptions.java

License: Apache License

private Period parseDuration() {
    // Try each known Joda-Time period format in turn until one accepts the
    // configured duration string; the default (human-readable) format is
    // attempted first, then the ISO variants.
    final PeriodFormatter[] candidates = { PeriodFormat.getDefault(), ISOPeriodFormat.standard(),
            ISOPeriodFormat.alternate(), ISOPeriodFormat.alternateExtended(),
            ISOPeriodFormat.alternateExtendedWithWeeks(), ISOPeriodFormat.alternateWithWeeks() };
    for (final PeriodFormatter candidate : candidates) {
        try {
            return candidate.parsePeriod(duration);
        } catch (final IllegalArgumentException ignored) {
            // This format did not match; fall through to the next candidate.
        }
    }
    throw new IllegalArgumentException("Can't parse: " + duration);
}

From source file:com.arpnetworking.tsdcore.sinks.circonus.CirconusSinkActor.java

License:Apache License

private Map<String, Object> serialize(final Collection<AggregatedData> data) {
    // Serializes aggregated data into a map keyed by "period/metric/statistic".
    // Plain statistics map to their numeric value; histogram statistics (when
    // enabled) map to a node of "H[bucket]=count" strings.
    final Map<String, Object> dataNode = Maps.newHashMap();
    for (final AggregatedData aggregatedData : data) {
        final String name = new StringBuilder()
                .append(aggregatedData.getPeriod().toString(ISOPeriodFormat.standard())).append("/")
                .append(aggregatedData.getFQDSN().getMetric()).append("/")
                .append(aggregatedData.getFQDSN().getStatistic().getName()).toString();
        // For histograms, if they're enabled, we'll build the histogram data node
        if (_enableHistograms && aggregatedData.getFQDSN().getStatistic() instanceof HistogramStatistic) {
            final HistogramStatistic.HistogramSupportingData histogramSupportingData = (HistogramStatistic.HistogramSupportingData) aggregatedData
                    .getSupportingData();
            final HistogramStatistic.HistogramSnapshot histogram = histogramSupportingData
                    .getHistogramSnapshot();
            final ArrayList<String> valueList = new ArrayList<>(histogram.getEntriesCount());
            // Two significant digits, rounded down, for the bucket boundary.
            final MathContext context = new MathContext(2, RoundingMode.DOWN);
            for (final Map.Entry<Double, Integer> entry : histogram.getValues()) {
                // BUG FIX: the original added the "H[value]=count" string once per
                // occurrence (i.e. count times), double-counting every bucket and
                // overflowing the getEntriesCount() presize. Emit each bucket
                // exactly once, carrying its count.
                final BigDecimal decimal = new BigDecimal(entry.getKey(), context);
                valueList.add(String.format("H[%s]=%d", decimal.toPlainString(), entry.getValue()));
            }

            final Map<String, Object> histogramValueNode = Maps.newHashMap();
            histogramValueNode.put("_type", "n"); // Histograms are type "n"
            histogramValueNode.put("_value", valueList);
            dataNode.put(name, histogramValueNode);
        } else {
            dataNode.put(name, aggregatedData.getValue().getValue());
        }
    }
    return dataNode;
}

From source file:com.arpnetworking.tsdcore.sinks.circonus.CirconusSinkActor.java

License:Apache License

private String getMetricKey(final AggregatedData data) {
    // Underscore-delimited identity: service_cluster_host_period_metric_statistic.
    return String.join("_",
            data.getFQDSN().getService(),
            data.getFQDSN().getCluster(),
            data.getHost(),
            data.getPeriod().toString(ISOPeriodFormat.standard()),
            data.getFQDSN().getMetric(),
            data.getFQDSN().getStatistic().getName());
}

From source file:com.arpnetworking.tsdcore.sinks.DataDogSink.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
protected Collection<byte[]> serialize(final PeriodicData periodicData) {
    // Metric names are prefixed with the ISO-8601 period; the timestamp is the
    // period-end instant converted from milliseconds to epoch seconds.
    final String periodPrefix = periodicData.getPeriod().toString(ISOPeriodFormat.standard());
    final long periodEndSeconds = (periodicData.getStart().getMillis()
            + periodicData.getPeriod().toStandardDuration().getMillis()) / 1000;

    // Collect one datum per specified value; unspecified values are skipped.
    final List<Datum> series = Lists.newArrayList();
    for (final AggregatedData datum : periodicData.getData()) {
        if (!datum.isSpecified()) {
            continue;
        }
        final String metricName = periodPrefix + "_" + datum.getFQDSN().getMetric() + "_"
                + datum.getFQDSN().getStatistic().getName();
        series.add(new Datum(metricName, periodEndSeconds, (float) datum.getValue().getValue(),
                periodicData.getDimensions().get("host"), createTags(periodicData, datum)));
    }

    // Serialize the whole batch as a single JSON payload under the "series" key.
    try {
        final String json = OBJECT_MAPPER.writeValueAsString(Collections.singletonMap("series", series));
        return Collections.singletonList(json.getBytes(Charsets.UTF_8));
    } catch (final JsonProcessingException e) {
        LOGGER.error().setMessage("Serialization error").addData("periodicData", periodicData).setThrowable(e)
                .log();
        return Collections.emptyList();
    }
}

From source file:com.arpnetworking.tsdcore.sinks.InfluxDbSink.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
protected Collection<byte[]> serialize(final PeriodicData periodicData) {
    final String period = periodicData.getPeriod().toString(ISOPeriodFormat.standard());

    // Accumulate all statistics for the same metric name into one line entry.
    final Map<String, MetricFormat> metricsByName = Maps.newHashMap();

    for (final AggregatedData data : periodicData.getData()) {
        final String metricName = buildMetricName(period, data.getFQDSN());

        // Lazily create the entry on first sight of a metric name, tagging it
        // with the datum's service and cluster.
        final MetricFormat formatted = metricsByName.computeIfAbsent(metricName,
                name -> new MetricFormat(name, periodicData.getStart().getMillis(),
                        periodicData.getDimensions())
                                .addTag("service", data.getFQDSN().getService())
                                .addTag("cluster", data.getFQDSN().getCluster()));

        formatted.addMetric(data.getFQDSN().getStatistic().getName(), data.getValue().getValue());
        //TODO(dguerreromartin): include Conditional
    }

    // Emit one newline-separated payload containing every metric line.
    final StringJoiner lines = new StringJoiner("\n");
    for (final MetricFormat metric : metricsByName.values()) {
        lines.add(metric.buildMetricString());
    }

    return Lists.newArrayList(lines.toString().getBytes(StandardCharsets.UTF_8));
}

From source file:com.arpnetworking.tsdcore.sinks.KairosDbSink.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
protected Collection<byte[]> serialize(final PeriodicData periodicData) {
    // Serializes one period's data and conditions into byte chunks, each capped
    // at _maxRequestSize, so no single KairosDb request grows unbounded.
    // Initialize serialization structures
    final List<byte[]> completeChunks = Lists.newArrayList();
    final ByteBuffer currentChunk = ByteBuffer.allocate(_maxRequestSize);
    final ByteArrayOutputStream chunkStream = new ByteArrayOutputStream();

    // Extract and transform data shared by every datum in this period; the
    // timestamp is the period-end instant in epoch milliseconds.
    final long timestamp = periodicData.getStart().plus(periodicData.getPeriod()).getMillis();
    final String serializedPeriod = periodicData.getPeriod().toString(ISOPeriodFormat.standard());
    final ImmutableMap<String, String> dimensions = periodicData.getDimensions();
    final Serializer serializer = new Serializer(timestamp, serializedPeriod, dimensions);

    // Initialize the chunk buffer with the payload header. NOTE(review): the
    // Serializer presumably flushes full chunks into completeChunks and starts
    // new ones with the same header -- confirm against the Serializer source.
    currentChunk.put(HEADER);

    // Add aggregated data, skipping values that were never specified.
    for (final AggregatedData datum : periodicData.getData()) {
        if (!datum.isSpecified()) {
            LOGGER.trace().setMessage("Skipping unspecified datum").addData("datum", datum).log();
            continue;
        }

        serializer.serializeDatum(completeChunks, currentChunk, chunkStream, datum);
    }

    // Add conditions
    for (final Condition condition : periodicData.getConditions()) {
        serializer.serializeCondition(completeChunks, currentChunk, chunkStream, condition);
    }

    // Add the current chunk (if any) to the completed chunks. A chunk holding
    // only the header is empty and is dropped; otherwise the final byte
    // (presumably a trailing element separator) is overwritten with FOOTER to
    // terminate the payload.
    if (currentChunk.position() > HEADER_BYTE_LENGTH) {
        currentChunk.put(currentChunk.position() - 1, FOOTER);
        completeChunks.add(Arrays.copyOf(currentChunk.array(), currentChunk.position()));
    }

    return completeChunks;
}

From source file:com.arpnetworking.tsdcore.sinks.KMonDSink.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
protected Collection<byte[]> serialize(final PeriodicData periodicData) {
    // Builds one form-encoded KMonD payload per indexed metric key, carrying
    // every specified statistic (and any matching alert conditions) for that
    // service/period/metric group.
    final Period period = periodicData.getPeriod();
    final Multimap<String, AggregatedData> indexedData = prepareData(periodicData);
    final Multimap<String, Condition> indexedConditions = prepareConditions(periodicData.getConditions());

    // Serialize
    final List<byte[]> serializedData = Lists.newArrayListWithCapacity(indexedData.size());
    final StringBuilder stringBuilder = new StringBuilder();
    for (final String key : indexedData.keySet()) {
        final Collection<AggregatedData> namedData = indexedData.get(key);
        if (!namedData.isEmpty()) {
            // Reuse one builder across keys; reset it for each payload.
            stringBuilder.setLength(0);
            final AggregatedData first = Iterables.getFirst(namedData, null);
            // Monitor name: service_<ISO period>_metric, shared by all
            // statistics in this group.
            final String name = new StringBuilder().append(first.getFQDSN().getService()).append("_")
                    .append(period.toString(ISOPeriodFormat.standard())).append("_")
                    .append(first.getFQDSN().getMetric()).toString();

            int maxStatus = 0;
            boolean hasAlert = false;
            final StringBuilder dataBuilder = new StringBuilder();
            for (final AggregatedData datum : namedData) {
                // Skip values that were never specified for this period.
                if (!datum.isSpecified()) {
                    continue;
                }

                // Append "statistic%3Dvalue%3B" (URL-encoded "=" and ";").
                dataBuilder.append(datum.getFQDSN().getStatistic().getName()).append("%3D")
                        .append(datum.getValue().getValue()).append("%3B");

                // Conditions are keyed by service_metric_cluster_statistic; any
                // match marks the payload as alerting and folds its status into
                // maxStatus via serializeCondition.
                final String conditionKey = datum.getFQDSN().getService() + "_" + datum.getFQDSN().getMetric()
                        + "_" + datum.getFQDSN().getCluster() + "_" + datum.getFQDSN().getStatistic();
                for (final Condition condition : indexedConditions.get(conditionKey)) {
                    hasAlert = true;
                    maxStatus = serializeCondition(maxStatus, dataBuilder, datum, condition);
                }
            }

            // Don't send an empty payload
            if (dataBuilder.length() == 0) {
                continue;
            }

            stringBuilder.append("run_every=").append(period.toStandardSeconds().getSeconds())
                    .append("&has_alert=").append(hasAlert).append("&path=")
                    .append(first.getFQDSN().getCluster()).append("%2f")
                    .append(periodicData.getDimensions().get("host")).append("&monitor=").append(name)
                    .append("&status=").append(maxStatus).append("&timestamp=")
                    .append((int) Unit.SECOND.convert(periodicData.getStart().getMillis(), Unit.MILLISECOND))
                    .append("&output=").append(name).append("%7C").append(dataBuilder.toString());

            // Trim the trailing 3-character "%3B" separator (assumes
            // serializeCondition also leaves the buffer "%3B"-terminated --
            // TODO confirm).
            stringBuilder.setLength(stringBuilder.length() - 3);
            serializedData.add(stringBuilder.toString().getBytes(Charset.forName("UTF-8")));
        }
    }

    return serializedData;
}

From source file:com.arpnetworking.tsdcore.sinks.KMonDSink.java

License:Apache License

private Multimap<String, AggregatedData> prepareData(final PeriodicData periodicData) {
    // Group the datum list into a multimap keyed by
    // service_period_metric_host_cluster so every statistic for one unique
    // metric lands under the same key.
    final String period = periodicData.getPeriod().toString(ISOPeriodFormat.standard());
    final String host = periodicData.getDimensions().get("host");
    return Multimaps.index(periodicData.getData(), input -> String.join("_",
            input.getFQDSN().getService(),
            period,
            input.getFQDSN().getMetric(),
            host,
            input.getFQDSN().getCluster()));
}

From source file:com.arpnetworking.tsdcore.sinks.MonitordSink.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
protected Collection<byte[]> serialize(final PeriodicData periodicData) {
    // Builds one form-encoded Monitord payload per indexed metric key, carrying
    // every specified statistic plus any triggered condition thresholds and the
    // worst (maximum) severity status among them.
    final Period period = periodicData.getPeriod();
    final Multimap<String, AggregatedData> indexedData = prepareData(periodicData);
    final Multimap<String, Condition> indexedConditions = prepareConditions(periodicData.getConditions());

    // Serialize
    final List<byte[]> serializedData = Lists.newArrayListWithCapacity(indexedData.size());
    final StringBuilder stringBuilder = new StringBuilder();
    for (final String key : indexedData.keySet()) {
        final Collection<AggregatedData> namedData = indexedData.get(key);
        if (!namedData.isEmpty()) {
            // Reuse one builder across keys; reset it for each payload.
            stringBuilder.setLength(0);
            final AggregatedData first = Iterables.getFirst(namedData, null);
            // Monitor name: service_<ISO period>_metric, shared by all
            // statistics in this group.
            final String name = new StringBuilder().append(first.getFQDSN().getService()).append("_")
                    .append(period.toString(ISOPeriodFormat.standard())).append("_")
                    .append(first.getFQDSN().getMetric()).toString();

            int maxStatus = 0;
            final StringBuilder dataBuilder = new StringBuilder();
            for (final AggregatedData datum : namedData) {
                // Skip values that were never specified for this period.
                if (!datum.isSpecified()) {
                    continue;
                }

                // Append "statistic%3Dvalue%3B" (URL-encoded "=" and ";").
                dataBuilder.append(datum.getFQDSN().getStatistic().getName()).append("%3D")
                        .append(datum.getValue().getValue()).append("%3B");

                // Conditions are keyed by service_metric_cluster_statistic.
                final String conditionKey = datum.getFQDSN().getService() + "_" + datum.getFQDSN().getMetric()
                        + "_" + datum.getFQDSN().getCluster() + "_" + datum.getFQDSN().getStatistic();
                for (final Condition condition : indexedConditions.get(conditionKey)) {
                    // Append the condition threshold as "statistic_condition%3Dthreshold%3B".
                    dataBuilder.append(datum.getFQDSN().getStatistic().getName()).append("_")
                            .append(condition.getName()).append("%3D")
                            .append(condition.getThreshold().getValue()).append("%3B");

                    if (condition.isTriggered().isPresent() && condition.isTriggered().get()) {
                        // Collect the status of this metric; unknown severities
                        // fall back to _unknownSeverityStatus.
                        final Object severity = condition.getExtensions().get("severity");
                        int status = _unknownSeverityStatus;
                        if (severity != null && _severityToStatus.containsKey(severity)) {
                            status = _severityToStatus.get(severity);
                        }
                        maxStatus = Math.max(status, maxStatus);
                    }
                }
            }

            // Don't send an empty payload
            if (dataBuilder.length() == 0) {
                continue;
            }

            stringBuilder.append("run_every=").append(period.toStandardSeconds().getSeconds()).append("&path=")
                    .append(first.getFQDSN().getCluster()).append("%2f")
                    .append(periodicData.getDimensions().get("host")).append("&monitor=").append(name)
                    .append("&status=").append(maxStatus).append("&timestamp=")
                    .append((int) Unit.SECOND.convert(periodicData.getStart().getMillis(), Unit.MILLISECOND))
                    .append("&output=").append(name).append("%7C").append(dataBuilder.toString());

            // Trim the trailing 3-character "%3B" separator left by the last append.
            stringBuilder.setLength(stringBuilder.length() - 3);
            serializedData.add(stringBuilder.toString().getBytes(Charset.forName("UTF-8")));
        }
    }

    return serializedData;
}

From source file:com.arpnetworking.tsdcore.sinks.SignalFxSink.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
protected Collection<byte[]> serialize(final PeriodicData periodicData) {
    // Metric names carry the ISO-8601 period prefix; the timestamp is the
    // period-end instant in epoch milliseconds.
    final String periodName = periodicData.getPeriod().toString(ISOPeriodFormat.standard());
    final long periodEndMillis = periodicData.getStart().getMillis()
            + periodicData.getPeriod().toStandardDuration().getMillis();

    final List<byte[]> payloads = Lists.newArrayList();
    SignalFxProtocolBuffers.DataPointUploadMessage.Builder batch = SignalFxProtocolBuffers.DataPointUploadMessage
            .newBuilder();
    int batchWeight = 0;
    for (final AggregatedData datum : periodicData.getData()) {
        if (!datum.isSpecified()) {
            continue;
        }

        final List<SignalFxProtocolBuffers.Dimension> sfxDimensions = createDimensions(periodicData, datum);
        batch.addDatapoints(createDataPoint(periodName, periodEndMillis, datum, sfxDimensions));

        // In conversation with the SignalFX team we were instructed to limit the
        // number of data points sent per request based on the data points and
        // dimensions per data point; a dimensionless point still counts as one.
        batchWeight += Math.max(1, sfxDimensions.size());

        // Flush the batch once it reaches the configured weight and start a new one.
        if (batchWeight >= _maxMetricDimensions) {
            payloads.add(batch.build().toByteArray());
            batch = SignalFxProtocolBuffers.DataPointUploadMessage.newBuilder();
            batchWeight = 0;
        }
    }

    // Flush any partially-filled final batch.
    if (batchWeight > 0) {
        payloads.add(batch.build().toByteArray());
    }
    return payloads;
}