Example usage for java.time.format DateTimeFormatter ISO_OFFSET_DATE_TIME

Introduction

On this page you can find usage examples for java.time.format DateTimeFormatter.ISO_OFFSET_DATE_TIME.

Prototype

DateTimeFormatter ISO_OFFSET_DATE_TIME

Documentation

The ISO date-time formatter that formats or parses a date-time with an offset, such as '2011-12-03T10:15:30+01:00'.
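
For quick orientation, here is a minimal, self-contained sketch of formatting and parsing with this constant; it is not taken from any of the projects below, and the class and variable names are illustrative:

import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class IsoOffsetDateTimeDemo {

    public static void main(String[] args) {
        // Format the current date-time, keeping the zone offset, e.g. 2011-12-03T10:15:30+01:00
        String text = OffsetDateTime.now().format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);

        // Parse the text back; both OffsetDateTime and ZonedDateTime accept this formatter
        OffsetDateTime offsetDateTime = OffsetDateTime.parse(text, DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        ZonedDateTime zonedDateTime = ZonedDateTime.parse(text, DateTimeFormatter.ISO_OFFSET_DATE_TIME);

        System.out.println(offsetDateTime);
        System.out.println(zonedDateTime);
    }
}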

Usage

From source file:com.mgmtp.perfload.perfalyzer.util.TestMetadata.java

public static TestMetadata create(final String rawResultsDir, final Properties properties) {
    ZonedDateTime start = ZonedDateTime.parse(properties.getProperty("test.start"),
            DateTimeFormatter.ISO_OFFSET_DATE_TIME);
    ZonedDateTime end = ZonedDateTime.parse(properties.getProperty("test.finish"),
            DateTimeFormatter.ISO_OFFSET_DATE_TIME);
    String duration = DurationFormatUtils.formatDurationHMS(Duration.between(start, end).toMillis());

    String operationsString = properties.getProperty("operations");
    Set<String> operations = newTreeSet(on(',').trimResults().split(operationsString));
    return new TestMetadata(start, end, duration, properties.getProperty("test.file"), rawResultsDir,
            properties.getProperty("perfload.implementation.version"), properties.getProperty("test.comment"),
            operations);
}

From source file:com.mgmtp.perfload.perfalyzer.reporting.OverviewItem.java

public OverviewItem(final TestMetadata testMetadata, final ResourceBundle resourceBundle, final Locale locale) {
    this.testMetadata = testMetadata;
    this.resourceBundle = resourceBundle;
    this.dateTimeFormatter = DateTimeFormatter.ISO_OFFSET_DATE_TIME.withLocale(locale);
}

From source file:com.mgmtp.perfload.perfalyzer.reporting.email.EmailSkeleton.java

public EmailSkeleton(final TestMetadata testMetadata, final ResourceBundle resourceBundle, final Locale locale,
        final List<? extends List<String>> data, final Map<String, List<? extends List<String>>> comparisonData,
        final String linkToReport) {
    this.testMetadata = testMetadata;
    this.resourceBundle = resourceBundle;
    this.data = data;
    this.comparisonData = comparisonData;
    this.linkToReport = linkToReport;
    this.dateTimeFormatter = DateTimeFormatter.ISO_OFFSET_DATE_TIME.withLocale(locale);
}

From source file:am.ik.categolj3.api.CategoLJ3ApiConfig.java

@Bean
Gson gson() {
    return new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssX").registerTypeAdapter(OffsetDateTime.class,
            (JsonDeserializer<OffsetDateTime>) (json, type, context) -> DateTimeFormatter.ISO_OFFSET_DATE_TIME
                    .parse(json.getAsString(), OffsetDateTime::from))
            .registerTypeAdapter(OffsetDateTime.class,
                    (JsonSerializer<OffsetDateTime>) (json, type,
                            context) -> new JsonPrimitive(DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(json)))
            .create();
}
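
As a hedged usage sketch (not part of the original source), the two adapters registered above allow an OffsetDateTime round trip; gson() is called directly here only for illustration, whereas in the application the bean would be injected:

// Hypothetical round trip with the Gson instance built above
Gson gson = gson();
String json = gson.toJson(OffsetDateTime.parse("2011-12-03T10:15:30+01:00")); // quoted JSON string "2011-12-03T10:15:30+01:00"
OffsetDateTime restored = gson.fromJson(json, OffsetDateTime.class);          // parsed back via ISO_OFFSET_DATE_TIME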

From source file:lumbermill.api.Codecs.java

private static JsonEvent raw(ByteString raw) {
    ObjectNode objectNode = objectMapper.createObjectNode().put("message", raw.utf8()).put("@timestamp",
            now().format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
    return new JsonEvent(objectNode);
}

From source file:fi.hsl.parkandride.itest.PredictionITest.java

@Test
public void prediction_JSON_structure() {
    Utilization u = makeDummyPredictions();

    JsonPath json = when().get(UrlSchema.FACILITY_PREDICTION, facilityId).jsonPath();
    long facilityId = json.getLong("[0].facilityId");
    String capacityType = json.getString("[0].capacityType");
    String usage = json.getString("[0].usage");
    OffsetDateTime timestamp = OffsetDateTime.parse(json.getString("[0].timestamp"),
            DateTimeFormatter.ISO_OFFSET_DATE_TIME);
    int spacesAvailable = json.getInt("[0].spacesAvailable");

    assertThat(facilityId).as("facilityId").isEqualTo(u.facilityId);
    assertThat(capacityType).as("capacityType").isEqualTo(u.capacityType.name());
    assertThat(usage).as("usage").isEqualTo(u.usage.name());
    assertThat(timestamp.getOffset()).as("time should be in local timezone")
            .isEqualTo(ZoneOffset.systemDefault().getRules().getOffset(timestamp.toInstant()));
    assertThat(spacesAvailable).as("spacesAvailable").isEqualTo(u.spacesAvailable);
}

From source file:org.elasticsearch.multi_node.RollupIT.java

public void testBigRollup() throws Exception {
    final int numDocs = 200;
    String dateFormat = "strict_date_optional_time";

    // create the rollup-docs index
    try (XContentBuilder builder = jsonBuilder()) {
        builder.startObject();
        {
            builder.startObject("mappings").startObject("_doc").startObject("properties")
                    .startObject("timestamp").field("type", "date").field("format", dateFormat).endObject()
                    .startObject("value").field("type", "integer").endObject().endObject().endObject()
                    .endObject();
        }
        builder.endObject();
        final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
        Request req = new Request("PUT", "rollup-docs");
        req.setEntity(entity);
        client().performRequest(req);
    }

    // index documents for the rollup job
    final StringBuilder bulk = new StringBuilder();
    for (int i = 0; i < numDocs; i++) {
        bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n");
        ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1531221196 + (60 * i)),
                ZoneId.of("UTC"));
        String date = zdt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n");
    }
    bulk.append("\r\n");

    final Request bulkRequest = new Request("POST", "/_bulk");
    bulkRequest.addParameter("refresh", "true");
    bulkRequest.setJsonEntity(bulk.toString());
    client().performRequest(bulkRequest);

    // create the rollup job
    final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test");
    int pageSize = randomIntBetween(2, 50);
    createRollupJobRequest.setJsonEntity("{" + "\"index_pattern\":\"rollup-*\","
            + "\"rollup_index\":\"results-rollup\"," + "\"cron\":\"*/1 * * * * ?\"," // fast cron so test runs quickly
            + "\"page_size\":" + pageSize + "," + "\"groups\":{" + "    \"date_histogram\":{"
            + "        \"field\":\"timestamp\"," + "        \"interval\":\"5m\"" + "      }" + "},"
            + "\"metrics\":[" + "    {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}" + "]" + "}");

    Map<String, Object> createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest));
    assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE));

    // start the rollup job
    final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start");
    Map<String, Object> startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest));
    assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE));

    assertRollUpJob("rollup-job-test");

    // Wait for the job to finish, by watching how many rollup docs we've indexed
    assertBusy(() -> {
        final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/rollup-job-test");
        Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
        assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));

        Map<String, Object> job = getJob(getRollupJobResponse, "rollup-job-test");
        if (job != null) {
            assertThat(ObjectPath.eval("status.job_state", job), equalTo("started"));
            assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41));
        }
    }, 30L, TimeUnit.SECONDS);

    // Refresh the rollup index to make sure all newly indexed docs are searchable
    final Request refreshRollupIndex = new Request("POST", "results-rollup/_refresh");
    toMap(client().performRequest(refreshRollupIndex));

    String jsonRequestBody = "{\n" + "  \"size\": 0,\n" + "  \"query\": {\n" + "    \"match_all\": {}\n"
            + "  },\n" + "  \"aggs\": {\n" + "    \"date_histo\": {\n" + "      \"date_histogram\": {\n"
            + "        \"field\": \"timestamp\",\n" + "        \"interval\": \"1h\",\n"
            + "        \"format\": \"date_time\"\n" + "      },\n" + "      \"aggs\": {\n"
            + "        \"the_max\": {\n" + "          \"max\": {\n" + "            \"field\": \"value\"\n"
            + "          }\n" + "        }\n" + "      }\n" + "    }\n" + "  }\n" + "}";

    Request request = new Request("GET", "rollup-docs/_search");
    request.setJsonEntity(jsonRequestBody);
    Response liveResponse = client().performRequest(request);
    Map<String, Object> liveBody = toMap(liveResponse);

    request = new Request("GET", "results-rollup/_rollup_search");
    request.setJsonEntity(jsonRequestBody);
    Response rollupResponse = client().performRequest(request);
    Map<String, Object> rollupBody = toMap(rollupResponse);

    // Do the live agg results match the rollup agg results?
    assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", rollupBody)));

    request = new Request("GET", "rollup-docs/_rollup_search");
    request.setJsonEntity(jsonRequestBody);
    Response liveRollupResponse = client().performRequest(request);
    Map<String, Object> liveRollupBody = toMap(liveRollupResponse);

    // Does searching the live index via rollup_search match the live search?
    assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", liveRollupBody)));

}

From source file:lumbermill.internal.influxdb.InfluxDBClient.java

/**
 * Creates Points based on the event and config.
 */
private static Observable<Point> buildPoint(MapWrap config, StringTemplate measurementTemplate,
        JsonEvent jsonEvent) {

    final MapWrap fieldsConfig = MapWrap.of(config.getObject("fields"));
    final List<String> excludeTags = config.getObject("excludeTags", DEFAULT_EXCLUDED_TAGS);

    // One field is required, otherwise the point will not be created
    boolean addedAtLeastOneField = false;
    Optional<String> measurementOptional = measurementTemplate.format(jsonEvent);
    if (!measurementOptional.isPresent()) {
        LOGGER.debug("Failed to extract measurement using {}, not points will be created",
                measurementTemplate.original());
        return Observable.empty();
    }
    Point.Builder pointBuilder = Point.measurement(measurementOptional.get());

    for (Object entry1 : fieldsConfig.toMap().entrySet()) {
        Map.Entry<String, String> entry = (Map.Entry) entry1;
        StringTemplate fieldName = StringTemplate.compile(entry.getKey());
        String valueField = entry.getValue();

        JsonNode node = jsonEvent.unsafe().get(valueField);
        if (node == null) {
            LOGGER.debug("Failed to extract any field for {}", valueField);
            continue;
        }

        Optional<String> formattedFieldNameOptional = fieldName.format(jsonEvent);
        if (!formattedFieldNameOptional.isPresent()) {
            LOGGER.debug("Failed to extract any field for {}", fieldName.original());
            continue;
        }

        addedAtLeastOneField = true;

        if (node.isNumber()) {
            pointBuilder.addField(formattedFieldNameOptional.get(), node.asDouble());
        } else if (node.isBoolean()) {
            pointBuilder.addField(formattedFieldNameOptional.get(), node.asBoolean());
        } else {
            pointBuilder.addField(formattedFieldNameOptional.get(), node.asText());
        }
    }

    Iterator<String> stringIterator = jsonEvent.unsafe().fieldNames();
    while (stringIterator.hasNext()) {
        String next = stringIterator.next();
        if (!excludeTags.contains(next)) {
            pointBuilder.tag(next, jsonEvent.valueAsString(next));
        }
    }

    Optional<String> timeField = config.exists("time") ? Optional.of(config.asString("time"))
            : Optional.empty();
    TimeUnit precision = config.getObject("precision", TimeUnit.MILLISECONDS);

    // Override @timestamp with an ISO_8601 String or a numerical value
    if (timeField.isPresent() && jsonEvent.has(config.asString("time"))) {

        if (jsonEvent.unsafe().get(timeField.get()).isTextual()) {
            pointBuilder.time(ZonedDateTime
                    .parse(jsonEvent.valueAsString("@timestamp"), DateTimeFormatter.ISO_OFFSET_DATE_TIME)
                    .toInstant().toEpochMilli(), precision);
        } else {
            pointBuilder.time(jsonEvent.asLong(timeField.get()), precision);
        }
    } else {
        // If not overridden, check if @timestamp exists and use that
        if (jsonEvent.has("@timestamp")) {
            pointBuilder.time(ZonedDateTime
                    .parse(jsonEvent.valueAsString("@timestamp"), DateTimeFormatter.ISO_OFFSET_DATE_TIME)
                    .toInstant().toEpochMilli(), precision);
        }
    }

    if (!addedAtLeastOneField) {
        LOGGER.debug("Could not create a point since no fields where added");
        return Observable.empty();
    }

    Point point = pointBuilder.build();
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Point to be stored {}", point.toString());
    }
    return Observable.just(point);
}

From source file:de.swm.nis.logicaldecoding.tracktable.TrackTablePublisher.java

private PGobject getTimestamp(DmlEvent event) {
    PGobject timestamp = new PGobject();
    timestamp.setType("timestamp");
    try {
        ZonedDateTime time = event.getCommitTime();
        if (time == null) {
            timestamp.setValue("1970-01-01T00:00:00+00:00");
            return timestamp;
        }
        timestamp.setValue(time.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
    } catch (SQLException e) {
        log.error("Error while setting Timestamp SQL PGobject type:", e);
    }
    return timestamp;
}

From source file:com.mgmtp.perfload.perfalyzer.reporting.ReportCreator.java

public void createReport(final List<PerfAlyzerFile> files) throws IOException {
    Function<PerfAlyzerFile, String> classifier = perfAlyzerFile -> {
        String marker = perfAlyzerFile.getMarker();
        return marker == null ? "Overall" : marker;
    };
    Supplier<Map<String, List<PerfAlyzerFile>>> mapFactory = () -> new TreeMap<>(Ordering.explicit(tabNames));

    Map<String, List<PerfAlyzerFile>> filesByMarker = files.stream()
            .collect(Collectors.groupingBy(classifier, mapFactory, toList()));

    Map<String, SortedSetMultimap<String, PerfAlyzerFile>> contentItemFiles = new LinkedHashMap<>();

    for (Entry<String, List<PerfAlyzerFile>> entry : filesByMarker.entrySet()) {
        SortedSetMultimap<String, PerfAlyzerFile> contentItemFilesByMarker = contentItemFiles.computeIfAbsent(
                entry.getKey(),
                s -> TreeMultimap.create(new ItemComparator(reportContentsConfigMap.get("priorities")),
                        Ordering.natural()));

        for (PerfAlyzerFile perfAlyzerFile : entry.getValue()) {
            File file = perfAlyzerFile.getFile();
            String groupKey = removeExtension(file.getPath());
            boolean excluded = false;
            for (Pattern pattern : reportContentsConfigMap.get("exclusions")) {
                Matcher matcher = pattern.matcher(groupKey);
                if (matcher.matches()) {
                    excluded = true;
                    log.debug("Excluded from report: {}", groupKey);
                    break;
                }
            }
            if (!excluded) {
                contentItemFilesByMarker.put(groupKey, perfAlyzerFile);
            }
        }
    }

    // explicitly copy it because it is otherwise filtered from the report so that it only shows in the overview
    String loadProfilePlot = new File("console", "[loadprofile].png").getPath();
    copyFile(new File(soureDir, loadProfilePlot), new File(destDir, loadProfilePlot));

    Map<String, List<ContentItem>> tabItems = new LinkedHashMap<>();
    Map<String, QuickJump> quickJumps = new HashMap<>();
    Set<String> tabNames = contentItemFiles.keySet();

    for (Entry<String, SortedSetMultimap<String, PerfAlyzerFile>> tabEntry : contentItemFiles.entrySet()) {
        String tab = tabEntry.getKey();
        SortedSetMultimap<String, PerfAlyzerFile> filesForTab = tabEntry.getValue();

        List<ContentItem> contentItems = tabItems.computeIfAbsent(tab, list -> new ArrayList<>());
        Map<String, String> quickJumpMap = new LinkedHashMap<>();
        quickJumps.put(tab, new QuickJump(tab, quickJumpMap));

        int itemIndex = 0;
        for (Entry<String, Collection<PerfAlyzerFile>> itemEntry : filesForTab.asMap().entrySet()) {
            String title = itemEntry.getKey();
            Collection<PerfAlyzerFile> itemFiles = itemEntry.getValue();

            TableData tableData = null;
            String plotSrc = null;
            for (PerfAlyzerFile file : itemFiles) {
                if ("png".equals(getExtension(file.getFile().getName()))) {
                    plotSrc = file.getFile().getPath();
                    copyFile(new File(soureDir, plotSrc), new File(destDir, plotSrc));
                } else {
                    tableData = createTableData(file.getFile());
                }
            }

            // strip off potential marker
            title = substringBefore(title, "{");

            String[] titleParts = split(title, SystemUtils.FILE_SEPARATOR);
            StringBuilder sb = new StringBuilder(50);
            String separator = " - ";
            sb.append(resourceBundle.getString(titleParts[0]));
            sb.append(separator);
            sb.append(resourceBundle.getString(titleParts[1]));

            List<String> fileNameParts = extractFileNameParts(titleParts[1], true);
            if (titleParts[1].contains("[distribution]")) {
                String operation = fileNameParts.get(1);
                sb.append(separator);
                sb.append(operation);
            } else if ("comparison".equals(titleParts[0])) {
                String operation = fileNameParts.get(1);
                sb.append(separator);
                sb.append(operation);
            } else if (titleParts[1].contains("[gclog]")) {
                if (fileNameParts.size() > 1) {
                    sb.append(separator);
                    sb.append(fileNameParts.get(1));
                }
            }

            title = sb.toString();
            ContentItem item = new ContentItem(tab, itemIndex, title, tableData, plotSrc,
                    resourceBundle.getString("report.topLink"));
            contentItems.add(item);

            quickJumpMap.put(tab + "_" + itemIndex, title);
            itemIndex++;
        }
    }

    NavBar navBar = new NavBar(tabNames, quickJumps);
    String testName = removeExtension(testMetadata.getTestPlanFile());
    OverviewItem overviewItem = new OverviewItem(testMetadata, resourceBundle, locale);
    Content content = new Content(tabItems);

    String perfAlyzerVersion;
    try {
        perfAlyzerVersion = Resources.toString(Resources.getResource("perfAlyzer.version"), Charsets.UTF_8);
    } catch (IOException ex) {
        log.error("Could not read perfAlyzer version from classpath resource 'perfAlyzer.version'", ex);
        perfAlyzerVersion = "";
    }

    String dateTimeString = DateTimeFormatter.ISO_OFFSET_DATE_TIME.withLocale(locale)
            .format(ZonedDateTime.now());
    String createdString = String.format(resourceBundle.getString("footer.created"), perfAlyzerVersion,
            dateTimeString);
    HtmlSkeleton html = new HtmlSkeleton(testName, createdString, navBar, overviewItem, content);
    writeReport(html);
}