Example usage for java.time Instant ofEpochSecond

List of usage examples for java.time Instant ofEpochSecond

Introduction

On this page you can find usage examples for java.time Instant ofEpochSecond.

Prototype

public static Instant ofEpochSecond(long epochSecond) 

Document

Obtains an instance of Instant using seconds from the epoch of 1970-01-01T00:00:00Z.
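
Before the examples, here is a minimal, self-contained sketch (the class name OfEpochSecondExample is ours, not taken from any example below) showing the basic round trip between an epoch-second value and an Instant:

import java.time.Instant;

public class OfEpochSecondExample {
    public static void main(String[] args) {
        // Build an Instant from a count of seconds since 1970-01-01T00:00:00Z.
        Instant instant = Instant.ofEpochSecond(1_000_000_000L);
        System.out.println(instant);                  // 2001-09-09T01:46:40Z
        System.out.println(instant.getEpochSecond()); // 1000000000
    }
}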

Usage

From source file:org.openmhealth.shim.withings.mapper.WithingsIntradayStepCountDataPointMapper.java

/**
 * Maps an individual list node from the array in the Withings activity measure endpoint response into a {@link
 * StepCount} data point.
 *
 * @param nodeWithSteps activity node from the array "activities" contained in the "body" of the endpoint response
 * @return a {@link DataPoint} object containing a {@link StepCount} measure with the appropriate values from
 * the JSON node parameter, wrapped as an {@link Optional}
 */
private Optional<DataPoint<StepCount>> asDataPoint(JsonNode nodeWithSteps,
        Long startDateTimeInUnixEpochSeconds) {
    Long stepCountValue = asRequiredLong(nodeWithSteps, "steps");
    StepCount.Builder stepCountBuilder = new StepCount.Builder(stepCountValue);

    Optional<Long> duration = asOptionalLong(nodeWithSteps, "duration");
    if (duration.isPresent()) {
        OffsetDateTime offsetDateTime = OffsetDateTime
                .ofInstant(Instant.ofEpochSecond(startDateTimeInUnixEpochSeconds), ZoneId.of("Z"));
        stepCountBuilder.setEffectiveTimeFrame(TimeInterval.ofStartDateTimeAndDuration(offsetDateTime,
                new DurationUnitValue(DurationUnit.SECOND, duration.get())));
    }

    Optional<String> userComment = asOptionalString(nodeWithSteps, "comment");
    if (userComment.isPresent()) {
        stepCountBuilder.setUserNotes(userComment.get());
    }

    StepCount stepCount = stepCountBuilder.build();
    return Optional.of(newDataPoint(stepCount, null, true, null));
}

From source file:com.coinblesk.server.controller.AdminController.java

@RequestMapping(value = "/keys", method = GET)
@ResponseBody
public List<KeysDTO> getAllKeys() {
    NetworkParameters params = appConfig.getNetworkParameters();

    // Pre-calculate balances for each address
    Map<Address, Coin> balances = walletService.getBalanceByAddresses();

    List<Keys> keys = keyService.allKeys();

    // ...and summed for each public key
    Map<Keys, Long> balancesPerKeys = keys.stream()
            .collect(Collectors.toMap(Function.identity(), key -> key.timeLockedAddresses().stream()
                    .map(tla -> tla.toAddress(params)).map(balances::get).mapToLong(Coin::longValue).sum()));

    // Map the Keys entities to DTOs including the containing TimeLockedAddresses
    return keys.stream().map(key -> new KeysDTO(SerializeUtils.bytesToHex(key.clientPublicKey()),
            SerializeUtils.bytesToHex(key.serverPublicKey()), SerializeUtils.bytesToHex(key.serverPrivateKey()),
            Date.from(Instant.ofEpochSecond(key.timeCreated())), key.virtualBalance(), balancesPerKeys.get(key),
            key.virtualBalance() + balancesPerKeys.get(key), key.timeLockedAddresses().stream().map(tla -> {
                Instant createdAt = Instant.ofEpochSecond(tla.getTimeCreated());
                Instant lockedUntil = Instant.ofEpochSecond(tla.getLockTime());
                Coin balance = balances.get(tla.toAddress(params));
                return new TimeLockedAddressDTO(tla.toAddress(params).toString(),
                        "http://" + (params.getClass().equals(TestNet3Params.class) ? "tbtc." : "")
                                + "blockr.io/address/info/" + tla.toAddress(params),
                        Date.from(createdAt), Date.from(lockedUntil), lockedUntil.isAfter(Instant.now()),
                        balance.longValue());
            }).collect(Collectors.toList()))).collect(Collectors.toList());
}

From source file:org.elasticsearch.multi_node.RollupIT.java

public void testBigRollup() throws Exception {
    final int numDocs = 200;
    String dateFormat = "strict_date_optional_time";

    // create the test-index index
    try (XContentBuilder builder = jsonBuilder()) {
        builder.startObject();
        {
            builder.startObject("mappings").startObject("_doc").startObject("properties")
                    .startObject("timestamp").field("type", "date").field("format", dateFormat).endObject()
                    .startObject("value").field("type", "integer").endObject().endObject().endObject()
                    .endObject();
        }
        builder.endObject();
        final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
        Request req = new Request("PUT", "rollup-docs");
        req.setEntity(entity);
        client().performRequest(req);
    }

    // index documents for the rollup job
    final StringBuilder bulk = new StringBuilder();
    for (int i = 0; i < numDocs; i++) {
        bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n");
        ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1531221196 + (60 * i)),
                ZoneId.of("UTC"));
        String date = zdt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n");
    }
    bulk.append("\r\n");

    final Request bulkRequest = new Request("POST", "/_bulk");
    bulkRequest.addParameter("refresh", "true");
    bulkRequest.setJsonEntity(bulk.toString());
    client().performRequest(bulkRequest);

    // create the rollup job
    final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test");
    int pageSize = randomIntBetween(2, 50);
    createRollupJobRequest.setJsonEntity("{" + "\"index_pattern\":\"rollup-*\","
            + "\"rollup_index\":\"results-rollup\"," + "\"cron\":\"*/1 * * * * ?\"," // fast cron so test runs quickly
            + "\"page_size\":" + pageSize + "," + "\"groups\":{" + "    \"date_histogram\":{"
            + "        \"field\":\"timestamp\"," + "        \"interval\":\"5m\"" + "      }" + "},"
            + "\"metrics\":[" + "    {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}" + "]" + "}");

    Map<String, Object> createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest));
    assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE));

    // start the rollup job
    final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start");
    Map<String, Object> startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest));
    assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE));

    assertRollUpJob("rollup-job-test");

    // Wait for the job to finish, by watching how many rollup docs we've indexed
    assertBusy(() -> {
        final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/rollup-job-test");
        Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
        assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));

        Map<String, Object> job = getJob(getRollupJobResponse, "rollup-job-test");
        if (job != null) {
            assertThat(ObjectPath.eval("status.job_state", job), equalTo("started"));
            assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41));
        }
    }, 30L, TimeUnit.SECONDS);

    // Refresh the rollup index to make sure all newly indexed docs are searchable
    final Request refreshRollupIndex = new Request("POST", "results-rollup/_refresh");
    toMap(client().performRequest(refreshRollupIndex));

    String jsonRequestBody = "{\n" + "  \"size\": 0,\n" + "  \"query\": {\n" + "    \"match_all\": {}\n"
            + "  },\n" + "  \"aggs\": {\n" + "    \"date_histo\": {\n" + "      \"date_histogram\": {\n"
            + "        \"field\": \"timestamp\",\n" + "        \"interval\": \"1h\",\n"
            + "        \"format\": \"date_time\"\n" + "      },\n" + "      \"aggs\": {\n"
            + "        \"the_max\": {\n" + "          \"max\": {\n" + "            \"field\": \"value\"\n"
            + "          }\n" + "        }\n" + "      }\n" + "    }\n" + "  }\n" + "}";

    Request request = new Request("GET", "rollup-docs/_search");
    request.setJsonEntity(jsonRequestBody);
    Response liveResponse = client().performRequest(request);
    Map<String, Object> liveBody = toMap(liveResponse);

    request = new Request("GET", "results-rollup/_rollup_search");
    request.setJsonEntity(jsonRequestBody);
    Response rollupResponse = client().performRequest(request);
    Map<String, Object> rollupBody = toMap(rollupResponse);

    // Do the live agg results match the rollup agg results?
    assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", rollupBody)));

    request = new Request("GET", "rollup-docs/_rollup_search");
    request.setJsonEntity(jsonRequestBody);
    Response liveRollupResponse = client().performRequest(request);
    Map<String, Object> liveRollupBody = toMap(liveRollupResponse);

    // Does searching the live index via rollup_search match the live search?
    assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", liveRollupBody)));

}

From source file:org.openmhealth.shim.withings.mapper.WithingsIntradayCaloriesBurnedDataPointMapper.java

/**
 * Maps an individual list node from the array in the Withings activity measure endpoint response into a {@link
 * CaloriesBurned} data point.
 *
 * @param nodeWithCalorie activity node from the array "activities" contained in the "body" of the endpoint response
 * that has a calories field
 * @return a {@link DataPoint} object containing a {@link CaloriesBurned} measure with the appropriate values from
 * the JSON node parameter, wrapped as an {@link Optional}
 */
private Optional<DataPoint<CaloriesBurned>> asDataPoint(JsonNode nodeWithCalorie,
        Long startDateTimeInUnixEpochSeconds) {

    Long caloriesBurnedValue = asRequiredLong(nodeWithCalorie, "calories");
    CaloriesBurned.Builder caloriesBurnedBuilder = new CaloriesBurned.Builder(
            new KcalUnitValue(KcalUnit.KILOCALORIE, caloriesBurnedValue));

    Optional<Long> duration = asOptionalLong(nodeWithCalorie, "duration");
    if (duration.isPresent()) {
        OffsetDateTime offsetDateTime = OffsetDateTime
                .ofInstant(Instant.ofEpochSecond(startDateTimeInUnixEpochSeconds), ZoneId.of("Z"));
        caloriesBurnedBuilder.setEffectiveTimeFrame(TimeInterval.ofStartDateTimeAndDuration(offsetDateTime,
                new DurationUnitValue(DurationUnit.SECOND, duration.get())));
    }

    Optional<String> userComment = asOptionalString(nodeWithCalorie, "comment");
    if (userComment.isPresent()) {
        caloriesBurnedBuilder.setUserNotes(userComment.get());
    }

    CaloriesBurned calorieBurned = caloriesBurnedBuilder.build();
    return Optional.of(newDataPoint(calorieBurned, null, true, null));

}

From source file:org.createnet.raptor.db.mapdb.MapDBConnection.java

@Override
public JsonNode get(String id) {

    Record r = getRecord(id);

    if (r == null) {
        return null;
    }

    if (r.ttl > 0 && Instant.ofEpochSecond(r.ttl).isBefore(Instant.now())) {
        delete(id);
        return null;
    }

    return r.content;
}

From source file:com.hubrick.vertx.s3.client.AbstractS3ClientTest.java

@Before
public void setUp() throws Exception {
    final S3ClientOptions clientOptions = new S3ClientOptions();
    clientOptions.setDefaultHost(HOSTNAME);
    clientOptions.setDefaultPort(MOCKSERVER_PORT);
    clientOptions.setMaxPoolSize(10);
    clientOptions.setAwsRegion(S3TestCredentials.REGION);
    clientOptions.setAwsServiceName(S3TestCredentials.SERVICE_NAME);
    clientOptions.setHostnameOverride(HOSTNAME);

    augmentClientOptions(clientOptions);

    s3Client = new S3Client(vertx, clientOptions,
            Clock.fixed(Instant.ofEpochSecond(1478782934), ZoneId.of("UTC")));

}

From source file:org.openmhealth.shim.jawbone.mapper.JawboneDataPointMapper.java

/**
 * @param listEntryNode an individual entry node from the "items" array of a Jawbone endpoint response
 * @return a {@link DataPointHeader} containing the appropriate information based on the input parameters
 */
protected DataPointHeader getHeader(JsonNode listEntryNode, T measure) {

    DataPointAcquisitionProvenance.Builder provenanceBuilder = new DataPointAcquisitionProvenance.Builder(
            RESOURCE_API_SOURCE_NAME);

    if (isSensed(listEntryNode)) {
        provenanceBuilder.setModality(SENSED);
    }

    DataPointAcquisitionProvenance acquisitionProvenance = provenanceBuilder.build();

    asOptionalString(listEntryNode, "xid")
            .ifPresent(externalId -> acquisitionProvenance.setAdditionalProperty("external_id", externalId));
    // TODO discuss the name of the external identifier, to make it clear it's the ID used by the source

    asOptionalLong(listEntryNode, "time_updated").ifPresent(
            sourceUpdatedDateTime -> acquisitionProvenance.setAdditionalProperty("source_updated_date_time",
                    OffsetDateTime.ofInstant(Instant.ofEpochSecond(sourceUpdatedDateTime), ZoneId.of("Z"))));

    DataPointHeader header = new DataPointHeader.Builder(UUID.randomUUID().toString(), measure.getSchemaId())
            .setAcquisitionProvenance(acquisitionProvenance).build();

    // FIXME "shared" is never documented
    asOptionalBoolean(listEntryNode, "shared")
            .ifPresent(isShared -> header.setAdditionalProperty("shared", isShared));

    return header;
}

From source file:io.werval.modules.jose.JwtPluginTest.java

@Test
public void http() throws InterruptedException {
    String tokenHeaderName = WERVAL.application().config().string(JWT.HTTP_HEADER_CONFIG_KEY);
    JWT jwt = WERVAL.application().plugin(JWT.class);

    // Unauthorized access to authenticated resource
    when().get("/authenticated").then().statusCode(UNAUTHORIZED_CODE);

    // Login
    String token = given().body("{\"email\":\"admin@example.com\",\"password\":\"admin-password\"}")
            .contentType(APPLICATION_JSON).when().post("/login").then().statusCode(OK_CODE)
            .header(tokenHeaderName, notNullValue()).log().all().extract().header(tokenHeaderName);

    // Authenticated access
    given().header(tokenHeaderName, token).when().get("/authenticated").then().statusCode(OK_CODE);

    // Authorized access
    given().header(tokenHeaderName, token).when().get("/authorized").then().statusCode(OK_CODE);

    // Gather time related claims from token
    ZoneId utc = ZoneId.of("UTC");
    Map<String, Object> claims = jwt.claimsOfToken(token);
    ZonedDateTime iat = ZonedDateTime.ofInstant(Instant.ofEpochSecond((Long) claims.get(JWT.CLAIM_ISSUED_AT)),
            utc);
    ZonedDateTime nbf = ZonedDateTime.ofInstant(Instant.ofEpochSecond((Long) claims.get(JWT.CLAIM_NOT_BEFORE)),
            utc);
    ZonedDateTime exp = ZonedDateTime.ofInstant(Instant.ofEpochSecond((Long) claims.get(JWT.CLAIM_EXPIRATION)),
            utc);

    // Wait at least one second before renewal so new dates will be different
    Thread.sleep(1200);

    // Renew token
    String renewed = given().header(tokenHeaderName, token).when().post("/renew").then().statusCode(OK_CODE)
            .header(tokenHeaderName, notNullValue()).log().all().extract().header(tokenHeaderName);

    // Gather time related claims from renewed token
    claims = jwt.claimsOfToken(renewed);
    ZonedDateTime renewedIat = ZonedDateTime
            .ofInstant(Instant.ofEpochSecond((Long) claims.get(JWT.CLAIM_ISSUED_AT)), utc);
    ZonedDateTime renewedNbf = ZonedDateTime
            .ofInstant(Instant.ofEpochSecond((Long) claims.get(JWT.CLAIM_NOT_BEFORE)), utc);
    ZonedDateTime renewedExp = ZonedDateTime
            .ofInstant(Instant.ofEpochSecond((Long) claims.get(JWT.CLAIM_EXPIRATION)), utc);

    // Assert renewed token time related claims are greater than the ones in the original token
    assertTrue(renewedIat.isAfter(iat));
    assertTrue(renewedNbf.isAfter(nbf));
    assertTrue(renewedExp.isAfter(exp));
}

From source file:de.qaware.chronix.solr.ingestion.format.OpenTsdbTelnetFormatParser.java

/**
 * Extracts the metric timestamp from the parts.
 *
 * @param parts Parts.
 * @return Metric timestamp.
 * @throws FormatParseException If something went wrong while extracting.
 */
private Instant getMetricTimestamp(String[] parts) throws FormatParseException {
    String value = parts[2];
    try {
        if (value.length() != 10 && value.length() != 13) {
            throw new FormatParseException(
                    "Expected a timestamp length of 10 or 13, found " + value.length() + " ('" + value + "')");
        }

        // 10 digits means seconds, 13 digits mean milliseconds
        boolean secondResolution = value.length() == 10;

        long epochTime = Long.parseLong(value);
        return secondResolution ? Instant.ofEpochSecond(epochTime) : Instant.ofEpochMilli(epochTime);
    } catch (NumberFormatException e) {
        throw new FormatParseException("Can't convert '" + value + "' to long", e);
    }
}

From source file:org.openmhealth.shim.ihealth.mapper.IHealthDataPointMapper.java

/**
 * This method transforms a timestamp from an iHealth response (which is in the form of local time as epoch
 * seconds) into an {@link OffsetDateTime} with the correct date/time and offset. The timestamps provided in
 * iHealth responses are not unix epoch seconds in UTC but instead a unix epoch seconds value that is offset by the
 * time zone of the data point.
 */
protected static OffsetDateTime getDateTimeWithCorrectOffset(Long localTimeAsEpochSeconds,
        ZoneOffset zoneOffset) {

    /*
    iHealth provides the local time of a measurement as if it had occurred in UTC, along with the timezone
    offset where the measurement occurred. To retrieve the correct OffsetDateTime, we must retain the local
    date/time value, but replace the timezone offset.
    */
    return OffsetDateTime.ofInstant(Instant.ofEpochSecond(localTimeAsEpochSeconds), ZoneOffset.UTC)
            .withOffsetSameLocal(zoneOffset);
}