Example usage for java.time ZoneId of

List of usage examples for java.time ZoneId of

Introduction

On this page you can find an example of the usage of java.time ZoneId.of.

Prototype

public static ZoneId of(String zoneId) 

Source Link

Document

Obtains an instance of ZoneId from an ID ensuring that the ID is valid and available for use.

Usage

From source file:org.codice.ddf.security.session.management.impl.SessionManagementServiceImplTest.java

@Test
public void testGetExpirySoonest() throws IOException, ParserConfigurationException, SAXException {
    // Pin the service clock to the epoch so the expiry arithmetic is deterministic.
    sessionManagementServiceImpl.setClock(Clock.fixed(Instant.EPOCH, ZoneId.of("UTC")));
    SecurityToken soonerToken = mock(SecurityToken.class);
    String saml = IOUtils
            .toString(new InputStreamReader(getClass().getClassLoader().getResourceAsStream("saml.xml")));
    // Rewrite the expiry year 2113 -> 2103 so this token expires earlier than the fixture's.
    saml = saml.replace("2113", "2103");
    when(soonerToken.getToken()).thenReturn(readXml(IOUtils.toInputStream(saml, "UTF-8")).getDocumentElement());
    SecurityToken laterToken = mock(SecurityToken.class);
    saml = IOUtils.toString(new InputStreamReader(getClass().getClassLoader().getResourceAsStream("saml.xml")));
    // 2113 -> 2213 gives a much later expiry.
    saml = saml.replace("2113", "2213");
    when(laterToken.getToken()).thenReturn(readXml(IOUtils.toInputStream(saml, "UTF-8")).getDocumentElement());
    // NOTE(review): only soonerToken is handed to the holder; laterToken is built but never
    // registered here — confirm whether that is intentional or a leftover from a copied test.
    when(tokenHolder.getSecurityToken()).thenReturn(soonerToken);
    String expiryString = sessionManagementServiceImpl.getExpiry(request);
    // Expected value corresponds to the 2103 expiry rewritten into saml.xml above.
    assertThat(expiryString, is("4206816594788"));
}

From source file:org.openmhealth.shim.withings.mapper.WithingsIntradayCaloriesBurnedDataPointMapper.java

/**
 * Maps an individual list node from the array in the Withings activity measure endpoint response into a {@link
 * CaloriesBurned} data point./*  w  w  w.  j  a v  a2  s.  c o m*/
 *
 * @param nodeWithCalorie activity node from the array "activites" contained in the "body" of the endpoint response
 * that has a calories field
 * @return a {@link DataPoint} object containing a {@link CaloriesBurned} measure with the appropriate values from
 * the JSON node parameter, wrapped as an {@link Optional}
 */
private Optional<DataPoint<CaloriesBurned>> asDataPoint(JsonNode nodeWithCalorie,
        Long startDateTimeInUnixEpochSeconds) {

    Long caloriesBurnedValue = asRequiredLong(nodeWithCalorie, "calories");
    CaloriesBurned.Builder caloriesBurnedBuilder = new CaloriesBurned.Builder(
            new KcalUnitValue(KcalUnit.KILOCALORIE, caloriesBurnedValue));

    Optional<Long> duration = asOptionalLong(nodeWithCalorie, "duration");
    if (duration.isPresent()) {
        OffsetDateTime offsetDateTime = OffsetDateTime
                .ofInstant(Instant.ofEpochSecond(startDateTimeInUnixEpochSeconds), ZoneId.of("Z"));
        caloriesBurnedBuilder.setEffectiveTimeFrame(TimeInterval.ofStartDateTimeAndDuration(offsetDateTime,
                new DurationUnitValue(DurationUnit.SECOND, duration.get())));
    }

    Optional<String> userComment = asOptionalString(nodeWithCalorie, "comment");
    if (userComment.isPresent()) {
        caloriesBurnedBuilder.setUserNotes(userComment.get());
    }

    CaloriesBurned calorieBurned = caloriesBurnedBuilder.build();
    return Optional.of(newDataPoint(calorieBurned, null, true, null));

}

From source file:org.elasticsearch.multi_node.RollupIT.java

// End-to-end rollup test: index raw docs, run a rollup job over them, then verify that
// aggregations over the rollup index match aggregations over the live index.
public void testBigRollup() throws Exception {
    final int numDocs = 200;
    String dateFormat = "strict_date_optional_time";

    // create the test-index index
    try (XContentBuilder builder = jsonBuilder()) {
        builder.startObject();
        {
            builder.startObject("mappings").startObject("_doc").startObject("properties")
                    .startObject("timestamp").field("type", "date").field("format", dateFormat).endObject()
                    .startObject("value").field("type", "integer").endObject().endObject().endObject()
                    .endObject();
        }
        builder.endObject();
        final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
        Request req = new Request("PUT", "rollup-docs");
        req.setEntity(entity);
        client().performRequest(req);
    }

    // index documents for the rollup job
    // Docs are one minute apart starting at epoch second 1531221196, so 200 docs span
    // ~3h20m and produce 41 five-minute rollup buckets (asserted below).
    final StringBuilder bulk = new StringBuilder();
    for (int i = 0; i < numDocs; i++) {
        bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n");
        ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1531221196 + (60 * i)),
                ZoneId.of("UTC"));
        String date = zdt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n");
    }
    bulk.append("\r\n");

    final Request bulkRequest = new Request("POST", "/_bulk");
    bulkRequest.addParameter("refresh", "true");
    bulkRequest.setJsonEntity(bulk.toString());
    client().performRequest(bulkRequest);

    // create the rollup job
    final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test");
    int pageSize = randomIntBetween(2, 50);
    createRollupJobRequest.setJsonEntity("{" + "\"index_pattern\":\"rollup-*\","
            + "\"rollup_index\":\"results-rollup\"," + "\"cron\":\"*/1 * * * * ?\"," // fast cron so test runs quickly
            + "\"page_size\":" + pageSize + "," + "\"groups\":{" + "    \"date_histogram\":{"
            + "        \"field\":\"timestamp\"," + "        \"interval\":\"5m\"" + "      }" + "},"
            + "\"metrics\":[" + "    {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}" + "]" + "}");

    Map<String, Object> createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest));
    assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE));

    // start the rollup job
    final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start");
    Map<String, Object> startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest));
    assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE));

    assertRollUpJob("rollup-job-test");

    // Wait for the job to finish, by watching how many rollup docs we've indexed
    assertBusy(() -> {
        final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/rollup-job-test");
        Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
        assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));

        Map<String, Object> job = getJob(getRollupJobResponse, "rollup-job-test");
        if (job != null) {
            assertThat(ObjectPath.eval("status.job_state", job), equalTo("started"));
            assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41));
        }
    }, 30L, TimeUnit.SECONDS);

    // Refresh the rollup index to make sure all newly indexed docs are searchable
    final Request refreshRollupIndex = new Request("POST", "results-rollup/_refresh");
    toMap(client().performRequest(refreshRollupIndex));

    // Same aggregation request is issued against the live index, the rollup index, and
    // the live index via _rollup_search, and all bucket lists must agree.
    String jsonRequestBody = "{\n" + "  \"size\": 0,\n" + "  \"query\": {\n" + "    \"match_all\": {}\n"
            + "  },\n" + "  \"aggs\": {\n" + "    \"date_histo\": {\n" + "      \"date_histogram\": {\n"
            + "        \"field\": \"timestamp\",\n" + "        \"interval\": \"1h\",\n"
            + "        \"format\": \"date_time\"\n" + "      },\n" + "      \"aggs\": {\n"
            + "        \"the_max\": {\n" + "          \"max\": {\n" + "            \"field\": \"value\"\n"
            + "          }\n" + "        }\n" + "      }\n" + "    }\n" + "  }\n" + "}";

    Request request = new Request("GET", "rollup-docs/_search");
    request.setJsonEntity(jsonRequestBody);
    Response liveResponse = client().performRequest(request);
    Map<String, Object> liveBody = toMap(liveResponse);

    request = new Request("GET", "results-rollup/_rollup_search");
    request.setJsonEntity(jsonRequestBody);
    Response rollupResponse = client().performRequest(request);
    Map<String, Object> rollupBody = toMap(rollupResponse);

    // Do the live agg results match the rollup agg results?
    assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", rollupBody)));

    request = new Request("GET", "rollup-docs/_rollup_search");
    request.setJsonEntity(jsonRequestBody);
    Response liveRollupResponse = client().performRequest(request);
    Map<String, Object> liveRollupBody = toMap(liveRollupResponse);

    // Does searching the live index via rollup_search work match the live search?
    assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", liveRollupBody)));

}

From source file:io.stallion.jobs.Schedule.java

/**
 * Computes the next datetime matching this schedule at or after the given starting
 * point. Used for testing.
 *
 * @param startingFrom the datetime from which to search forward
 * @return the next matching datetime, normalized to UTC
 */
public ZonedDateTime nextAt(ZonedDateTime startingFrom) {
    ZoneId scheduleZone = getZoneId();
    // Normalize the input into the schedule's own zone before searching.
    ZonedDateTime anchor = startingFrom.getZone().equals(scheduleZone)
            ? startingFrom
            : startingFrom.withZoneSameInstant(scheduleZone);
    return new NextDateTimeFinder(anchor).find().withZoneSameInstant(ZoneId.of("UTC"));
}

From source file:org.codice.ddf.security.servlet.expiry.SessionManagementServiceTest.java

@Test
public void testGetExpirySoonest() throws IOException, ParserConfigurationException, SAXException {
    // Pin the service clock to the epoch so the expiry arithmetic is deterministic.
    sessionManagementService.setClock(Clock.fixed(Instant.EPOCH, ZoneId.of("UTC")));
    SecurityToken soonerToken = mock(SecurityToken.class);
    String saml = IOUtils
            .toString(new InputStreamReader(getClass().getClassLoader().getResourceAsStream("saml.xml")));
    // Rewrite the expiry year 2113 -> 2103 so this token expires first.
    saml = saml.replace("2113", "2103");
    when(soonerToken.getToken()).thenReturn(readXml(IOUtils.toInputStream(saml, "UTF-8")).getDocumentElement());
    SecurityToken laterToken = mock(SecurityToken.class);
    saml = IOUtils.toString(new InputStreamReader(getClass().getClassLoader().getResourceAsStream("saml.xml")));
    // 2113 -> 2213 gives a much later expiry.
    saml = saml.replace("2113", "2213");
    when(laterToken.getToken()).thenReturn(readXml(IOUtils.toInputStream(saml, "UTF-8")).getDocumentElement());
    // Mix three realm tokens; the service must report the soonest expiry (the "karaf" one).
    HashMap<String, SecurityToken> tokenMap = new HashMap<>();
    tokenMap.put("jaas", laterToken);
    tokenMap.put("idp", token);
    tokenMap.put("karaf", soonerToken);
    when(tokenHolder.getRealmTokenMap()).thenReturn(tokenMap);
    Response expiry = sessionManagementService.getExpiry(request);
    assertThat(expiry.getStatus(), is(200));
    // Expected value corresponds to the 2103 expiry rewritten into saml.xml above.
    assertThat(IOUtils.toString(new InputStreamReader((ByteArrayInputStream) expiry.getEntity())),
            is("4206816594788"));
}

From source file:org.apache.james.mailbox.elasticsearch.json.MessageToElasticSearchJsonTest.java

@Test
public void spamEmailShouldBeWellConvertedToJson() throws IOException {
    // Index with a fixed Paris zone and attachment indexing enabled so the output is stable.
    MessageToElasticSearchJson messageToElasticSearchJson = new MessageToElasticSearchJson(
            new DefaultTextExtractor(), ZoneId.of("Europe/Paris"), IndexAttachments.YES);
    MailboxMessage spamMail = new SimpleMailboxMessage(MESSAGE_ID, date, SIZE, BODY_START_OCTET,
            new SharedByteArrayInputStream(
                    IOUtils.toByteArray(ClassLoader.getSystemResourceAsStream("eml/spamMail.eml"))),
            new Flags(), propertyBuilder, MAILBOX_ID);
    spamMail.setUid(UID);
    spamMail.setModSeq(MOD_SEQ);
    // Compare against the golden JSON fixture; array order is not significant.
    assertThatJson(messageToElasticSearchJson.convertToJson(spamMail,
            ImmutableList.of(new MockMailboxSession("username").getUser()))).when(IGNORING_ARRAY_ORDER)
                    .isEqualTo(IOUtils.toString(ClassLoader.getSystemResource("eml/spamMail.json"), CHARSET));
}

From source file:com.epam.catgenome.controller.util.MultipartFileSender.java

/**
 * Streams the configured file to the client, honouring HTTP caching headers and
 * Range (resume / partial-content) requests.
 *
 * <p>Responds 404 if the file does not exist, 500 if its name or timestamp cannot
 * be resolved, and 206 with a single- or multi-part body for range requests;
 * otherwise the whole file is sent.
 *
 * @throws IOException if reading the file or writing the response fails
 */
public void serveResource() throws IOException {
    if (response == null || request == null) {
        return;
    }

    if (!Files.exists(filepath)) {
        logger.error("File doesn't exist at URI : {}", filepath.toAbsolutePath().toString());
        response.sendError(HttpServletResponse.SC_NOT_FOUND);
        return;
    }

    long length = Files.size(filepath);
    String fileName = filepath.getFileName().toString();
    FileTime lastModifiedObj = Files.getLastModifiedTime(filepath);

    if (StringUtils.isEmpty(fileName) || lastModifiedObj == null) {
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    }
    // FIX: take the file time directly as milliseconds since the epoch.
    // The previous computation projected the instant into system-zone wall time and
    // re-read it as UTC (shifting the value by the local UTC offset), and
    // toEpochSecond() yielded seconds although setDateHeader/getDateHeader work in
    // milliseconds, so Last-Modified and the cache/resume comparisons were wrong.
    long lastModified = lastModifiedObj.toMillis();
    //String contentType = MimeTypeUtils.probeContentType(filepath);
    String contentType = null;

    // Validate request headers for caching ---------------------------------------------------
    if (!validateHeadersCaching(fileName, lastModified)) {
        return;
    }

    // Validate request headers for resume ----------------------------------------------------
    if (!validateHeadersResume(fileName, lastModified)) {
        return;
    }

    // Validate and process range -------------------------------------------------------------
    Range full = new Range(0, length - 1, length);
    List<Range> ranges = processRange(length, fileName, full);
    if (ranges == null) {
        return;
    }

    // Prepare and initialize response --------------------------------------------------------

    // Get content type by file name and set content disposition.
    String disposition = "inline";

    // If content type is unknown, then set the default value.
    // For all content types, see: http://www.w3schools.com/media/media_mimeref.asp
    // To add new content types, add new mime-mapping entry in web.xml.
    if (contentType == null) {
        contentType = "application/octet-stream";
    } else if (!contentType.startsWith("image")) {
        // Else, expect for images, determine content disposition. If content type is supported by
        // the browser, then set to inline, else attachment which will pop a 'save as' dialogue.
        String accept = request.getHeader("Accept");
        disposition = accept != null && HttpUtils.accepts(accept, contentType) ? "inline" : "attachment";
    }
    logger.debug("Content-Type : {}", contentType);
    // Initialize response.
    response.reset();
    response.setBufferSize(DEFAULT_BUFFER_SIZE);
    response.setHeader("Content-Type", contentType);
    response.setHeader("Content-Disposition", disposition + ";filename=\"" + fileName + "\"");
    logger.debug("Content-Disposition : {}", disposition);
    response.setHeader("Accept-Ranges", "bytes");
    response.setHeader("ETag", fileName);
    response.setDateHeader("Last-Modified", lastModified);
    response.setDateHeader("Expires", System.currentTimeMillis() + DEFAULT_EXPIRE_TIME);

    // Send requested file (part(s)) to client ------------------------------------------------

    // Prepare streams.
    try (InputStream input = new BufferedInputStream(Files.newInputStream(filepath));
            OutputStream output = response.getOutputStream()) {

        if (ranges.isEmpty() || ranges.get(0) == full) {

            // Return full file.
            logger.info("Return full file");
            response.setContentType(contentType);
            response.setHeader(CONTENT_RANGE_HEADER, "bytes " + full.start + "-" + full.end + "/" + full.total);
            response.setHeader("Content-Length", String.valueOf(full.length));
            Range.copy(input, output, length, full.start, full.length);

        } else if (ranges.size() == 1) {

            // Return single part of file.
            Range r = ranges.get(0);
            logger.info("Return 1 part of file : from ({}) to ({})", r.start, r.end);
            response.setContentType(contentType);
            response.setHeader(CONTENT_RANGE_HEADER, "bytes " + r.start + "-" + r.end + "/" + r.total);
            response.setHeader("Content-Length", String.valueOf(r.length));
            response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // 206.

            // Copy single part range.
            Range.copy(input, output, length, r.start, r.length);

        } else {

            // Return multiple parts of file.
            response.setContentType("multipart/byteranges; boundary=" + MULTIPART_BOUNDARY);
            response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // 206.

            // Cast back to ServletOutputStream to get the easy println methods.
            ServletOutputStream sos = (ServletOutputStream) output;

            // Copy multi part range.
            for (Range r : ranges) {
                logger.info("Return multi part of file : from ({}) to ({})", r.start, r.end);
                // Add multipart boundary and header fields for every range.
                sos.println();
                sos.println("--" + MULTIPART_BOUNDARY);
                sos.println("Content-Type: " + contentType);
                sos.println("Content-Range: bytes " + r.start + "-" + r.end + "/" + r.total);

                // Copy single part range of multi part range.
                Range.copy(input, output, length, r.start, r.length);
            }

            // End with multipart boundary.
            sos.println();
            sos.println("--" + MULTIPART_BOUNDARY + "--");
        }
    }

}

From source file:com.hubrick.vertx.s3.client.AbstractS3ClientTest.java

@Before
public void setUp() throws Exception {
    // Point the client at the local mock server rather than real S3.
    final S3ClientOptions clientOptions = new S3ClientOptions();
    clientOptions.setDefaultHost(HOSTNAME);
    clientOptions.setDefaultPort(MOCKSERVER_PORT);
    clientOptions.setMaxPoolSize(10);
    clientOptions.setAwsRegion(S3TestCredentials.REGION);
    clientOptions.setAwsServiceName(S3TestCredentials.SERVICE_NAME);
    clientOptions.setHostnameOverride(HOSTNAME);

    // Let subclasses tweak the options before the client is built.
    augmentClientOptions(clientOptions);

    // Fixed clock so AWS request signatures are reproducible across runs.
    s3Client = new S3Client(vertx, clientOptions,
            Clock.fixed(Instant.ofEpochSecond(1478782934), ZoneId.of("UTC")));

}

From source file:org.openmhealth.shim.fitbit.FitbitShim.java

/**
 * Retrieves Fitbit data for the requested type and date range, defaulting to a
 * [yesterday, tomorrow] window around midnight UTC when no bounds are supplied.
 */
@Override
public ShimDataResponse getData(ShimDataRequest shimDataRequest) throws ShimException {

    AccessParameters accessParameters = shimDataRequest.getAccessParameters();
    String accessToken = accessParameters.getAccessToken();
    String tokenSecret = accessParameters.getTokenSecret();

    FitbitDataType fitbitDataType;
    try {
        fitbitDataType = FitbitDataType.valueOf(shimDataRequest.getDataTypeKey().trim().toUpperCase());
    } catch (NullPointerException | IllegalArgumentException e) {
        // NOTE(review): the original cause is dropped here — consider chaining it if
        // ShimException has a (String, Throwable) constructor.
        throw new ShimException("Null or Invalid data type parameter: " + shimDataRequest.getDataTypeKey()
                + " in shimDataRequest, cannot retrieve data.");
    }

    /***
     * Setup default date parameters
     */
    OffsetDateTime today = LocalDate.now().atStartOfDay(ZoneId.of("Z")).toOffsetDateTime();

    OffsetDateTime startDate = shimDataRequest.getStartDateTime() == null ? today.minusDays(1)
            : shimDataRequest.getStartDateTime();

    OffsetDateTime endDate = shimDataRequest.getEndDateTime() == null ? today.plusDays(1)
            : shimDataRequest.getEndDateTime();

    // Weight supports a single ranged request; everything else is fetched day by day.
    if (fitbitDataType.equals(FitbitDataType.WEIGHT)) {
        return getRangeData(startDate, endDate, fitbitDataType, shimDataRequest.getNormalize(), accessToken,
                tokenSecret);
    }

    /**
     * Fitbit's API limits you to making a request for each given day
     * of data. Thus we make a request for each day in the submitted time
     * range and then aggregate the response based on the normalization parameter.
     */
    List<ShimDataResponse> dayResponses = new ArrayList<>();
    for (OffsetDateTime day = startDate; !day.toLocalDate().isAfter(endDate.toLocalDate());
            day = day.plusDays(1)) {
        dayResponses.add(getDaysData(day, fitbitDataType, shimDataRequest.getNormalize(),
                accessToken, tokenSecret));
    }

    return shimDataRequest.getNormalize() ? aggregateNormalized(dayResponses)
            : aggregateIntoList(dayResponses);
}

From source file:it.tidalwave.northernwind.frontend.media.impl.DefaultMetadataCacheTest.java

/*******************************************************************************************************************
 *
 ******************************************************************************************************************/
@BeforeMethod
public void setup() throws Exception {
    // Build the cache under test from a fresh Spring context, with a controllable clock.
    context = helper.createSpringContext();
    underTest = context.getBean(DefaultMetadataCache.class);
    underTest.setClock(() -> mockClock);
    metadataLoader = context.getBean(MetadataLoader.class);

    // Assemble an image fixture carrying every metadata directory we care about.
    mediaFile = mock(ResourceFile.class);
    tiff = new TIFF();
    exif = new EXIF();
    iptc = new IPTC();
    xmp = new XMP();
    image = new ImageTestBuilder().withTiff(tiff).withExif(exif).withIptc(iptc).withXmp(xmp).build();
    siteNodeProperties = mock(ResourceProperties.class);
    mediaId = new Id("mediaId");

    when(metadataLoader.findMediaResourceFile(same(siteNodeProperties), eq(mediaId))).thenReturn(mediaFile);

    // A fresh DefaultMetadata must be produced on every call, so answer with a factory
    // lambda rather than 'thenReturn(new DefaultMetadata(image))'.
    when(metadataLoader.loadMetadata(same(mediaFile)))
            .thenAnswer(invocation -> new DefaultMetadata("media.jpg", image));

    assertThat(underTest.getMedatataExpirationTime(),
            is(DefaultMetadataCache.DEFAULT_METADATA_EXPIRATION_TIME));
    initialTime = Instant.ofEpochMilli(1369080000000L).atZone(ZoneId.of("GMT"));
    setTime(initialTime);
}