Example usage for java.time ZoneOffset UTC

List of usage examples for java.time ZoneOffset UTC

Introduction

This page collects example usages of the java.time ZoneOffset.UTC constant.

Prototype

public static final ZoneOffset UTC

Document

The time-zone offset for UTC, with an ID of 'Z'.
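
Before the project examples below, here is a minimal standalone sketch of the constant in use (the class name and printed values are illustrative):

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class ZoneOffsetUtcDemo {
    public static void main(String[] args) {
        // Current date-time anchored to UTC instead of the JVM default zone.
        ZonedDateTime nowUtc = ZonedDateTime.now(ZoneOffset.UTC);

        // ZoneOffset.UTC is itself a ZoneId, so it is accepted wherever a ZoneId is expected.
        OffsetDateTime offsetNow = OffsetDateTime.now(ZoneOffset.UTC);

        // Interpret an Instant as a LocalDateTime in UTC.
        LocalDateTime utcLocal = LocalDateTime.ofInstant(Instant.now(), ZoneOffset.UTC);

        System.out.println(nowUtc);
        System.out.println(offsetNow);
        System.out.println(utcLocal);
    }
}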

Usage

From source file:org.wso2.carbon.apimgt.core.impl.APIPublisherImpl.java

/**
 * Adds a new API to the system
 *
 * @param apiBuilder API model object
 * @return UUID of the added API.
 * @throws APIManagementException if failed to add API
 */
@Override
public String addAPI(API.APIBuilder apiBuilder) throws APIManagementException {

    API createdAPI;
    APIGateway gateway = getApiGateway();

    apiBuilder.provider(getUsername());
    if (StringUtils.isEmpty(apiBuilder.getId())) {
        apiBuilder.id(UUID.randomUUID().toString());
    }

    LocalDateTime localDateTime = LocalDateTime.now();
    apiBuilder.createdTime(localDateTime);
    apiBuilder.lastUpdatedTime(localDateTime);
    apiBuilder.createdBy(getUsername());
    apiBuilder.updatedBy(getUsername());
    if (apiBuilder.getLabels().isEmpty()) {
        Set<String> labelSet = new HashSet<>();
        labelSet.add(APIMgtConstants.DEFAULT_LABEL_NAME);
        apiBuilder.labels(labelSet);
    }
    Map<String, Endpoint> apiEndpointMap = apiBuilder.getEndpoint();
    validateEndpoints(apiEndpointMap, false);
    try {
        if (!isApiNameExist(apiBuilder.getName()) && !isContextExist(apiBuilder.getContext())) {
            LifecycleState lifecycleState = getApiLifecycleManager().addLifecycle(APIMgtConstants.API_LIFECYCLE,
                    getUsername());
            apiBuilder.associateLifecycle(lifecycleState);

            createUriTemplateList(apiBuilder, false);

            List<UriTemplate> list = new ArrayList<>(apiBuilder.getUriTemplates().values());
            List<TemplateBuilderDTO> resourceList = new ArrayList<>();

            validateApiPolicy(apiBuilder.getApiPolicy());
            validateSubscriptionPolicies(apiBuilder);
            for (UriTemplate uriTemplate : list) {
                TemplateBuilderDTO dto = new TemplateBuilderDTO();
                dto.setTemplateId(uriTemplate.getTemplateId());
                dto.setUriTemplate(uriTemplate.getUriTemplate());
                dto.setHttpVerb(uriTemplate.getHttpVerb());
                Map<String, Endpoint> map = uriTemplate.getEndpoint();
                if (map.containsKey(APIMgtConstants.PRODUCTION_ENDPOINT)) {
                    Endpoint endpoint = map.get(APIMgtConstants.PRODUCTION_ENDPOINT);
                    dto.setProductionEndpoint(endpoint);
                }
                if (map.containsKey(APIMgtConstants.SANDBOX_ENDPOINT)) {
                    Endpoint endpoint = map.get(APIMgtConstants.SANDBOX_ENDPOINT);
                    dto.setSandboxEndpoint(endpoint);
                }
                resourceList.add(dto);
            }
            GatewaySourceGenerator gatewaySourceGenerator = getGatewaySourceGenerator();
            APIConfigContext apiConfigContext = new APIConfigContext(apiBuilder.build(),
                    config.getGatewayPackageName());
            gatewaySourceGenerator.setApiConfigContext(apiConfigContext);
            String gatewayConfig = gatewaySourceGenerator.getConfigStringFromTemplate(resourceList);
            if (log.isDebugEnabled()) {
                log.debug("API " + apiBuilder.getName() + "gateway config: " + gatewayConfig);
            }
            apiBuilder.gatewayConfig(gatewayConfig);

            if (StringUtils.isEmpty(apiBuilder.getApiDefinition())) {
                apiBuilder.apiDefinition(apiDefinitionFromSwagger20.generateSwaggerFromResources(apiBuilder));
            }
            if (!StringUtils.isEmpty(apiBuilder.getApiPermission())) {
                Map<String, Integer> roleNamePermissionList;
                roleNamePermissionList = getAPIPermissionArray(apiBuilder.getApiPermission());
                apiBuilder.permissionMap(roleNamePermissionList);
            }

            createdAPI = apiBuilder.build();
            APIUtils.validate(createdAPI);

            //Add API to gateway
            gateway.addAPI(createdAPI);
            if (log.isDebugEnabled()) {
                log.debug("API : " + apiBuilder.getName() + " has been identifier published to gateway");
            }

            Set<String> apiRoleList;

            //if the API has public visibility, add the API without any role checking
            //if the API has role based visibility, add the API with role checking
            if (API.Visibility.PUBLIC == createdAPI.getVisibility()) {
                getApiDAO().addAPI(createdAPI);
            } else if (API.Visibility.RESTRICTED == createdAPI.getVisibility()) {
                //get all the roles in the system
                Set<String> allAvailableRoles = APIUtils.getAllAvailableRoles();
                //get the roles needed to be associated with the API
                apiRoleList = createdAPI.getVisibleRoles();
                if (APIUtils.checkAllowedRoles(allAvailableRoles, apiRoleList)) {
                    getApiDAO().addAPI(createdAPI);
                }
            }

            APIUtils.logDebug("API " + createdAPI.getName() + "-" + createdAPI.getVersion() + " was created "
                    + "successfully.", log);

            // 'API_M Functions' related code
            //Create a payload with event specific details
            Map<String, String> eventPayload = new HashMap<>();
            eventPayload.put(APIMgtConstants.FunctionsConstants.API_ID, createdAPI.getId());
            eventPayload.put(APIMgtConstants.FunctionsConstants.API_NAME, createdAPI.getName());
            eventPayload.put(APIMgtConstants.FunctionsConstants.API_VERSION, createdAPI.getVersion());
            eventPayload.put(APIMgtConstants.FunctionsConstants.API_DESCRIPTION, createdAPI.getDescription());
            eventPayload.put(APIMgtConstants.FunctionsConstants.API_CONTEXT, createdAPI.getContext());
            eventPayload.put(APIMgtConstants.FunctionsConstants.API_LC_STATUS, createdAPI.getLifeCycleStatus());
            eventPayload.put(APIMgtConstants.FunctionsConstants.API_PERMISSION, createdAPI.getApiPermission());
            // This will notify all the EventObservers(Asynchronous)
            ObserverNotifier observerNotifier = new ObserverNotifier(Event.API_CREATION, getUsername(),
                    ZonedDateTime.now(ZoneOffset.UTC), eventPayload, this);
            ObserverNotifierThreadPool.getInstance().executeTask(observerNotifier);
        } else {
            String message = "Duplicate API already Exist with name/Context " + apiBuilder.getName();
            log.error(message);
            throw new APIManagementException(message, ExceptionCodes.API_ALREADY_EXISTS);
        }
    } catch (APIMgtDAOException e) {
        String errorMsg = "Error occurred while creating the API - " + apiBuilder.getName();
        log.error(errorMsg);
        throw new APIManagementException(errorMsg, e, e.getErrorHandler());
    } catch (LifecycleException | ParseException e) {
        String errorMsg = "Error occurred while Associating the API - " + apiBuilder.getName();
        log.error(errorMsg);
        throw new APIManagementException(errorMsg, e, ExceptionCodes.APIMGT_LIFECYCLE_EXCEPTION);
    } catch (APITemplateException e) {
        String message = "Error generating API configuration for API " + apiBuilder.getName();
        log.error(message, e);
        throw new APIManagementException(message, ExceptionCodes.TEMPLATE_EXCEPTION);
    } catch (GatewayException e) {
        String message = "Error occurred while adding API - " + apiBuilder.getName() + " to gateway";
        log.error(message, e);
        throw new APIManagementException(message, ExceptionCodes.GATEWAY_EXCEPTION);
    }
    return apiBuilder.getId();
}

From source file:org.apache.storm.sql.compiler.backends.trident.TestPlanCompiler.java

@Test
public void testDateKeywords() throws Exception {
    int EXPECTED_VALUE_SIZE = 1;
    String sql = "SELECT " + "LOCALTIME, CURRENT_TIME, LOCALTIMESTAMP, CURRENT_TIMESTAMP, CURRENT_DATE "
            + "FROM FOO " + "WHERE ID > 0 AND ID < 2";
    TestCompilerUtils.CalciteState state = TestCompilerUtils.sqlOverDummyTable(sql);

    final Map<String, ISqlTridentDataSource> data = new HashMap<>();
    data.put("FOO", new TestUtils.MockSqlTridentDataSource());
    PlanCompiler compiler = new PlanCompiler(data, typeFactory, dataContext);
    final AbstractTridentProcessor proc = compiler.compileForTest(state.tree());
    final TridentTopology topo = proc.build(data);
    Fields f = proc.outputStream().getOutputFields();
    proc.outputStream().partitionPersist(new TestUtils.MockStateFactory(), f, new TestUtils.MockStateUpdater(),
            new Fields());
    runTridentTopology(EXPECTED_VALUE_SIZE, proc, topo);

    long utcTimestamp = (long) dataContext.get(DataContext.Variable.UTC_TIMESTAMP.camelName);
    long currentTimestamp = (long) dataContext.get(DataContext.Variable.CURRENT_TIMESTAMP.camelName);
    long localTimestamp = (long) dataContext.get(DataContext.Variable.LOCAL_TIMESTAMP.camelName);

    System.out.println(getCollectedValues());

    java.sql.Timestamp timestamp = new java.sql.Timestamp(utcTimestamp);
    int dateInt = (int) timestamp.toLocalDateTime().atOffset(ZoneOffset.UTC).toLocalDate().toEpochDay();
    int localTimeInt = (int) (localTimestamp % DateTimeUtils.MILLIS_PER_DAY);
    int currentTimeInt = (int) (currentTimestamp % DateTimeUtils.MILLIS_PER_DAY);

    Assert.assertArrayEquals(
            new Values[] {
                    new Values(localTimeInt, currentTimeInt, localTimestamp, currentTimestamp, dateInt) },
            getCollectedValues().toArray());
}
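
For comparison, a minimal standalone sketch of turning an epoch-millisecond value into an epoch-day count purely with java.time and ZoneOffset.UTC (the input value and class name are illustrative; unlike the java.sql.Timestamp route above, this interprets the value in UTC rather than the default zone):

import java.time.Instant;
import java.time.ZoneOffset;

public class EpochDayDemo {
    public static void main(String[] args) {
        long utcMillis = 1_500_000_000_000L; // illustrative epoch-millisecond value

        // Attach the UTC offset to the instant and count whole days since 1970-01-01.
        long epochDay = Instant.ofEpochMilli(utcMillis)
                .atOffset(ZoneOffset.UTC)
                .toLocalDate()
                .toEpochDay();

        System.out.println(epochDay); // 17361, i.e. 2017-07-14
    }
}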

From source file:com.gnadenheimer.mg.utils.Utils.java

public void backUpIfOld() {
    try {
        Path dir = Paths.get(getPersistenceMap().get("backUpDir")); // specify your directory

        Optional<Path> lastFilePath = Files.list(dir) // here we get the stream with full directory listing
                .filter(f -> Files.isDirectory(f)) // exclude files from listing
                .max(Comparator.comparingLong(f -> f.toFile().lastModified())); // finally get the last file using simple comparator by lastModified field

        if (lastFilePath.isPresent()) // your folder may be empty
        {
            FileTime fileTime = Files.getLastModifiedTime(lastFilePath.get());
            Long age = DAYS.between(LocalDateTime.ofInstant(fileTime.toInstant(), ZoneOffset.UTC),
                    LocalDateTime.now());
            if (age > 7) {
                exectueBackUp(getPersistenceMap().get("backUpDir"));
            }
        } else {
            exectueBackUp(getPersistenceMap().get("backUpDir"));
        }
    } catch (Exception ex) {
        LOGGER.error(Thread.currentThread().getStackTrace()[1].getMethodName(), ex);
        JOptionPane.showMessageDialog(null,
                Thread.currentThread().getStackTrace()[1].getMethodName() + " - " + ex.getMessage());
    }
}
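
Note that the age calculation above converts the file time to UTC but compares it against LocalDateTime.now() in the default zone; a minimal sketch that keeps both sides in UTC (the path and the 7-day threshold are illustrative):

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;

public class BackupAgeCheck {
    public static void main(String[] args) throws Exception {
        Path backup = Paths.get("backups/latest"); // illustrative path
        FileTime fileTime = Files.getLastModifiedTime(backup);

        // Interpret both the file time and "now" in UTC so the difference is zone-independent.
        LocalDateTime modifiedUtc = LocalDateTime.ofInstant(fileTime.toInstant(), ZoneOffset.UTC);
        LocalDateTime nowUtc = LocalDateTime.now(ZoneOffset.UTC);

        long ageInDays = ChronoUnit.DAYS.between(modifiedUtc, nowUtc);
        if (ageInDays > 7) {
            System.out.println("Backup is " + ageInDays + " days old; a new one is due.");
        }
    }
}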

From source file:com.gnadenheimer.mg.utils.Utils.java

/**
 * Delete auto-backups older than 30 days
 */
public void deleteOldBackUps() {
    try {
        Path dir = Paths.get(getPersistenceMap().get("backUpDir")); // specify your directory

        Optional<Path> lastFilePath = Files.list(dir) // here we get the stream with full directory listing
                .filter(f -> Files.isDirectory(f)) // exclude files from listing
                .min(Comparator.comparingLong(f -> f.toFile().lastModified())); // finally get the last file using simple comparator by lastModified field

        if (lastFilePath.isPresent()) // your folder may be empty
        {
            FileTime fileTime = Files.getLastModifiedTime(lastFilePath.get());
            Long age = DAYS.between(LocalDateTime.ofInstant(fileTime.toInstant(), ZoneOffset.UTC),
                    LocalDateTime.now());
            if (age > 30) {
                Files.walk(lastFilePath.get(), FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder())
                        .map(Path::toFile).peek(System.out::println).forEach(File::delete);
                deleteOldBackUps();
            }
        }
    } catch (Exception ex) {
        LOGGER.error(Thread.currentThread().getStackTrace()[1].getMethodName(), ex);
        JOptionPane.showMessageDialog(null,
                Thread.currentThread().getStackTrace()[1].getMethodName() + " - " + ex.getMessage());
    }
}

From source file:org.silverpeas.core.calendar.Recurrence.java

private OffsetDateTime normalize(final Temporal temporal) {
    OffsetDateTime dateTime = asOffsetDateTime(temporal);
    if (this.startDate != null) {
        return TemporalConverter.applyByType(this.startDate,
                t -> dateTime.with(LocalTime.MIDNIGHT.atOffset(ZoneOffset.UTC)),
                t -> dateTime.with(t.toOffsetTime()));
    }
    return dateTime;
}
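
The adjuster passed to with() above replaces both the time-of-day and the offset in one step; a minimal standalone sketch of that behaviour (the parsed value and class name are illustrative):

import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;

public class MidnightUtcAdjusterDemo {
    public static void main(String[] args) {
        OffsetDateTime dateTime = OffsetDateTime.parse("2017-07-11T23:00:00+02:00"); // illustrative value

        // An OffsetTime acts as a TemporalAdjuster: it replaces the time-of-day and the offset,
        // leaving the local date unchanged.
        OffsetDateTime normalized = dateTime.with(LocalTime.MIDNIGHT.atOffset(ZoneOffset.UTC));

        System.out.println(normalized); // 2017-07-11T00:00Z
    }
}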

From source file:org.silverpeas.core.calendar.CalendarEventOccurrenceGenerationTest.java

@Test
public void nextOccurrenceAboutRecurrentHourEventStartingOnSummerShouldWork() {
    final OffsetDateTime startDateTimeOnParis = dateTimeOnParis(2017, 7, 11, 23, 0);
    final OffsetDateTime endDateTimeOnParis = dateTimeOnParis(2017, 7, 12, 0, 45);
    assertThat(startDateTimeOnParis.withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 7, 11, 21, 0)));
    assertThat(endDateTimeOnParis.withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 7, 11, 22, 45)));
    assertThat(dateTimeOnParis(2017, 12, 11, 23, 0).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 12, 11, 22, 0)));
    assertThat(dateTimeOnParis(2017, 12, 12, 0, 45).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 12, 11, 23, 45)));
    CalendarEvent recurrentEvent = calendarEventForTest(
            Period.between(startDateTimeOnParis, endDateTimeOnParis), PARIS_ZONE_ID)
                    .recur(Recurrence.every(1, TimeUnit.MONTH).until(10));
    ZonedDateTime from = ZonedDateTime.parse("2017-12-11T21:59:59-01:00[Atlantic/Azores]");
    CalendarEventOccurrence result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 1, 11, 22, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 1, 11, 23, 45)));

    from = ZonedDateTime.parse("2017-12-11T22:00:00-01:00[Atlantic/Azores]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 1, 11, 22, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 1, 11, 23, 45)));

    from = ZonedDateTime.parse("2017-12-11T22:59:59+00:00[UTC]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 1, 11, 22, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 1, 11, 23, 45)));

    from = ZonedDateTime.parse("2017-12-11T23:00:00+00:00[UTC]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 1, 11, 22, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 1, 11, 23, 45)));

    from = ZonedDateTime.parse("2017-12-11T22:59:59+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2017, 12, 11, 22, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2017, 12, 11, 23, 45)));

    from = ZonedDateTime.parse("2017-12-11T23:00:00+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 1, 11, 22, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 1, 11, 23, 45)));
}

From source file:com.inqool.dcap.office.indexer.indexer.SolrBulkIndexer.java

protected SolrInputDocument modelToSolrInputDoc(ZdoModel model) {
    logger.debug("Constructing new SolrInputDocument...");

    final Map<String, SolrInputField> fields = new HashMap<>();

    //Add all Dublin Core terms
    for (String property : DCTools.getDcTermList()) {
        SolrInputField field = new SolrInputField(property);
        List<String> values = model.getAll(new PropertyImpl("http://purl.org/dc/terms/" + property));
        if (values.isEmpty())
            continue;
        //Skip fields that were not ticked to be published
        String visible = model.get(new PropertyImpl("http://purl.org/dc/terms/" + property + "_visibility"));
        if ("false".equals(visible) || "0".equals(visible)) { //0 should not occur any more
            continue;
        }
        if ("isPartOf".equals(property)) { //remove ip address from isPartOf
            values.set(0, store.getOnlyIdFromUrl(values.get(0)));
        }
        if ("".equals(values.get(0))) {
            values.set(0, "unknown");
        }

        field.addValue(values, INDEX_TIME_BOOST);
        fields.put(property, field);

        //Suggester data
        if ("title".equals(property) || "creator".equals(property)) {
            SolrInputDocument suggesterDoc = new SolrInputDocument();
            String suggestVal = values.get(0).trim();
            if (!suggestVal.isEmpty() && !suggestVal.equals("unknown")) {
                suggesterDoc.addField("suggesterData", values.get(0).trim());
                dataForSuggester.add(suggesterDoc);
            }
        }
    }

    //Add system fields
    SolrInputField field = new SolrInputField("id");
    field.addValue(store.getOnlyIdFromUrl(model.getUrl()), INDEX_TIME_BOOST);
    fields.put("id", field);

    addSolrFieldFromFedoraProperty("inventoryId", ZdoTerms.inventoryId, model, fields);

    addSolrFieldFromFedoraProperty("zdoType", ZdoTerms.zdoType, model, fields);
    addSolrFieldFromFedoraProperty("zdoGroup", ZdoTerms.group, model, fields);
    addSolrFieldFromFedoraProperty("orgIdmId", ZdoTerms.organization, model, fields);
    addSolrFieldFromFedoraProperty("allowContentPublicly", ZdoTerms.allowContentPublicly, model, fields);
    addSolrFieldFromFedoraProperty("allowPdfExport", ZdoTerms.allowPdfExport, model, fields);
    addSolrFieldFromFedoraProperty("allowEpubExport", ZdoTerms.allowEpubExport, model, fields);
    addSolrFieldFromFedoraProperty("watermark", ZdoTerms.watermark, model, fields);
    addSolrFieldFromFedoraProperty("watermarkPosition", ZdoTerms.watermarkPosition, model, fields);
    addSolrFieldFromFedoraProperty("imgThumb", ZdoTerms.imgThumb, model, fields);
    addSolrFieldFromFedoraProperty("imgNormal", ZdoTerms.imgNormal, model, fields);

    String publishFromStr = model.get(ZdoTerms.publishFrom);
    if (publishFromStr != null) {
        String publishFromUtc = ZonedDateTime
                .ofInstant(Instant.ofEpochSecond(Long.valueOf(publishFromStr)), ZoneId.systemDefault())
                .withZoneSameInstant(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        addSolrField("publishFrom", publishFromUtc, fields);
    }
    String publishToStr = model.get(ZdoTerms.publishTo);
    if (publishToStr != null) {
        String publishToUtc = ZonedDateTime
                .ofInstant(Instant.ofEpochSecond(Long.valueOf(publishToStr)), ZoneId.systemDefault())
                .withZoneSameInstant(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        addSolrField("publishTo", publishToUtc, fields);
    }

    String created = model.get(DCTerms.created);
    if (created != null) {
        AtomicInteger yearStart = new AtomicInteger();
        AtomicInteger yearEnd = new AtomicInteger();
        AtomicBoolean startValid = new AtomicBoolean();
        AtomicBoolean endValid = new AtomicBoolean();
        YearNormalizer.normalizeCreatedYear(created, yearStart, startValid, yearEnd, endValid);
        if (startValid.get()) {
            addSolrField("yearStart", yearStart.get(), fields);
        } else {
            logger.warn("Year could not be normalized for input string " + created);
        }
        if (endValid.get()) {
            addSolrField("yearEnd", yearEnd.get(), fields);
        }
    }

    String orgName = orgNameMapping.get(model.get(ZdoTerms.organization));
    if (orgName == null) {
        orgName = "Neznm";
    }
    addSolrField("organization", orgName, fields);

    String documentTypeId = model.get(ZdoTerms.documentType); //type and subtype names must be found for id
    String documentSubTypeId = model.get(ZdoTerms.documentSubType);
    if (documentTypeId != null) {
        addSolrField("documentType", documentTypeAccess.getTypeNameForId(Integer.valueOf(documentTypeId)),
                fields);
    }
    if (documentSubTypeId != null) {
        addSolrField("documentSubType",
                documentTypeAccess.getSubTypeNameForId(Integer.valueOf(documentSubTypeId)), fields);
    }

    //Add customFields
    int fieldIndex = 0; //we actually start from 1
    do {
        fieldIndex++;
        String fieldName = model
                .get(new PropertyImpl("http://inqool.cz/zdo/1.0/customField_" + fieldIndex + "_name"));
        if (fieldName == null)
            break;
        fieldName = "customField_" + fieldName;
        String visible = model
                .get(new PropertyImpl("http://inqool.cz/zdo/1.0/customField_" + fieldIndex + "_visibility"));
        if ("false".equals(visible) || "0".equals(visible))
            continue;
        List<String> fieldValues = model
                .getAll(new PropertyImpl("http://inqool.cz/zdo/1.0/customField_" + fieldIndex));
        if ("".equals(fieldValues.get(0))) {
            fieldValues.set(0, "unknown");
        }
        SolrInputField customField = new SolrInputField(fieldName);
        customField.addValue(fieldValues, INDEX_TIME_BOOST);
        fields.put(fieldName, customField);
    } while (true);

    SolrInputDocument solrInputDocument = new SolrInputDocument(fields);
    return solrInputDocument;
}
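
The publish-date conversion above attaches the system default zone and then shifts to UTC; since the target offset is UTC either way, a minimal equivalent sketch can attach ZoneOffset.UTC directly (the epoch value and class name are illustrative):

import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class PublishDateDemo {
    public static void main(String[] args) {
        long epochSeconds = 1_500_000_000L; // illustrative value read from the model

        // Attach the UTC offset directly and format as ISO-8601 with offset.
        String publishFromUtc = OffsetDateTime
                .ofInstant(Instant.ofEpochSecond(epochSeconds), ZoneOffset.UTC)
                .format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);

        System.out.println(publishFromUtc); // 2017-07-14T02:40:00Z
    }
}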

From source file:org.codice.ddf.registry.federationadmin.impl.FederationAdmin.java

private void updateDateFields(RegistryPackageType rpt) {

    ExtrinsicObjectType nodeInfo = null;
    for (JAXBElement identifiable : rpt.getRegistryObjectList().getIdentifiable()) {
        RegistryObjectType registryObject = (RegistryObjectType) identifiable.getValue();

        if (registryObject instanceof ExtrinsicObjectType
                && RegistryConstants.REGISTRY_NODE_OBJECT_TYPE.equals(registryObject.getObjectType())) {
            nodeInfo = (ExtrinsicObjectType) registryObject;
            break;
        }
    }
    if (nodeInfo != null) {
        boolean liveDateFound = false;
        boolean lastUpdatedFound = false;

        OffsetDateTime now = OffsetDateTime.now(ZoneId.of(ZoneOffset.UTC.toString()));
        String rightNow = now.toString();

        for (SlotType1 slot : nodeInfo.getSlot()) {
            if (slot.getName().equals(RegistryConstants.XML_LIVE_DATE_NAME)) {
                liveDateFound = true;
            } else if (slot.getName().equals(RegistryConstants.XML_LAST_UPDATED_NAME)) {
                ValueListType valueList = EbrimConstants.RIM_FACTORY.createValueListType();
                valueList.getValue().add(rightNow);
                slot.setValueList(EbrimConstants.RIM_FACTORY.createValueList(valueList));
                lastUpdatedFound = true;
            }
        }

        if (!liveDateFound) {
            SlotType1 liveDate = slotHelper.create(RegistryConstants.XML_LIVE_DATE_NAME, rightNow, DATE_TIME);

            nodeInfo.getSlot().add(liveDate);
        }

        if (!lastUpdatedFound) {
            SlotType1 lastUpdated = slotHelper.create(RegistryConstants.XML_LAST_UPDATED_NAME, rightNow,
                    DATE_TIME);

            nodeInfo.getSlot().add(lastUpdated);
        }
    }
}
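
The call above rebuilds a ZoneId from the string form of the UTC offset; because ZoneOffset is itself a ZoneId, a minimal equivalent sketch passes the constant directly (the class name is illustrative):

import java.time.OffsetDateTime;
import java.time.ZoneOffset;

public class UtcTimestampDemo {
    public static void main(String[] args) {
        // ZoneOffset extends ZoneId, so the UTC constant can be passed to now() directly.
        OffsetDateTime now = OffsetDateTime.now(ZoneOffset.UTC);

        // toString() yields an ISO-8601 value with the 'Z' offset, as used for the registry date slots above.
        String rightNow = now.toString();
        System.out.println(rightNow);
    }
}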

From source file:org.silverpeas.core.calendar.CalendarEventOccurrenceGenerationTest.java

@Test
public void nextOccurrenceAboutRecurrentHourEventStartingOnSummerAndNowAboutHourChangingShouldWork() {
    final OffsetDateTime startDateTimeOnParis = dateTimeOnParis(2017, 7, 29, 3, 0);
    final OffsetDateTime endDateTimeOnParis = dateTimeOnParis(2017, 7, 29, 4, 30);
    assertThat(startDateTimeOnParis.withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 7, 29, 1, 0)));
    assertThat(endDateTimeOnParis.withOffsetSameInstant(ZoneOffset.UTC), is(dateTimeInUTC(2017, 7, 29, 2, 30)));
    assertThat(dateTimeOnParis(2017, 10, 28, 23, 59).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 10, 28, 21, 59)));
    assertThat(dateTimeOnParis(2017, 10, 29, 0, 0).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 10, 28, 22, 0)));
    assertThat(dateTimeOnParis(2017, 10, 29, 2, 0).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 10, 29, 0, 0)));
    assertThat(dateTimeOnParis(2017, 10, 29, 2, 59).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 10, 29, 0, 59)));
    assertThat(dateTimeOnParis(2017, 10, 29, 3, 0).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2017, 10, 29, 2, 0)));
    assertThat(dateTimeOnParis(2018, 3, 25, 2, 59).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2018, 3, 25, 1, 59)));
    assertThat(dateTimeOnParis(2018, 3, 25, 3, 0).withOffsetSameInstant(ZoneOffset.UTC),
            is(dateTimeInUTC(2018, 3, 25, 1, 0)));
    CalendarEvent recurrentEvent = calendarEventForTest(
            Period.between(startDateTimeOnParis, endDateTimeOnParis), PARIS_ZONE_ID)
                    .recur(Recurrence.every(1, TimeUnit.MONTH).until(100));
    ZonedDateTime from = ZonedDateTime.parse("2017-10-29T00:59:59-01:00[Atlantic/Azores]");
    CalendarEventOccurrence result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2017, 10, 29, 2, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2017, 10, 29, 3, 30)));

    from = ZonedDateTime.parse("2017-10-29T01:00:00-01:00[Atlantic/Azores]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2017, 11, 29, 2, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2017, 11, 29, 3, 30)));

    from = ZonedDateTime.parse("2017-10-29T01:59:59+00:00[UTC]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2017, 10, 29, 2, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2017, 10, 29, 3, 30)));

    from = ZonedDateTime.parse("2017-10-29T02:00:00+00:00[UTC]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2017, 11, 29, 2, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2017, 11, 29, 3, 30)));

    from = ZonedDateTime.parse("2017-10-29T02:59:59+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2017, 10, 29, 2, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2017, 10, 29, 3, 30)));

    from = ZonedDateTime.parse("2017-10-29T03:00:00+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2017, 11, 29, 2, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2017, 11, 29, 3, 30)));

    from = ZonedDateTime.parse("2018-01-29T02:59:59+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 1, 29, 2, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 1, 29, 3, 30)));

    from = ZonedDateTime.parse("2018-01-29T03:00:00+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 3, 29, 1, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 3, 29, 2, 30)));

    from = ZonedDateTime.parse("2018-02-28T02:59:59+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 3, 29, 1, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 3, 29, 2, 30)));

    from = ZonedDateTime.parse("2018-02-28T03:00:00+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 3, 29, 1, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 3, 29, 2, 30)));

    from = ZonedDateTime.parse("2018-03-29T02:59:59+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 3, 29, 1, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 3, 29, 2, 30)));

    from = ZonedDateTime.parse("2018-03-29T03:00:00+01:00[Europe/Paris]");
    result = generator.generateNextOccurrenceOf(recurrentEvent, from);
    assertThat(result, notNullValue());
    assertThat(result.getStartDate(), is(dateTimeInUTC(2018, 4, 29, 1, 0)));
    assertThat(result.getEndDate(), is(dateTimeInUTC(2018, 4, 29, 2, 30)));
}

From source file:org.apache.nifi.atlas.reporting.ReportLineageToAtlas.java

private void initAtlasProperties(ConfigurationContext context) throws IOException {
    List<String> urls = new ArrayList<>();
    parseAtlasUrls(context.getProperty(ATLAS_URLS), urls::add);
    final boolean isAtlasApiSecure = urls.stream().anyMatch(url -> url.toLowerCase().startsWith("https"));
    final String atlasAuthNMethod = context.getProperty(ATLAS_AUTHN_METHOD).getValue();

    final String confDirStr = context.getProperty(ATLAS_CONF_DIR).evaluateAttributeExpressions().getValue();
    final File confDir = confDirStr != null && !confDirStr.isEmpty() ? new File(confDirStr) : null;

    atlasProperties = new Properties();
    final File atlasPropertiesFile = new File(confDir, ATLAS_PROPERTIES_FILENAME);

    final Boolean createAtlasConf = context.getProperty(ATLAS_CONF_CREATE).asBoolean();
    if (!createAtlasConf) {
        // Load existing properties file.
        if (atlasPropertiesFile.isFile()) {
            getLogger().info("Loading {}", new Object[] { atlasPropertiesFile });
            try (InputStream in = new FileInputStream(atlasPropertiesFile)) {
                atlasProperties.load(in);
            }
        } else {
            final String fileInClasspath = "/" + ATLAS_PROPERTIES_FILENAME;
            try (InputStream in = ReportLineageToAtlas.class.getResourceAsStream(fileInClasspath)) {
                getLogger().info("Loading {} from classpath", new Object[] { fileInClasspath });
                if (in == null) {
                    throw new ProcessException(String.format(
                            "Could not find %s in classpath." + " Please add it to classpath,"
                                    + " or specify %s a directory containing Atlas properties file,"
                                    + " or enable %s to generate it.",
                            fileInClasspath, ATLAS_CONF_DIR.getDisplayName(),
                            ATLAS_CONF_CREATE.getDisplayName()));
                }
                atlasProperties.load(in);
            }
        }
    }

    // Resolve default cluster name.
    defaultClusterName = context.getProperty(ATLAS_DEFAULT_CLUSTER_NAME).evaluateAttributeExpressions()
            .getValue();
    if (defaultClusterName == null || defaultClusterName.isEmpty()) {
        // If default cluster name is not specified by processor configuration, then load it from Atlas config.
        defaultClusterName = atlasProperties.getProperty(ATLAS_PROPERTY_CLUSTER_NAME);
    }

    // If default cluster name is still not defined, processor should not be able to start.
    if (defaultClusterName == null || defaultClusterName.isEmpty()) {
        throw new ProcessException("Default cluster name is not defined.");
    }

    atlasAuthN = getAtlasAuthN(atlasAuthNMethod);
    atlasAuthN.configure(context);

    // Create Atlas configuration file if necessary.
    if (createAtlasConf) {

        atlasProperties.put(ATLAS_PROPERTY_CLUSTER_NAME, defaultClusterName);
        atlasProperties.put(ATLAS_PROPERTY_ENABLE_TLS, String.valueOf(isAtlasApiSecure));

        setKafkaConfig(atlasProperties, context);

        atlasAuthN.populateProperties(atlasProperties);

        try (FileOutputStream fos = new FileOutputStream(atlasPropertiesFile)) {
            String ts = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX").withZone(ZoneOffset.UTC)
                    .format(Instant.now());
            atlasProperties.store(fos, "Generated by Apache NiFi ReportLineageToAtlas ReportingTask at " + ts);
        }
    }

    getLogger().debug("Force reloading Atlas application properties.");
    ApplicationProperties.forceReload();

    if (confDir != null) {
        // If atlasConfDir is not set, atlas-application.properties will be searched under classpath.
        Properties props = System.getProperties();
        final String atlasConfProp = "atlas.conf";
        props.setProperty(atlasConfProp, confDir.getAbsolutePath());
        getLogger().debug("{} has been set to: {}",
                new Object[] { atlasConfProp, props.getProperty(atlasConfProp) });
    }
}
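
A minimal standalone sketch of the timestamp pattern used when the generated properties file is written above (the class name is illustrative; the pattern string is taken from the example):

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class UtcFormatterDemo {
    public static void main(String[] args) {
        // withZone(ZoneOffset.UTC) lets the formatter derive date and time fields directly from an Instant.
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX")
                .withZone(ZoneOffset.UTC);

        String ts = formatter.format(Instant.now());
        System.out.println(ts); // e.g. 2024-01-01T12:00:00.000Z
    }
}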