Example usage for org.joda.time DateTime minusMinutes

List of usage examples for org.joda.time DateTime minusMinutes

Introduction

In this page you can find the example usage for org.joda.time DateTime minusMinutes.

Prototype

public DateTime minusMinutes(int minutes) 

Source Link

Document

Returns a copy of this datetime minus the specified number of minutes.

Usage

From source file:be.e_contract.jwatchdog.datasource.rrd.RRDDatasource.java

License:Open Source License

@Override
public double[] getValues(int minutes) {
    LOG.debug("RRD file: " + this.rrdFile);
    RRDatabase rrd;
    try {
        rrd = new RRDatabase(this.rrdFile);
    } catch (IOException e) {
        // Pass the IOException as cause so the original stack trace is preserved.
        throw new RuntimeException("RRD IO error: " + e.getMessage(), e);
    }
    try {
        DateTime lastUpdate = new DateTime(rrd.getLastUpdate());
        LOG.debug("last update: " + lastUpdate);
        DateTime now = new DateTime();
        if (lastUpdate.isBefore(now.minusMinutes(minutes))) {
            LOG.warn("RRD outdated");
        }

        // End one primary-data-point interval before the last update and read
        // 'minutes' worth of averaged data before that end point.
        Header header = rrd.getHeader();
        int primaryDataPointInterval = header.getPDPStep();
        DateTime endDateTime = lastUpdate.minusSeconds(primaryDataPointInterval);
        DateTime startDateTime = endDateTime.minusMinutes(minutes);
        DataChunk dataChunk;
        try {
            dataChunk = rrd.getData(ConsolidationFunctionType.AVERAGE, startDateTime.toDate(),
                    endDateTime.toDate(), primaryDataPointInterval);
        } catch (IOException e) {
            throw new RuntimeException("RRD IO error: " + e.getMessage(), e);
        }
        double[][] data = dataChunk.getData();
        Set<String> dataSourceNames = rrd.getDataSourcesName();
        LOG.debug("RRD datasources: " + dataSourceNames);

        // Resolve the column index of the configured datasource; default to 0.
        int dsIdx;
        if (null != this.datasourceName) {
            if (!dataSourceNames.contains(this.datasourceName)) {
                LOG.warn("RRD datasource name not found: " + this.datasourceName);
                return new double[] {};
            }
            int size = dataSourceNames.size();
            for (dsIdx = 0; dsIdx < size; dsIdx++) {
                DataSource dataSource = rrd.getDataSource(dsIdx);
                if (dataSource.getName().equals(this.datasourceName)) {
                    break;
                }
            }
            // NOTE(review): assumes getDataSource(i) names line up with
            // getDataSourcesName(); otherwise dsIdx could pass the last index
            // after the loop - confirm against the RRD API.
        } else {
            dsIdx = 0;
        }

        // Copy the selected column, reversing the chunk's row order.
        double[] values = new double[data.length];
        for (int idx = 0; idx < data.length; idx++) {
            values[data.length - idx - 1] = data[idx][dsIdx];
        }
        return values;
    } finally {
        try {
            rrd.close();
        } catch (IOException e) {
            LOG.error("error closing RRD: " + e.getMessage());
        }
    }
}

From source file:be.fedict.eid.idp.common.saml2.Saml2Util.java

License:Open Source License

/**
 * Validates that {@code now} falls inside the assertion timeframe
 * [notBefore, notOnOrAfter], allowing {@code maxTimeOffset} minutes of clock
 * skew when {@code now} precedes {@code notBefore}. A negative
 * {@code maxTimeOffset} disables the check entirely.
 *
 * @throws AssertionValidationException when the timeframe is violated
 */
private static void validateTime(DateTime now, DateTime notBefore, DateTime notOnOrAfter, int maxTimeOffset)
        throws AssertionValidationException {

    LOG.debug("now: " + now.toString());
    LOG.debug("notBefore: " + notBefore.toString());
    LOG.debug("notOnOrAfter : " + notOnOrAfter.toString());

    if (maxTimeOffset >= 0) {
        if (now.isBefore(notBefore)) {
            // Clock skew: tolerate up to maxTimeOffset minutes of drift.
            if (now.plusMinutes(maxTimeOffset).isBefore(notBefore)
                    || now.minusMinutes(maxTimeOffset).isAfter(notOnOrAfter)) {
                throw new AssertionValidationException(
                        "SAML2 assertion validation: invalid SAML message timeframe");
            }
        } else if (now.isAfter(notOnOrAfter)) {
            // The original code also re-tested now.isBefore(notBefore) here,
            // which is always false inside this else branch; the dead condition
            // has been removed without changing behavior.
            // NOTE(review): no skew tolerance is applied on the notOnOrAfter
            // side in this branch - confirm whether that is intended.
            throw new AssertionValidationException(
                    "SAML2 assertion validation: invalid SAML message timeframe");
        }
    }
}

From source file:ch.bfh.ti.ictm.iam.stiam.aa.util.saml.ExtendedAttributeQueryBuilder.java

License:MIT License

/**
 * Builds a complete, extended SAML attribute query with included
 * authentication statement, both of them signed, serialized to String.
 * Overrides the method from MessageBuilder.
 *
 * @return Serialized extended attribute query as String
 * @throws ConfigurationException
 * @throws NoSuchAlgorithmException
 * @throws IOException
 * @throws FileNotFoundException
 * @throws KeyStoreException
 * @throws CertificateException
 * @throws UnrecoverableEntryException
 * @throws SecurityException
 * @throws MarshallingException
 * @throws SignatureException
 * @throws TransformerException
 * @throws XMLParserException
 */
@Override
public String build() throws ConfigurationException, NoSuchAlgorithmException, IOException, KeyStoreException,
        CertificateException, UnrecoverableEntryException, SecurityException, MarshallingException,
        SignatureException, XMLParserException, TransformerException {
    logger.debug("Starting generation of extended attribute query...");

    //////////////////// Perform initial setup
    DefaultBootstrap.bootstrap();
    final SecureRandomIdentifierGenerator idGenerator = new SecureRandomIdentifierGenerator();

    // Timestamps: the embedded authn statement is backdated 4 minutes and
    // declared valid from 5 minutes in the past to 5 minutes in the future.
    final DateTime queryTime = DateTime.now();
    final DateTime authnIssueTime = queryTime.minusMinutes(4);
    final DateTime authnNotBeforeTime = queryTime.minusMinutes(5);
    final DateTime authnNotAfterTime = queryTime.plusMinutes(5);

    //////////////////// The outer AttributeQuery
    final AttributeQuery query = (AttributeQuery) buildXMLObject(AttributeQuery.DEFAULT_ELEMENT_NAME);
    query.setID(idGenerator.generateIdentifier());
    query.setVersion(SAMLVersion.VERSION_20);
    query.setIssueInstant(queryTime);
    // Cleared here; the real Subject is attached a few lines below.
    query.setSubject(null);
    query.setDestination(config.getSAMLDestination());

    // Issuer of the AttributeQuery
    final Issuer queryIssuer = (Issuer) buildXMLObject(Issuer.DEFAULT_ELEMENT_NAME);
    queryIssuer.setValue(config.getSAMLIssuer());
    query.setIssuer(queryIssuer);

    // Subject of the AttributeQuery
    final Subject querySubject = (Subject) buildXMLObject(Subject.DEFAULT_ELEMENT_NAME);
    query.setSubject(querySubject);

    // --> NameID of the Subject
    NameID queryNameID = (NameID) buildXMLObject(NameID.DEFAULT_ELEMENT_NAME);
    queryNameID.setFormat(config.getSAMLNameIDFormat());
    queryNameID.setValue(config.getSAMLNameID());
    querySubject.setNameID(queryNameID);

    // Attributes of the AttributeQuery: each entry is {name[, nameFormat[, friendlyName]]}
    for (String[] attr : attributes) {
        Attribute attribute = (Attribute) buildXMLObject(Attribute.DEFAULT_ELEMENT_NAME);
        attribute.setName(attr[0]);
        if (attr.length >= 2) {
            attribute.setNameFormat(attr[1]);
        }
        if (attr.length >= 3) {
            attribute.setFriendlyName(attr[2]);
        }
        query.getAttributes().add(attribute);
    }

    //////////////////// The Assertion added to the Extensions of the above query
    Assertion assertion = (Assertion) buildXMLObject(Assertion.DEFAULT_ELEMENT_NAME);
    assertion.setID(idGenerator.generateIdentifier());
    assertion.setVersion(SAMLVersion.VERSION_20);
    assertion.setIssueInstant(authnIssueTime);

    // --> Issuer of the Assertion
    Issuer assertionIssuer = (Issuer) buildXMLObject(Issuer.DEFAULT_ELEMENT_NAME);
    assertionIssuer.setValue(config.getSAMLIssuer());
    assertion.setIssuer(assertionIssuer);

    // --> Subject of the Assertion
    Subject assertionSubject = (Subject) buildXMLObject(Subject.DEFAULT_ELEMENT_NAME);
    assertion.setSubject(assertionSubject);

    // --> -->  NameID for the Subject of the Assertion
    NameID assertionNameID = (NameID) buildXMLObject(NameID.DEFAULT_ELEMENT_NAME);
    assertionNameID.setFormat(config.getSAMLNameIDFormat());
    assertionNameID.setValue(config.getSAMLNameID());
    assertionSubject.setNameID(assertionNameID);

    // --> -->  SubjectConfirmation for the Subject of the Assertion
    SubjectConfirmation assertionSubjectConfirmation = (SubjectConfirmation) buildXMLObject(
            SubjectConfirmation.DEFAULT_ELEMENT_NAME);
    assertionSubjectConfirmation.setMethod(config.getSAMLSubjectConfirmationMethod());
    assertionSubject.getSubjectConfirmations().add(assertionSubjectConfirmation);

    // --> Conditions for the Assertion
    Conditions conditions = (Conditions) buildXMLObject(Conditions.DEFAULT_ELEMENT_NAME);
    conditions.setNotBefore(authnNotBeforeTime);
    conditions.setNotOnOrAfter(authnNotAfterTime);
    assertion.setConditions(conditions);

    // --> --> AudienceRestriction for the Conditions
    AudienceRestriction audienceRestriction = (AudienceRestriction) buildXMLObject(
            AudienceRestriction.DEFAULT_ELEMENT_NAME);
    conditions.getAudienceRestrictions().add(audienceRestriction);

    // --> --> --> Audience for the AudienceRestriction
    Audience audience = (Audience) buildXMLObject(Audience.DEFAULT_ELEMENT_NAME);
    audience.setAudienceURI(config.getSAMLIssuer());
    audienceRestriction.getAudiences().add(audience);

    // --> AuthnStatement for the Assertion
    AuthnStatement authnStatement = (AuthnStatement) buildXMLObject(AuthnStatement.DEFAULT_ELEMENT_NAME);
    authnStatement.setAuthnInstant(authnIssueTime);
    assertion.getAuthnStatements().add(authnStatement);

    // -->-->  AuthnContext for the AuthnStatement
    AuthnContext authnContext = (AuthnContext) buildXMLObject(AuthnContext.DEFAULT_ELEMENT_NAME);
    authnStatement.setAuthnContext(authnContext);

    // --> -->-->  AuthnContextClassRef for AuthnContext
    AuthnContextClassRef authnContextClassRef = (AuthnContextClassRef) buildXMLObject(
            AuthnContextClassRef.DEFAULT_ELEMENT_NAME);
    authnContextClassRef.setAuthnContextClassRef(config.getSAMLAssuranceLevel());
    authnContext.setAuthnContextClassRef(authnContextClassRef);

    //////////////////// Sign the assertion and add it to the query
    // The assertion must be marshalled and signed BEFORE it is embedded in the
    // Extensions, and the outer query is signed last; do not reorder.
    Credential signingCredential = StiamConfiguration.getInstance().getSignatureCredential();
    Signature assertionSignature = (Signature) buildXMLObject(Signature.DEFAULT_ELEMENT_NAME);
    assertionSignature.setSigningCredential(signingCredential);
    SecurityHelper.prepareSignatureParams(assertionSignature, signingCredential, null, null);
    assertion.setSignature(assertionSignature);
    Configuration.getMarshallerFactory().getMarshaller(assertion).marshall(assertion);
    Signer.signObject(assertionSignature);

    // Extensions of the AttributeQuery
    // Manually build the correct QName, otherwise "md"-namespace gets marshalled...
    // see https://groups.google.com/forum/#!topic/opensaml-users/FFCQ48uqw3o for details.
    QName name = new QName(SAMLConstants.SAML20P_NS, Extensions.LOCAL_NAME, SAMLConstants.SAML20P_PREFIX);
    Extensions extensions = (Extensions) Configuration.getBuilderFactory().getBuilder(name).buildObject(name);
    extensions.getUnknownXMLObjects().add(assertion);
    query.setExtensions(extensions);

    //////////////////// Sign the query
    Signature querySignature = (Signature) buildXMLObject(Signature.DEFAULT_ELEMENT_NAME);
    querySignature.setSigningCredential(signingCredential);
    SecurityHelper.prepareSignatureParams(querySignature, signingCredential, null, null);
    query.setSignature(querySignature);
    Configuration.getMarshallerFactory().getMarshaller(query).marshall(query);
    Signer.signObject(querySignature);

    logger.debug("Extended attribute query generated!");
    return marshallToString(query);
}

From source file:ch.simuonline.idh.attribute.resolver.dc.aq.AttributeQueryDataConnector.java

License:Apache License

/**
 * Validator of the SAML response.
 *
 * @param response the response object to validate
 * @param randomID the randomID from the attribute query
 * @param queryTime the timestamp from the attribute query
 * @param queryTarget the query target providing the expected entityID, nameID
 *            and signature-verification certificate
 * @return true if validation of the response was positive, false if something with the response is wrong
 * @throws ResolutionException throws if some element of the response is not readable
 */
protected boolean validateResponse(@Nonnull Response response, String randomID, DateTime queryTime,
        AQTarget queryTarget) throws ResolutionException {
    try {

        // Check SAML version
        if (response.getVersion() != SAMLVersion.VERSION_20) {
            log.warn("{} Validation of SAML2 response failed! Unsupported SAML version of response: {} ",
                    getLogPrefix(), response.getVersion());
            return false;
        }

        // It should have exactly one assertion in this response
        if (response.getAssertions().size() != 1) {
            log.warn(
                    "{} Validation of SAML2 response failed! There are {} assertions, but it must have exactly 1 assertion!",
                    getLogPrefix(), response.getAssertions().size());
            return false;
        }

        // Validate if response status is success
        if (!response.getStatus().getStatusCode().getValue()
                .equals("urn:oasis:names:tc:SAML:2.0:status:Success")) {
            log.warn(
                    "{} Validation of SAML2 response failed! Wrong Status Code: {}, it has to be urn:oasis:names:tc:SAML:2.0:status:Success",
                    getLogPrefix(), response.getStatus().getStatusCode().getValue());
            return false;
        }

        Assertion assertion = response.getAssertions().get(0);
        if (assertion.getVersion() != SAMLVersion.VERSION_20) {
            log.warn("{} Validation of SAML2 response failed! Assertion is not a SAML 2.0 version Assertion",
                    getLogPrefix());
            return false;
        }

        // Check if issuer has the right entityID
        Issuer assertionIssuer = assertion.getIssuer();
        String entityID = queryTarget.getEntityID();
        if (!assertionIssuer.getValue().equals(entityID)) {
            log.warn("{} Validation of SAML2 response failed! Wrong Issuer {}, it should be {}", getLogPrefix(),
                    assertion.getIssuer().getValue(), entityID);
            return false;
        }

        // verify signature of assertion
        if (assertion.isSigned() && signatureRequired) {
            log.debug("{} Begin with checking signature", getLogPrefix());
            Signature signature = assertion.getSignature();

            // checks if its a valid signature profile
            SAMLSignatureProfileValidator spv = new SAMLSignatureProfileValidator();
            try {
                spv.validate(signature);
                log.debug("{} Signature has correct profile", getLogPrefix());
            } catch (SignatureException e) {
                // Single placeholder; 'e' is the trailing Throwable, so SLF4J
                // logs the full stack trace instead of just e.toString().
                log.warn("{} Validation of SAML2 response failed! Exception while validating the signature profile",
                        getLogPrefix(), e);
                return false;
            }

            // validate the signature value against the target's certificate
            X509Certificate cert = queryTarget.getCertificate();
            AttributeQueryKeyManager keyManager = new AttributeQueryKeyManager(cert);
            X509Credential validationCredential = new X509KeyManagerX509CredentialAdapter(keyManager,
                    "verification");

            try {
                SignatureValidator.validate(signature, validationCredential);
                log.debug("{} Signature validated, no problem", getLogPrefix());
            } catch (SignatureException e) {
                log.warn("{} Validation of SAML2 response failed! Assertion Signature is not correct!",
                        getLogPrefix());
                return false;
            }
        } else if (!assertion.isSigned() && signatureRequired) {
            log.warn(
                    "{} Validation of SAML2 response failed! Assertion Signature is required but this assertion is not signed!",
                    getLogPrefix());
            return false;
        } else {
            log.warn("{} No signature check required!", getLogPrefix());
        }

        // CONDITIONS OF ASSERTION: both bounds are checked with a 5-minute
        // clock-skew allowance.
        Conditions conditions = assertion.getConditions();
        if (conditions != null) {
            log.debug("{} Check conditions of assertion!", getLogPrefix());

            DateTime now = DateTime.now();
            DateTime notBefore = conditions.getNotBefore();
            log.debug("Evaluating Conditions NotBefore '{}' against now(+5min) '{}'", notBefore,
                    now.plusMinutes(5));
            if (notBefore != null && notBefore.isAfter(now.plusMinutes(5))) {
                log.warn("{} Validation of SAML2 response failed! The condition not before {} failed.",
                        getLogPrefix(), notBefore);
                return false;
            }

            DateTime notOnOrAfter = conditions.getNotOnOrAfter();
            log.debug("Evaluating Conditions NotOnOrAfter '{}' against now(-5min) '{}'", notOnOrAfter,
                    now.minusMinutes(5));
            if (notOnOrAfter != null && notOnOrAfter.isBefore(now.minusMinutes(5))) {
                log.warn("{} Validation of SAML2 response failed! The condition not on or after {} failed.",
                        getLogPrefix(), notOnOrAfter);
                return false;
            }
        } else {
            log.debug("{} Assertion does not contain conditions!", getLogPrefix());
        }

        // SUBJECT OF ASSERTION
        Subject assertionSubject = assertion.getSubject();

        if (assertionSubject == null) {
            log.warn("{} Validation of SAML2 response failed! No Subject found", getLogPrefix());
            return false;
        }

        // check if the nameid is correct
        String nameID = queryTarget.getNameID();
        if (!assertionSubject.getNameID().getValue().equals(nameID)) {
            log.warn("{} Validation of SAML2 response failed! Wrong nameID: {}, expected: {}", getLogPrefix(),
                    assertion.getSubject().getNameID().getValue(), nameID);
            return false;
        }

        // check if the in response to id is correct; a missing confirmation or
        // confirmation data falls through to the catch below
        if (!assertionSubject.getSubjectConfirmations().get(0).getSubjectConfirmationData().getInResponseTo()
                .equals(randomID)) {
            log.warn("{} Validation of SAML2 response failed! Wrong InResponseTo ID: {}, it has to be {}",
                    getLogPrefix(), response.getInResponseTo(), randomID);
            return false;
        }

        // verify that the assertion has not more than one attribute statement
        if (assertion.getAttributeStatements().size() > 1) {
            log.warn("{} Validation of SAML2 response failed! {} attribute statements, but expected is 0 or 1",
                    getLogPrefix(), assertion.getAttributeStatements().size());
            return false;
        }
    } catch (Exception e) {
        // Broad catch is deliberate: any unreadable or missing element makes
        // the response invalid rather than aborting resolution.
        log.warn("{} Validation of SAML2 response failed! Can not read all elements of the assertion to verify response!",
                getLogPrefix(), e);
        return false;
    }

    log.debug("{} SAML2 Response validated, it's valid", getLogPrefix());
    return true;
}

From source file:com.alliander.osgp.webdevicesimulator.service.OslpChannelHandler.java

License:Open Source License

/**
 * Builds a simulated power-usage-history response page for the requested
 * time window. Pages are filled from the newest interval backwards, like the
 * real device.
 *
 * @param powerUsageHistoryRequest request holding the time period, term type
 *            and requested page number
 * @return the usage message for the requested page
 * @throws ParseException if the request dates cannot be parsed
 */
private static Message createGetPowerUsageHistoryWithDatesResponse(
        final GetPowerUsageHistoryRequest powerUsageHistoryRequest) throws ParseException {

    final DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyyMMddHHmmss").withZoneUTC();

    // 20140405 220000
    final DateTime now = new DateTime();
    final DateTime dateTimeFrom = formatter
            .parseDateTime(powerUsageHistoryRequest.getTimePeriod().getStartTime());
    DateTime dateTimeUntil = formatter.parseDateTime(powerUsageHistoryRequest.getTimePeriod().getEndTime());

    final int itemsPerPage = 2;
    final int intervalMinutes = powerUsageHistoryRequest.getTermType() == HistoryTermType.Short ? 60 : 1440;
    final int usagePerItem = powerUsageHistoryRequest.getTermType() == HistoryTermType.Short ? 2400 : 57600;

    // If from is in the future, return an empty list
    final List<PowerUsageData> powerUsageDataList = new ArrayList<PowerUsageData>();
    if (dateTimeFrom.isAfter(now)) {
        return createUsageMessage(1, itemsPerPage, 1, powerUsageDataList);
    }

    // Ensure until date is not in future
    dateTimeUntil = correctUsageUntilDate(dateTimeUntil, powerUsageHistoryRequest.getTermType());

    final int queryInterval = Minutes.minutesBetween(dateTimeFrom, dateTimeUntil).getMinutes();
    final int totalNumberOfItems = queryInterval / intervalMinutes;
    final int numberOfPages = (int) Math.ceil((double) totalNumberOfItems / (double) itemsPerPage);

    // Determine page number (page 0 means "first page")
    final int currentPageNumber = powerUsageHistoryRequest.getPage() == 0 ? 1
            : powerUsageHistoryRequest.getPage();

    // Items on the pages before the requested one. This replaces the original
    // counting loop, which never terminated for negative page numbers.
    final int itemsToSkip = (currentPageNumber - 1) * itemsPerPage;

    // Advance time to correct page starting point, last to first (like real
    // device)
    DateTime pageStartTime = dateTimeUntil.minusMinutes(intervalMinutes * itemsToSkip)
            .minusMinutes(intervalMinutes);
    final int itemsOnPage = Math.min(Math.abs(totalNumberOfItems - itemsToSkip), itemsPerPage);

    // Advance usage to start of page
    int totalUsage = (totalNumberOfItems * usagePerItem) - (usagePerItem * itemsToSkip);

    // Fill page with items
    for (int i = 0; i < itemsOnPage; i++) {
        // randomCumulativeMinutes lies in 100..200
        final int range = 100 + 1;
        final int randomCumulativeMinutes = (int) (Math.random() * range) + 100;

        // Decrease the meter by a randomized per-item usage
        final double random = usagePerItem - (usagePerItem / 50d * Math.random());
        totalUsage -= random;
        // Add power usage item to response
        final PowerUsageData powerUsageData = PowerUsageData.newBuilder()
                .setRecordTime(pageStartTime.toString(formatter)).setMeterType(MeterType.P1)
                .setTotalConsumedEnergy(totalUsage).setActualConsumedPower((int) random)
                .setPsldData(PsldData.newBuilder().setTotalLightingHours((int) random * 3))
                .setSsldData(SsldData.newBuilder().setActualCurrent1(10).setActualCurrent2(20)
                        .setActualCurrent3(30).setActualPower1(10).setActualPower2(20).setActualPower3(30)
                        .setAveragePowerFactor1(10).setAveragePowerFactor2(20).setAveragePowerFactor3(30)
                        .addRelayData(Oslp.RelayData.newBuilder()
                                .setIndex(ByteString.copyFrom(new byte[] { 1 }))
                                .setTotalLightingMinutes(INITIAL_BURNING_MINUTES - randomCumulativeMinutes))
                        .addRelayData(Oslp.RelayData.newBuilder()
                                .setIndex(ByteString.copyFrom(new byte[] { 2 }))
                                .setTotalLightingMinutes(INITIAL_BURNING_MINUTES - randomCumulativeMinutes))
                        .addRelayData(Oslp.RelayData.newBuilder()
                                .setIndex(ByteString.copyFrom(new byte[] { 3 }))
                                .setTotalLightingMinutes(INITIAL_BURNING_MINUTES - randomCumulativeMinutes))
                        .addRelayData(Oslp.RelayData.newBuilder()
                                .setIndex(ByteString.copyFrom(new byte[] { 4 }))
                                .setTotalLightingMinutes(INITIAL_BURNING_MINUTES - randomCumulativeMinutes)))
                .build();

        powerUsageDataList.add(powerUsageData);
        pageStartTime = pageStartTime.minusMinutes(intervalMinutes);

        // NOTE(review): mutating a static counter here is not thread-safe and
        // makes responses depend on call history - confirm this is intended
        // simulator behavior.
        INITIAL_BURNING_MINUTES -= CUMALATIVE_BURNING_MINUTES;
    }

    return createUsageMessage(currentPageNumber, itemsPerPage, numberOfPages, powerUsageDataList);
}

From source file:com.almende.eve.agent.google.GoogleCalendarAgent.java

License:Apache License

/**
 * Quick create an event.
 *
 * @param start
 *            the start time; defaults to the upcoming full hour when null
 * @param end
 *            the end time; defaults to start plus one hour when null
 * @param summary
 *            the summary
 * @param location
 *            the location
 * @param calendarId
 *            the calendar id
 * @return the object node
 * @throws Exception
 *             the exception
 */
public ObjectNode createEventQuick(@Optional @Name("start") String start, @Optional @Name("end") String end,
        @Optional @Name("summary") final String summary, @Optional @Name("location") final String location,
        @Optional @Name("calendarId") final String calendarId) throws Exception {
    final ObjectNode event = JOM.createObjectNode();

    if (start == null) {
        // No start given: take now + 1 hour and strip minutes, seconds and
        // milliseconds, i.e. the next full hour.
        DateTime defaultStart = DateTime.now().plusHours(1);
        defaultStart = defaultStart.minusMillis(defaultStart.getMillisOfSecond());
        defaultStart = defaultStart.minusSeconds(defaultStart.getSecondOfMinute());
        defaultStart = defaultStart.minusMinutes(defaultStart.getMinuteOfHour());
        start = defaultStart.toString();
    }
    final ObjectNode startObj = JOM.createObjectNode();
    startObj.put("dateTime", start);
    event.put("start", startObj);

    if (end == null) {
        // No end given: one hour after the (possibly defaulted) start.
        end = new DateTime(start).plusHours(1).toString();
    }
    final ObjectNode endObj = JOM.createObjectNode();
    endObj.put("dateTime", end);
    event.put("end", endObj);

    if (summary != null) {
        event.put("summary", summary);
    }
    if (location != null) {
        event.put("location", location);
    }

    return createEvent(event, calendarId);
}

From source file:com.almende.eve.agent.MeetingAgent.java

License:Apache License

/**
 * Get the timestamp rounded up to the next half hour (seconds and
 * milliseconds zeroed).
 *
 * @return the next half-hour boundary after now
 */
private DateTime getNextHalfHour() {
    DateTime next = DateTime.now();
    next = next.minusMillis(next.getMillisOfSecond());
    next = next.minusSeconds(next.getSecondOfMinute());

    // Use >= 30 so that e.g. 12:30:45 rounds up to 13:00; the original '> 30'
    // returned 12:30 for any time in minute 30, a timestamp already in the past.
    if (next.getMinuteOfHour() >= 30) {
        next = next.minusMinutes(next.getMinuteOfHour());
        next = next.plusMinutes(60);
    } else {
        next = next.minusMinutes(next.getMinuteOfHour());
        next = next.plusMinutes(30);
    }

    return next;
}

From source file:com.almende.pi5.common.agents.AggregatingAgent.java

License:Apache License

/**
 * Repeat steer./*from  w  ww.j  av  a  2s.com*/
 */
public void repeatSteer() {
    steer();
    sendLog();

    DateTime next = this.currentTimeslot.plusMinutes(TIMESTEP - 1).plusSeconds(25);
    DateTime prev = next.minusMinutes(TIMESTEP);
    if (DateTime.now().isBefore(prev)) {
        next = prev;
    }

    schedule(new JSONRequest("repeatSteer", null), next);
}

From source file:com.almende.pi5.common.LogLine.java

License:Apache License

/**
 * From profiles./* w  w  w  .  j av  a2 s .  c  om*/
 *
 * @param id
 *            the id
 * @param current
 *            the current
 * @param expected
 *            the expected
 * @param now
 *            the now
 * @param currentTimeslot
 *            the current timeslot
 * @param contractMode
 *            the contract mode
 * @return the log line
 */
public static LogLine fromProfiles(final String id, final CategoryProfile current,
        final CategoryProfile expected, final DateTime now, final DateTime currentTimeslot,
        final boolean contractMode) {
    final LogLine result = new LogLine();
    result.setId(id);
    result.setNow(now.getMillis());
    result.setTimeslot(currentTimeslot.getMillis());
    result.setCurrent(current.getDemand().getValueAt(now));
    result.setDemand(current.getDemand().getIntegral(currentTimeslot.minusMinutes(15), currentTimeslot));
    result.setExpected(expected.getDemand().getIntegral(currentTimeslot.minusMinutes(15), currentTimeslot));
    result.setMax(expected.getExpectedFlexibilityMaxInWatts().getIntegral(currentTimeslot.minusMinutes(15),
            currentTimeslot));
    result.setMin(expected.getExpectedFlexibilityMinInWatts().getIntegral(currentTimeslot.minusMinutes(15),
            currentTimeslot));
    result.setNextDemand(current.getDemand().getIntegral(currentTimeslot, currentTimeslot.plusMinutes(15)));
    result.setNextMax(expected.getExpectedFlexibilityMaxInWatts().getIntegral(currentTimeslot,
            currentTimeslot.plusMinutes(15)));
    result.setNextMin(expected.getExpectedFlexibilityMinInWatts().getIntegral(currentTimeslot,
            currentTimeslot.plusMinutes(15)));
    if (contractMode) {
        result.setRequest(expected.getDemand().getIntegral(currentTimeslot.minusMinutes(15), currentTimeslot));
        result.setNextRequest(
                expected.getDemand().getIntegral(currentTimeslot, currentTimeslot.plusMinutes(15)));
    }
    return result;
}

From source file:com.amazonaws.services.kinesis.scaling.auto.StreamMonitor.java

License:Open Source License

/**
 * Evaluates the CloudWatch utilisation samples for each profiled Kinesis
 * operation type (PUT/GET), votes a scale direction per operation, combines
 * the votes, and — if warranted — submits a shard-count update via the
 * scaler, returning the resulting report (or {@code null} if the scaling
 * call failed or made no report).
 *
 * @param currentUtilisationMetrics
 *            per operation type, per stream metric (Bytes/Records), the
 *            CloudWatch datapoints and their observed values
 * @param streamMaxCapacity
 *            per operation type, the stream's maximum capacity for each
 *            metric, used as the denominator for utilisation percentages
 * @param cwSampleDuration
 *            the number of sample periods requested from CloudWatch;
 *            missing periods are counted as low-utilisation samples
 * @param now
 *            the current time, used for the scale-down cool-off check
 * @return the scaling operation report, or {@code null} when no report was
 *         produced (e.g. an exception occurred during scaling)
 */
protected ScalingOperationReport processCloudwatchMetrics(
        Map<KinesisOperationType, Map<StreamMetric, Map<Datapoint, Double>>> currentUtilisationMetrics,
        Map<KinesisOperationType, StreamMetrics> streamMaxCapacity, int cwSampleDuration, DateTime now) {
    ScalingOperationReport report = null;
    ScaleDirection finalScaleDirection = null;

    // for each type of operation that the customer has requested profiling
    // (PUT, GET)
    Map<KinesisOperationType, ScaleDirection> scaleVotes = new HashMap<>();

    for (Map.Entry<KinesisOperationType, Map<StreamMetric, Map<Datapoint, Double>>> entry : currentUtilisationMetrics
            .entrySet()) {
        // set the default scaling vote to 'do nothing'
        scaleVotes.put(entry.getKey(), ScaleDirection.NONE);

        // per metric: (highSamples, lowSamples, latest moving-average pct)
        Map<StreamMetric, Triplet<Integer, Integer, Double>> perMetricSamples = new HashMap<>();
        StreamMetric higherUtilisationMetric;
        Double higherUtilisationPct;

        // process each metric type, including Records and Bytes
        for (StreamMetric metric : StreamMetric.values()) {
            double currentMax = 0D;
            double currentPct = 0D;
            double latestPct = 0d;
            double latestMax = 0d;
            double latestAvg = 0d;
            DateTime lastTime = null;
            int lowSamples = 0;
            int highSamples = 0;

            Map<Datapoint, Double> metrics = new HashMap<>();

            if (!currentUtilisationMetrics.containsKey(entry.getKey())
                    || !entry.getValue().containsKey(metric)) {
                // we have no samples for this type of metric which is ok -
                // they'll later be counted as low metrics
            } else {
                metrics = entry.getValue().get(metric);
            }

            // if we got nothing back, then there are no operations of the
            // given type happening, so this is a full 'low sample'
            if (metrics.size() == 0) {
                lowSamples = this.config.getScaleDown().getScaleAfterMins();
            }

            // process the data point aggregates retrieved from CloudWatch
            // and log scale up/down votes by period
            for (Map.Entry<Datapoint, Double> datapointEntry : metrics.entrySet()) {
                currentMax = datapointEntry.getValue();
                // utilisation fraction of the stream's max capacity for
                // this metric (0.0 - 1.0)
                currentPct = currentMax / streamMaxCapacity.get(entry.getKey()).get(metric);
                // keep track of the last measures
                // NOTE(review): lastTime is overwritten unconditionally
                // below, so this guard compares against the PREVIOUS
                // entry's timestamp, not the max seen so far; the 'latest'
                // values therefore depend on the map's iteration order,
                // which is unspecified for a HashMap — verify the metrics
                // map is time-ordered, or track the max timestamp instead.
                if (lastTime == null
                        || new DateTime(datapointEntry.getKey().getTimestamp()).isAfter(lastTime)) {
                    latestPct = currentPct;
                    latestMax = currentMax;

                    // latest average is a simple moving average
                    latestAvg = latestAvg == 0d ? currentPct : (latestAvg + currentPct) / 2;
                }
                lastTime = new DateTime(datapointEntry.getKey().getTimestamp());

                // if the pct for the datapoint exceeds or is below the
                // thresholds, then add low/high samples
                if (currentPct > new Double(this.config.getScaleUp().getScaleThresholdPct()) / 100) {
                    LOG.debug(String.format("%s %s: Cached High Alarm Condition for %.2f %s/Second (%.2f%%)",
                            entry.getKey(), metric, currentMax, metric, currentPct * 100));
                    highSamples++;
                } else if (currentPct < new Double(this.config.getScaleDown().getScaleThresholdPct()) / 100) {
                    LOG.debug(String.format("%s %s: Cached Low Alarm Condition for %.2f %s/Second (%.2f%%)",
                            entry.getKey(), metric, currentMax, metric, currentPct * 100));
                    lowSamples++;
                }
            }

            // add low samples for the periods which we didn't get any
            // data points, if there are any
            if (metrics.size() < cwSampleDuration) {
                lowSamples += cwSampleDuration - metrics.size();
            }

            LOG.info(String.format(
                    metric + ": Stream %s Used %s[%s] Capacity ~ %.2f%% (%,.0f " + metric + " of %d)",
                    config.getStreamName(), entry.getKey(), metric, latestAvg * 100, latestMax,
                    streamMaxCapacity.get(entry.getKey()).get(metric)));

            // merge the per-stream metric samples together for the
            // operation
            if (!perMetricSamples.containsKey(metric)) {
                // create a new sample entry
                perMetricSamples.put(metric, new Triplet<>(highSamples, lowSamples, latestAvg));
            } else {
                // merge the samples: counts are summed, averages averaged
                Triplet<Integer, Integer, Double> previousHighLow = perMetricSamples.get(metric);
                Triplet<Integer, Integer, Double> newHighLow = new Triplet<>(
                        previousHighLow.getValue0() + highSamples, previousHighLow.getValue1() + lowSamples,
                        (previousHighLow.getValue2() + latestAvg) / 2);
                perMetricSamples.put(metric, newHighLow);
            }
        }

        /*-
         * we now have per metric samples for this operation type
         * 
         * For Example: 
         * 
         * Metric  | High Samples | Low Samples | Pct Used
         * Bytes   | 3            | 0           | .98
         * Records | 0            | 10          | .2
         * 
         * Check these values against the provided configuration. If we have
         * been above the 'scaleAfterMins' with high samples for either
         * metric, then we scale up. If not, then if we've been below the
         * scaleAfterMins with low samples, then we scale down. Otherwise
         * the vote stays as NONE
         */

        // first find out which of the dimensions of stream utilisation are
        // higher - we'll use the higher of the two for time checks
        if (perMetricSamples.get(StreamMetric.Bytes).getValue2() >= perMetricSamples.get(StreamMetric.Records)
                .getValue2()) {
            higherUtilisationMetric = StreamMetric.Bytes;
            higherUtilisationPct = perMetricSamples.get(StreamMetric.Bytes).getValue2();
        } else {
            higherUtilisationMetric = StreamMetric.Records;
            higherUtilisationPct = perMetricSamples.get(StreamMetric.Records).getValue2();
        }

        LOG.info(String.format(
                "Will decide scaling action based on metric %s[%s] due to higher utilisation metric %.2f%%",
                entry.getKey(), higherUtilisationMetric, higherUtilisationPct * 100));

        // high samples (value0) win over low samples (value1): scale up is
        // checked first
        if (perMetricSamples.get(higherUtilisationMetric).getValue0() >= config.getScaleUp()
                .getScaleAfterMins()) {
            scaleVotes.put(entry.getKey(), ScaleDirection.UP);
        } else if (perMetricSamples.get(higherUtilisationMetric).getValue1() >= config.getScaleDown()
                .getScaleAfterMins()) {
            scaleVotes.put(entry.getKey(), ScaleDirection.DOWN);
        }
    }

    // process the scaling votes
    ScaleDirection getVote = scaleVotes.get(KinesisOperationType.GET);
    ScaleDirection putVote = scaleVotes.get(KinesisOperationType.PUT);

    // check if we have both get and put votes - if we have both then
    // implement the decision matrix
    if (getVote != null && putVote != null) {
        // if either of the votes are to scale up, then do so. If both are
        // None,
        // then do nothing. Otherwise scale down
        if (getVote == ScaleDirection.UP || putVote == ScaleDirection.UP) {
            finalScaleDirection = ScaleDirection.UP;
        } else if (getVote == ScaleDirection.NONE && putVote == ScaleDirection.NONE) {
            finalScaleDirection = ScaleDirection.NONE;
        } else {
            finalScaleDirection = ScaleDirection.DOWN;
        }
    } else {
        // we only have get or put votes, so use the non-null one
        finalScaleDirection = (getVote == null ? putVote : getVote);
    }

    try {
        int currentShardCount = this.scaler.getOpenShardCount(this.config.getStreamName());

        // if the metric stats indicate a scale up or down, then do the
        // action
        if (finalScaleDirection.equals(ScaleDirection.UP)) {
            // submit a scale up task
            Integer scaleUpCount = this.config.getScaleUp().getScaleCount();

            LOG.info(String.format(
                    "Requesting Scale Up of Stream %s by %s as %s has been above %s%% for %s Minutes",
                    this.config.getStreamName(),
                    (scaleUpCount != null) ? scaleUpCount : this.config.getScaleUp().getScalePct() + "%",
                    this.config.getScaleOnOperations().toString(),
                    this.config.getScaleUp().getScaleThresholdPct(),
                    this.config.getScaleUp().getScaleAfterMins()));

            // TODO migrate this block to UpdateShardCount API
            if (scaleUpCount != null) {
                // absolute count configured: add it to the current count
                report = this.scaler.updateShardCount(this.config.getStreamName(), currentShardCount,
                        currentShardCount + scaleUpCount, this.config.getMinShards(),
                        this.config.getMaxShards());
            } else {
                // NOTE(review): the percentage path computes
                // currentShardCount * (scalePct / 100) as the new TARGET
                // count, so scalePct must be > 100 for this to grow the
                // stream — confirm that is the intended configuration
                // semantic. Also, new Double(...) boxing is deprecated;
                // plain double arithmetic would suffice.
                report = this.scaler.updateShardCount(this.config.getStreamName(), currentShardCount,
                        new Double(
                                currentShardCount * (new Double(this.config.getScaleUp().getScalePct()) / 100))
                                        .intValue(),
                        this.config.getMinShards(), this.config.getMaxShards());

            }

            // send SNS notifications
            if (this.config.getScaleUp().getNotificationARN() != null && this.snsClient != null) {
                StreamScalingUtils.sendNotification(this.snsClient,
                        this.config.getScaleUp().getNotificationARN(), "Kinesis Autoscaling - Scale Up",
                        (report == null ? "No Changes Made" : report.asJson()));
            }
        } else if (finalScaleDirection.equals(ScaleDirection.DOWN)) {
            // check the cool down interval: defer if the last scale-down
            // happened within the configured cool-off window
            if (lastScaleDown != null
                    && now.minusMinutes(this.config.getScaleDown().getCoolOffMins()).isBefore(lastScaleDown)) {
                LOG.info(String.format(
                        "Stream %s: Deferring Scale Down until Cool Off Period of %s Minutes has elapsed",
                        this.config.getStreamName(), this.config.getScaleDown().getCoolOffMins()));
            } else {
                // submit a scale down
                Integer scaleDownCount = this.config.getScaleDown().getScaleCount();
                LOG.info(String.format(
                        "Requesting Scale Down of Stream %s by %s as %s has been below %s%% for %s Minutes",
                        this.config.getStreamName(),
                        (scaleDownCount != null) ? scaleDownCount
                                : this.config.getScaleDown().getScalePct() + "%",
                        config.getScaleOnOperations().toString(),
                        this.config.getScaleDown().getScaleThresholdPct(),
                        this.config.getScaleDown().getScaleAfterMins()));
                try {
                    if (scaleDownCount != null) {
                        // absolute count configured: subtract it
                        report = this.scaler.updateShardCount(this.config.getStreamName(), currentShardCount,
                                currentShardCount - scaleDownCount, this.config.getMinShards(),
                                this.config.getMaxShards());
                    } else {
                        // NOTE(review): this subtracts (scalePct / 100) — a
                        // fraction less than 1 for any pct below 100 — from
                        // the shard count, rather than a percentage OF the
                        // shard count as the scale-up branch does. This
                        // looks inconsistent with the scale-up arithmetic
                        // (presumably currentShardCount * (1 - pct/100) or
                        // currentShardCount - currentShardCount * pct/100
                        // was intended) — verify against the configuration
                        // contract before relying on percentage scale-down.
                        report = this.scaler.updateShardCount(this.config.getStreamName(), currentShardCount,
                                new Double(currentShardCount
                                        - (new Double(this.config.getScaleDown().getScalePct()) / 100))
                                                .intValue(),
                                this.config.getMinShards(), this.config.getMaxShards());
                    }

                    // record the scale-down time for the cool-off check
                    lastScaleDown = new DateTime(System.currentTimeMillis());

                    // send SNS notifications
                    if (this.config.getScaleDown().getNotificationARN() != null && this.snsClient != null) {
                        StreamScalingUtils.sendNotification(this.snsClient,
                                this.config.getScaleDown().getNotificationARN(),
                                "Kinesis Autoscaling - Scale Down",
                                (report == null ? "No Changes Made" : report.asJson()));
                    }
                } catch (AlreadyOneShardException aose) {
                    // do nothing - we're already at 1 shard
                    LOG.info(String.format("Stream %s: Not Scaling Down - Already at Minimum of 1 Shard",
                            this.config.getStreamName()));
                }
            }
        } else {
            // scale direction not set, so we're not going to scale
            // up or down - everything fine
            LOG.info("No Scaling required - Stream capacity within specified tolerances");
            return this.scaler.reportFor(ScalingCompletionStatus.NoActionRequired, this.config.getStreamName(),
                    0, finalScaleDirection);
        }
    } catch (Exception e) {
        // best-effort: scaling failures are logged and a null report is
        // returned rather than propagated to the monitor loop
        LOG.error("Failed to process stream " + this.config.getStreamName(), e);
    }

    return report;
}