Example usage for org.joda.time Days daysBetween

List of usage examples for org.joda.time Days daysBetween

Introduction

On this page you can find example usage for org.joda.time Days daysBetween.

Prototype

public static Days daysBetween(ReadablePartial start, ReadablePartial end) 

Document

Creates a Days representing the number of whole days between the two specified partial datetimes.
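
For example, a minimal sketch (assuming Joda-Time 2.x on the classpath; the dates are illustrative):

import org.joda.time.Days;
import org.joda.time.LocalDate;

public class DaysBetweenExample {
    public static void main(String[] args) {
        LocalDate start = new LocalDate(2024, 1, 1);
        LocalDate end = new LocalDate(2024, 1, 31);
        // Whole days between the two partials: 30
        System.out.println(Days.daysBetween(start, end).getDays());
    }
}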

Usage

From source file:de.localtoast.launchit.AppMetaData.java

License:Open Source License

public int getEffectivePriority() {
    int effectivePriority = priorityCounter;

    Days daysBetween = Days.daysBetween(lastStarted, DateTime.now());
    if (daysBetween.isGreaterThan(PRIORITY_DECREASING_DELAY)) {
        effectivePriority = effectivePriority - (daysBetween.getDays() - PRIORITY_DECREASING_DELAY.getDays());
        if (effectivePriority < 0) {
            effectivePriority = 0;
        }
    }

    return effectivePriority;
}
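
PRIORITY_DECREASING_DELAY is a Days constant defined elsewhere in AppMetaData; a minimal sketch of the same comparison pattern, with an assumed seven-day delay:

import org.joda.time.DateTime;
import org.joda.time.Days;

public class PriorityDecayExample {
    // Assumption: the delay is a Days constant, e.g. one week
    private static final Days PRIORITY_DECREASING_DELAY = Days.days(7);

    public static void main(String[] args) {
        DateTime lastStarted = DateTime.now().minusDays(10);
        Days idle = Days.daysBetween(lastStarted, DateTime.now());
        if (idle.isGreaterThan(PRIORITY_DECREASING_DELAY)) {
            // 10 idle days minus the 7-day grace period -> priority drops by 3
            System.out.println(idle.getDays() - PRIORITY_DECREASING_DELAY.getDays());
        }
    }
}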

From source file:de.symeda.sormas.api.utils.DateHelper.java

License:Open Source License

/**
 * Calculate full days between the two given dates.
 */
public static int getFullDaysBetween(Date start, Date end) {
    return Days.daysBetween(new LocalDate(start.getTime()), new LocalDate(end.getTime())).getDays();
}

From source file:de.symeda.sormas.api.utils.DateHelper.java

License:Open Source License

/**
 * Calculate days between the two given dates. This includes both the start and
 * end dates, so a one-week period from Monday to Sunday will return 7.
 */
public static int getDaysBetween(Date start, Date end) {
    return Days.daysBetween(new LocalDate(start.getTime()), new LocalDate(end.getTime())).getDays() + 1;
}
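
A quick check of the difference between the two helpers (a minimal sketch; it assumes the DateHelper class above is available and the dates are illustrative):

import java.util.Date;
import org.joda.time.LocalDate;

public class DateHelperExample {
    public static void main(String[] args) {
        // Monday 2024-01-01 to Sunday 2024-01-07
        Date monday = new LocalDate(2024, 1, 1).toDate();
        Date sunday = new LocalDate(2024, 1, 7).toDate();
        // Whole days between the dates: 6
        System.out.println(DateHelper.getFullDaysBetween(monday, sunday));
        // Inclusive of both endpoints: 7
        System.out.println(DateHelper.getDaysBetween(monday, sunday));
    }
}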

From source file:edu.coeia.util.DateUtil.java

License:Open Source License

public static int subtractDays(Date date1, Date date2) {
    DateTime install = new DateTime(date1);
    DateTime current = new DateTime(date2);

    return Days.daysBetween(current, install).getDays();
}
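
Note the argument order: the method returns daysBetween(current, install), so the result is negative when date1 is earlier than date2. A small illustrative check (assumes DateUtil is on the classpath; dates are made up):

import java.util.Date;
import org.joda.time.DateTime;

public class DateUtilExample {
    public static void main(String[] args) {
        Date install = new DateTime(2024, 1, 1, 0, 0).toDate();
        Date current = new DateTime(2024, 1, 10, 0, 0).toDate();
        // daysBetween(current, install): install is 9 days before current, so -9
        System.out.println(DateUtil.subtractDays(install, current));
    }
}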

From source file:edu.nyu.vida.data_polygamy.relationship_computation.CorrelationReducer.java

License:BSD License

public static TimeSeriesStats getStats(int temporal, TopologyTimeSeriesWritable timeSeries1,
        TopologyTimeSeriesWritable timeSeries2, boolean temporalPermutationTest) {

    TimeSeriesStats output = new TimeSeriesStats();

    if ((timeSeries1 == null) || (timeSeries2 == null))
        return output;

    // detecting intersection

    long start1 = timeSeries1.getStart();
    long end1 = timeSeries1.getEnd();
    long start2 = timeSeries2.getStart();
    long end2 = timeSeries2.getEnd();

    if (((end1 < start2) && (start1 < start2)) || ((end1 > end2) && (start1 > end2)))
        return output;

    output.setIntersect(true);

    DateTime start1Obj = new DateTime(start1 * 1000, DateTimeZone.UTC);
    DateTime end1Obj = new DateTime(end1 * 1000, DateTimeZone.UTC);
    DateTime start2Obj = new DateTime(start2 * 1000, DateTimeZone.UTC);
    DateTime end2Obj = new DateTime(end2 * 1000, DateTimeZone.UTC);

    byte[] eventTimeSeries1 = timeSeries1.getTimeSeries();
    byte[] eventTimeSeries2 = timeSeries2.getTimeSeries();

    int startRange = 0;
    int endRange = 0;

    switch (temporal) {
    case FrameworkUtils.HOUR:
        startRange = (start1 > start2) ? Hours.hoursBetween(start2Obj, start1Obj).getHours()
                : Hours.hoursBetween(start1Obj, start2Obj).getHours();
        endRange = (end1 > end2) ? Hours.hoursBetween(end2Obj, end1Obj).getHours()
                : Hours.hoursBetween(end1Obj, end2Obj).getHours();
        break;
    case FrameworkUtils.DAY:
        startRange = (start1 > start2) ? Days.daysBetween(start2Obj, start1Obj).getDays()
                : Days.daysBetween(start1Obj, start2Obj).getDays();
        endRange = (end1 > end2) ? Days.daysBetween(end2Obj, end1Obj).getDays()
                : Days.daysBetween(end1Obj, end2Obj).getDays();
        break;
    case FrameworkUtils.WEEK:
        startRange = (start1 > start2) ? Weeks.weeksBetween(start2Obj, start1Obj).getWeeks()
                : Weeks.weeksBetween(start1Obj, start2Obj).getWeeks();
        endRange = (end1 > end2) ? Weeks.weeksBetween(end2Obj, end1Obj).getWeeks()
                : Weeks.weeksBetween(end1Obj, end2Obj).getWeeks();
        break;
    case FrameworkUtils.MONTH:
        startRange = (start1 > start2) ? Months.monthsBetween(start2Obj, start1Obj).getMonths()
                : Months.monthsBetween(start1Obj, start2Obj).getMonths();
        endRange = (end1 > end2) ? Months.monthsBetween(end2Obj, end1Obj).getMonths()
                : Months.monthsBetween(end1Obj, end2Obj).getMonths();
        break;
    case FrameworkUtils.YEAR:
        startRange = (start1 > start2) ? Years.yearsBetween(start2Obj, start1Obj).getYears()
                : Years.yearsBetween(start1Obj, start2Obj).getYears();
        endRange = (end1 > end2) ? Years.yearsBetween(end2Obj, end1Obj).getYears()
                : Years.yearsBetween(end1Obj, end2Obj).getYears();
        break;
    default:
        startRange = (start1 > start2) ? Hours.hoursBetween(start2Obj, start1Obj).getHours()
                : Hours.hoursBetween(start1Obj, start2Obj).getHours();
        endRange = (end1 > end2) ? Hours.hoursBetween(end2Obj, end1Obj).getHours()
                : Hours.hoursBetween(end1Obj, end2Obj).getHours();
        break;
    }

    int indexStart1 = (start2 > start1) ? startRange : 0;
    int indexStart2 = (start2 > start1) ? 0 : startRange;
    int indexEnd1 = (end2 > end1) ? eventTimeSeries1.length : eventTimeSeries1.length - endRange;
    int indexEnd2 = (end2 > end1) ? eventTimeSeries2.length - endRange : eventTimeSeries2.length;

    /*DateTime startIntersect = FrameworkUtils.addTime(temporal, indexStart1, start1Obj);
    if (!(startIntersect.isEqual(FrameworkUtils.addTime(temporal, indexStart2, start2Obj)))) {
    System.out.println("Something went wrong... Different starts");
    System.exit(-1);
    }*/

    byte[] timeSeries1Int = Arrays.copyOfRange(eventTimeSeries1, indexStart1, indexEnd1);
    byte[] timeSeries2Int = Arrays.copyOfRange(eventTimeSeries2, indexStart2, indexEnd2);

    if (timeSeries1Int.length != timeSeries2Int.length) {
        System.out.println("Something went wrong... Different sizes");
        System.exit(-1);
    }

    int nMatchEvents = 0;
    int nMatchPosEvents = 0;
    int nMatchNegEvents = 0;
    int nPosFirstNonSecond = 0;
    int nNegFirstNonSecond = 0;
    int nNonFirstPosSecond = 0;
    int nNonFirstNegSecond = 0;

    //String eventDateTime = null;
    int indexD1 = (temporalPermutationTest) ? new Random().nextInt(timeSeries1Int.length) : 0;
    int indexD2 = (temporalPermutationTest) ? new Random().nextInt(timeSeries2Int.length) : 0;
    for (int i = 0; i < timeSeries1Int.length; i++) {
        int j = (indexD1 + i) % timeSeries1Int.length;
        int k = (indexD2 + i) % timeSeries2Int.length;
        byte result = (byte) (timeSeries1Int[j] | timeSeries2Int[k]);

        //eventDateTime = FrameworkUtils.getTemporalStr(
        //        temporal, FrameworkUtils.addTime(temporal, j, startIntersect));

        switch (result) {
        case FrameworkUtils.nonEventsMatch: // both non events
            // do nothing
            break;
        case FrameworkUtils.posEventsMatch: // both positive
            nMatchEvents++;
            nMatchPosEvents++;
            //output.addMatchPosEvents(eventDateTime);
            break;
        case FrameworkUtils.nonEventPosEventMatch: // one positive, one non-event
            if (timeSeries1Int[j] == FrameworkUtils.positiveEvent)
                nPosFirstNonSecond++;
            else
                nNonFirstPosSecond++;
            break;
        case FrameworkUtils.negEventsMatch: // both negative
            nMatchEvents++;
            nMatchPosEvents++;
            //output.addMatchPosEvents(eventDateTime);
            break;
        case FrameworkUtils.nonEventNegEventMatch: // one negative, one non-event
            if (timeSeries1Int[j] == FrameworkUtils.negativeEvent)
                nNegFirstNonSecond++;
            else
                nNonFirstNegSecond++;
            break;
        case FrameworkUtils.negEventPosEventMatch: // one negative, one positive
            nMatchEvents++;
            nMatchNegEvents++;
            //output.addMatchNegEvents(eventDateTime);
            break;
        default:
            System.out.println("Something went wrong... Wrong case");
            System.exit(-1);
        }
    }

    output.setParameters(nMatchEvents, nMatchPosEvents, nMatchNegEvents, nPosFirstNonSecond, nNegFirstNonSecond,
            nNonFirstPosSecond, nNonFirstNegSecond);

    return output;
}
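
In the daily branch above, daysBetween only converts the gap between the two series' start (and end) instants into an array offset; a stripped-down sketch of that conversion (epoch seconds are illustrative):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Days;

public class SeriesAlignmentExample {
    public static void main(String[] args) {
        long start1 = 1704067200L; // 2024-01-01T00:00:00Z
        long start2 = 1704326400L; // 2024-01-04T00:00:00Z
        DateTime start1Obj = new DateTime(start1 * 1000, DateTimeZone.UTC);
        DateTime start2Obj = new DateTime(start2 * 1000, DateTimeZone.UTC);
        // Series 1 starts 3 days before series 2, so its first 3 entries are skipped
        int startRange = Days.daysBetween(start1Obj, start2Obj).getDays();
        System.out.println(startRange); // 3
    }
}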

From source file:edu.nyu.vida.data_polygamy.utils.FrameworkUtils.java

License:BSD License

public static int getTimeSteps(int tempRes, int startTime, int endTime) {

    if (startTime > endTime) {
        return 0;
    }

    int timeSteps = 0;
    DateTime start = new DateTime(((long) startTime) * 1000, DateTimeZone.UTC);
    DateTime end = new DateTime(((long) endTime) * 1000, DateTimeZone.UTC);

    switch (tempRes) {
    case FrameworkUtils.HOUR:
        timeSteps = Hours.hoursBetween(start, end).getHours();
        break;
    case FrameworkUtils.DAY:
        timeSteps = Days.daysBetween(start, end).getDays();
        break;
    case FrameworkUtils.WEEK:
        timeSteps = Weeks.weeksBetween(start, end).getWeeks();
        break;
    case FrameworkUtils.MONTH:
        timeSteps = Months.monthsBetween(start, end).getMonths();
        break;
    case FrameworkUtils.YEAR:
        timeSteps = Years.yearsBetween(start, end).getYears();
        break;
    default:
        timeSteps = Hours.hoursBetween(start, end).getHours();
        break;
    }
    timeSteps++;

    return timeSteps;
}
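
Because of the trailing timeSteps++, the returned count includes the final step; an illustrative call (assumes FrameworkUtils is on the classpath, with FrameworkUtils.DAY selecting the daily branch; epoch seconds are made up):

public class TimeStepsExample {
    public static void main(String[] args) {
        // 2024-01-01T00:00:00Z to 2024-01-03T00:00:00Z at daily resolution:
        // Days.daysBetween yields 2, plus one for the inclusive final step -> 3
        System.out.println(FrameworkUtils.getTimeSteps(FrameworkUtils.DAY, 1704067200, 1704240000));
    }
}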

From source file:edu.uga.cs.fluxbuster.features.FeatureCalculator.java

License:Open Source License

/**
 * Calculates the cluster novelty feature for each cluster generated
 * on a specific run date.
 *
 * @param log_date the run date
 * @param window the number of days previous to use in feature calculation
 * @return a table of values where the keys are cluster ids and the values 
 *       are the feature values
 * @throws SQLException if there is an error calculating the feature values
 */
public Map<Integer, Double> calculateNoveltyFeature(Date log_date, int window) throws SQLException {
    HashMap<Integer, Double> retval = new HashMap<Integer, Double>();
    ArrayList<Date> prevDates = getPrevDates(log_date, window);

    if (prevDates.size() > 0) {
        StringBuffer querybuf = new StringBuffer();
        Formatter formatter = new Formatter(querybuf);
        String curdatestr = df.format(log_date);
        formatter.format(properties.getProperty(NOVELTY_QUERY1_1KEY), curdatestr, curdatestr, curdatestr,
                curdatestr);
        for (Date prevDate : prevDates) {
            formatter.format(" " + properties.getProperty(NOVELTY_QUERY1_2KEY) + " ", df.format(prevDate));
        }
        formatter.format(properties.getProperty(NOVELTY_QUERY1_3KEY), curdatestr, curdatestr);

        ResultSet rs2 = null;
        Hashtable<Integer, Hashtable<String, Long>> new_resolved_ips = new Hashtable<Integer, Hashtable<String, Long>>();
        try {
            rs2 = dbi.executeQueryWithResult(querybuf.toString());
            while (rs2.next()) {
                int cluster_id = rs2.getInt(2);
                if (!new_resolved_ips.containsKey(cluster_id)) {
                    new_resolved_ips.put(cluster_id, new Hashtable<String, Long>());
                }
                String secondLevelDomainName = rs2.getString(1);
                long newips = rs2.getLong(3);
                Hashtable<String, Long> clustertable = new_resolved_ips.get(cluster_id);
                clustertable.put(secondLevelDomainName, newips);
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error(e);
            }
        } finally {
            if (rs2 != null && !rs2.isClosed()) {
                rs2.close();
            }
            formatter.close();
        }

        Hashtable<String, List<Integer>> numDays = new Hashtable<String, List<Integer>>();
        for (Date prevDate : prevDates) {
            String prevDateStr = df.format(prevDate);
            querybuf = new StringBuffer();
            formatter = new Formatter(querybuf);
            formatter.format(properties.getProperty(NOVELTY_QUERY2KEY), curdatestr, prevDateStr, curdatestr,
                    prevDateStr);
            ResultSet rs3 = null;
            try {
                rs3 = dbi.executeQueryWithResult(querybuf.toString());
                while (rs3.next()) {
                    String sldn = rs3.getString(1);
                    if (!numDays.containsKey(sldn)) {
                        numDays.put(sldn, new ArrayList<Integer>());
                    }
                    Date pd = rs3.getDate(2);
                    DateTime start = new DateTime(pd.getTime());
                    DateTime end = new DateTime(log_date.getTime());
                    Days d = Days.daysBetween(start, end);
                    int diffDays = d.getDays();
                    numDays.get(sldn).add(diffDays);
                }
            } catch (Exception e) {
                if (log.isErrorEnabled()) {
                    log.error(e);
                }
            } finally {
                if (rs3 != null && !rs3.isClosed()) {
                    rs3.close();
                }
                formatter.close();
            }
        }

        Hashtable<Integer, List<Float>> clusterValues = new Hashtable<Integer, List<Float>>();
        for (int clusterID : new_resolved_ips.keySet()) {
            clusterValues.put(clusterID, new ArrayList<Float>());

            Hashtable<String, Long> sldnValues = new_resolved_ips.get(clusterID);
            for (String sldn : sldnValues.keySet()) {
                if (numDays.keySet().contains(sldn)) {
                    long newIPCount = sldnValues.get(sldn);
                    float f = ((float) newIPCount) / Collections.max(numDays.get(sldn));
                    clusterValues.get(clusterID).add(f);

                }
            }
        }

        for (int clusterID : clusterValues.keySet()) {
            if (clusterValues.get(clusterID) == null) { // I don't think it is possible for this to ever be true
                retval.put(clusterID, null);
            } else {
                double sum = 0;
                for (double d : clusterValues.get(clusterID)) {
                    sum += d;
                }
                double val = 0;
                if (clusterValues.get(clusterID).size() > 0) {
                    val = sum / clusterValues.get(clusterID).size();
                }
                retval.put(clusterID, val);
            }
        }
    }
    return retval;
}

From source file:edu.uga.cs.fluxbuster.features.FeatureCalculator.java

License:Open Source License

/**
 * Gets run dates previous to a specific date within a window
 * of days from that date.
 *
 * @param log_date the run date
 * @param window the number of days previous to the current date
 * @return the list of previous run dates
 * @throws SQLException if there is an error retrieving the previous
 *       run dates
 */
public ArrayList<Date> getPrevDates(Date log_date, int window) throws SQLException {
    ArrayList<Date> prevDates = new ArrayList<Date>();
    if (prevDateBufDate != null && prevDateBuf != null && prevDateBufDate.equals(log_date)
            && prevDateBufWindow >= window) {

        //pull the dates within the day window from the prevDateBuf cache
        Date pd = null;
        int windowcount = 0;
        for (Date d : prevDateBuf) {
            if (windowcount >= window) {
                break;
            }
            if (pd == null) {
                pd = d;
                windowcount++;
            } else {
                DateTime morerecent = new DateTime(d.getTime());
                DateTime lessrecent = new DateTime(pd.getTime());
                Days days = Days.daysBetween(morerecent, lessrecent);
                windowcount += days.getDays();
                pd = d;
            }
            prevDates.add(d);
        }

    } else {
        String domainsprefix = properties.getProperty(DOMAINSPREFIXKEY);
        String resipsprefix = properties.getProperty(RESIPSPREFIXKEY);

        ArrayList<String> tablenames = new ArrayList<String>();
        ResultSet rs1 = null;
        try {
            rs1 = dbi.executeQueryWithResult(properties.getProperty(TABLES_QUERY1KEY));
            while (rs1.next()) {
                tablenames.add(rs1.getString(1));
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error(e);
            }
        } finally {
            if (rs1 != null && !rs1.isClosed()) {
                rs1.close();
            }
        }

        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(log_date);
        for (int i = 0; i < window; i++) {
            cal.roll(Calendar.DAY_OF_YEAR, false);
            Date temp = cal.getTime();
            String datestr = df.format(temp);
            if (tablenames.contains(domainsprefix + "_" + datestr)
                    && tablenames.contains(resipsprefix + "_" + datestr)) {
                prevDates.add(temp);
            }
        }

        //cache the values for later
        if (prevDateBuf == null) {
            prevDateBuf = new ArrayList<Date>();
        } else {
            prevDateBuf.clear();
        }
        prevDateBuf.addAll(prevDates);
        prevDateBufDate = log_date;
        prevDateBufWindow = window;
    }
    return prevDates;
}

From source file:energy.usef.mdc.workflow.altstep.MdcMeterDataQueryStubFail.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
public WorkflowContext invoke(WorkflowContext context) {
    Integer ptuDuration = (Integer) context.getValue(MeterDataQueryStepParameter.IN.PTU_DURATION.name());
    @SuppressWarnings("unchecked")
    List<String> stateData = (List<String>) context.getValue(MeterDataQueryStepParameter.IN.CONNECTIONS.name());
    MeterDataQueryTypeDto meterDataQueryTypeDto = (MeterDataQueryTypeDto) context
            .getValue(MeterDataQueryStepParameter.IN.META_DATA_QUERY_TYPE.name());

    LocalDate startDate = (LocalDate) context.getValue(MeterDataQueryStepParameter.IN.DATE_RANGE_START.name());
    LocalDate endDate = (LocalDate) context.getValue(MeterDataQueryStepParameter.IN.DATE_RANGE_END.name());

    LOGGER.info(
            "PBC invoked with the connection list size {}, the start range day {}, the end range date {}, meter data query "
                    + "type {}",
            stateData.size(), startDate, endDate, meterDataQueryTypeDto);

    int days = Days.daysBetween(startDate, endDate).getDays();
    // Map days to MeterData objects
    List<MeterDataDto> meterDatas = IntStream.rangeClosed(0, days).mapToObj(startDate::plusDays)
            .map(day -> fetchMeterDataDtoForDay(day, ptuDuration, stateData, meterDataQueryTypeDto))
            .filter(meterData -> meterData != null).collect(Collectors.toList());

    context.setValue(MeterDataQueryStepParameter.OUT.METER_DATA.name(), meterDatas);
    LOGGER.debug("Meter data DTO list with the size {} is generated", meterDatas.size());
    return context;
}

From source file:energy.usef.mdc.workflow.step.MdcMeterDataQueryStub.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
public WorkflowContext invoke(WorkflowContext context) {
    Integer ptuDuration = (Integer) context.getValue(IN.PTU_DURATION.name());
    @SuppressWarnings("unchecked")
    List<String> stateData = (List<String>) context.getValue(IN.CONNECTIONS.name());
    MeterDataQueryTypeDto meterDataQueryTypeDto = (MeterDataQueryTypeDto) context
            .getValue(MeterDataQueryStepParameter.IN.META_DATA_QUERY_TYPE.name());

    LocalDate startDate = (LocalDate) context.getValue(IN.DATE_RANGE_START.name());
    LocalDate endDate = (LocalDate) context.getValue(IN.DATE_RANGE_END.name());

    LOGGER.info(
            "PBC invoked with the connection list size {}, the start range day {}, the end range date {}, meter data query "
                    + "type {}",
            stateData.size(), startDate, endDate, meterDataQueryTypeDto);

    int days = Days.daysBetween(startDate, endDate).getDays();
    // Map days to MeterData objects
    List<MeterDataDto> meterDatas = IntStream.rangeClosed(0, days).mapToObj(startDate::plusDays)
            .map(day -> fetchMeterDataDtoForDay(day, ptuDuration, stateData, meterDataQueryTypeDto))
            .filter(meterData -> meterData != null).collect(Collectors.toList());

    context.setValue(MeterDataQueryStepParameter.OUT.METER_DATA.name(), meterDatas);
    LOGGER.debug("Meter data DTO list with the size {} is generated", meterDatas.size());
    return context;
}
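
The daysBetween plus IntStream.rangeClosed combination above is a common way to visit every day in an inclusive date range; a minimal self-contained sketch of just that pattern (dates are illustrative):

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.joda.time.Days;
import org.joda.time.LocalDate;

public class DateRangeExample {
    public static void main(String[] args) {
        LocalDate startDate = new LocalDate(2024, 1, 1);
        LocalDate endDate = new LocalDate(2024, 1, 3);

        int days = Days.daysBetween(startDate, endDate).getDays();
        // rangeClosed(0, days) includes both endpoints: 2024-01-01, 01-02, 01-03
        List<LocalDate> range = IntStream.rangeClosed(0, days)
                .mapToObj(startDate::plusDays)
                .collect(Collectors.toList());
        System.out.println(range.size()); // 3
    }
}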