Example usage for org.joda.time DateTime getDayOfMonth

List of usage examples for org.joda.time DateTime getDayOfMonth

Introduction

On this page you can find example usage for org.joda.time.DateTime.getDayOfMonth().

Prototype

public int getDayOfMonth() 

Document

Get the day of month field value.
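
For orientation, here is a minimal stand-alone sketch (not taken from any of the projects below) showing getDayOfMonth() alongside the other Joda-Time field accessors that appear throughout these examples:

import org.joda.time.DateTime;

public class GetDayOfMonthDemo {
    public static void main(String[] args) {
        DateTime dt = new DateTime(2024, 2, 29, 13, 45, 30, 0);
        System.out.println(dt.getDayOfMonth());  // 29
        System.out.println(dt.getMonthOfYear()); // 2 (months are 1-based: January is 1)
        System.out.println(dt.getYear());        // 2024
    }
}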

Usage

From source file:org.isisaddons.app.kitchensink.fixture.date.DateObjectsFixture.java

License:Apache License

private DateObject create(final String name, final DateTime dt, final ExecutionContext ec) {
    final DateObject dateObject = dateObjects.createDateObject(name, dt.getYear(), dt.getMonthOfYear(),
            dt.getDayOfMonth(), dt.getHourOfDay(), dt.getMinuteOfHour());

    objects.add(dateObject);

    return ec.addResult(this, dateObject);
}

From source file:org.itechkenya.leavemanager.api.DateTimeUtil.java

License:Open Source License

/**
 * @return true if dateTime1 and dateTime2 share the same day of month and month of year.
 */
private static boolean shareDayAndMonth(DateTime dateTime1, DateTime dateTime2) {
    return dateTime1.getDayOfMonth() == dateTime2.getDayOfMonth()
            && dateTime1.getMonthOfYear() == dateTime2.getMonthOfYear();
}
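
To illustrate the semantics (shareDayAndMonth is private to DateTimeUtil, so this is only a hypothetical sketch, not code callable from outside the class): the year and the time of day are deliberately ignored, so only the calendar day and month have to agree.

DateTime a = new DateTime(2020, 3, 15, 8, 0, 0, 0);
DateTime b = new DateTime(2024, 3, 15, 23, 59, 0, 0);
shareDayAndMonth(a, b);                                     // true  - both are the 15th of March
shareDayAndMonth(a, new DateTime(2020, 4, 15, 8, 0, 0, 0)); // false - different month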

From source file:org.itechkenya.leavemanager.domain.Contract.java

License:Open Source License

public BigDecimal calculateLeaveBalanceAtYearEnd(int year) {
    BigDecimal balance = BigDecimal.ZERO;
    if (this.calculateContractYear() > 1) {
        this.calculateLeaveEventValues();
        DateTime contractStartDateTime;
        for (LeaveEvent leaveEvent : this.getLeaveEventList()) {
            contractStartDateTime = new DateTime(this.getStartDate());
            Date contractDateThisYear = DateTimeUtil.createDate(year + 1,
                    contractStartDateTime.getMonthOfYear(), contractStartDateTime.getDayOfMonth());
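            // compareTo(...) != 1 means the leave event started on or before the
            // contract anniversary date in the following year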
            if (leaveEvent.getStartDate().compareTo(contractDateThisYear) != 1) {
                balance = leaveEvent.getBalance();
            }
        }
    }
    return balance;
}

From source file:org.itechkenya.leavemanager.domain.Contract.java

License:Open Source License

public List<PreviouslyCompletedPeriod> calculatePreviouslyCompletedPeriod() {

    List<PreviouslyCompletedPeriod> previousCompletedPeriods = new ArrayList<>();

    DateTime today = new DateTime(new Date());
    DateTime contractStartDate = new DateTime(this.getStartDate());

    SimpleDateFormat monthSdf = new SimpleDateFormat("yyyyMM");
    SimpleDateFormat yearSdf = new SimpleDateFormat("yyyy");

    DateTime earnDateTime;
    DateTime recordDateTime;
    Date date;

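    // The most recently completed monthly period: once today's day-of-month reaches the
    // contract's start day, the month earned is last month; otherwise it is two months ago.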
    if (today.getDayOfMonth() >= contractStartDate.getDayOfMonth()) {
        earnDateTime = today.minusMonths(1);
    } else {
        earnDateTime = today.minusMonths(2);
    }
    recordDateTime = earnDateTime.plusMonths(1);
    date = DateTimeUtil.createDate(recordDateTime.getYear(), recordDateTime.getMonthOfYear(),
            contractStartDate.getDayOfMonth());
    previousCompletedPeriods
            .add(new PreviouslyCompletedPeriod(monthSdf.format(earnDateTime.toDate()), date, PeriodType.MONTH));

    int contractYearCount = this.calculateContractYear();
    if (contractYearCount > 1) {
        int previousContractYear = this.calculatePreviousContractYear(contractYearCount);

        Date recordDate = DateTimeUtil.createDate(previousContractYear + 1, contractStartDate.getMonthOfYear(),
                contractStartDate.getDayOfMonth());

        previousCompletedPeriods.add(new PreviouslyCompletedPeriod(String.valueOf(previousContractYear),
                recordDate, PeriodType.YEAR));
    }
    return previousCompletedPeriods;
}

From source file:org.jarvis.core.services.CoreSunsetSunrise.java

License:Apache License

/**
 * Write this object to statistics (based on the next day).
 * @param lat the latitude
 * @param lng the longitude
 * @return the sunrise/sunset results for the next day, as a SunApiRest
 */
public SunApiRest get(String lat, String lng) {
    /**
     * build response
     */
    DateTime today = DateTime.now().plusDays(1);
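    // Build a "yyyy-M-d" date string for tomorrow; month and day are 1-based and not zero-padded.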
    String d = today.getYear() + "-" + today.getMonthOfYear() + "-" + today.getDayOfMonth();
    Response entity;
    entity = client.target(baseurl).path("/json").queryParam("lat", lat).queryParam("lng", lng)
            .queryParam("date", d).queryParam("formatted", "0").request(MediaType.APPLICATION_JSON)
            .accept(MediaType.APPLICATION_JSON).acceptEncoding("charset=UTF-8").get();

    /**
     * verify result
     */
    if (entity.getStatus() == 200) {
        String raw = entity.readEntity(String.class);
        SunApiResultRest result = null;
        try {
            result = mapper.readValue(raw, SunApiResultRest.class);
        } catch (IOException e) {
            throw new TechnicalException(e);
        }
        if (result == null) {
            return new SunApiRest();
        }
        return result.results;
    } else {
        throw new TechnicalException(entity.getStatus() + ":/json");
    }
}

From source file:org.jasig.portal.portlets.statistics.BaseStatisticsReportController.java

License:Apache License

/**
 * Build the aggregation {@link DataTable}
 */
protected final DataTable buildAggregationReport(F form) throws TypeMismatchException {
    //Pull data out of form for per-group fetching
    final AggregationInterval interval = form.getInterval();
    final DateMidnight start = form.getStart();
    final DateMidnight end = form.getEnd();

    final DateTime startDateTime = start.toDateTime();
    //Use a query end of the end date at 23:59:59
    final DateTime endDateTime = end.plusDays(1).toDateTime().minusSeconds(1);

    //Get the list of DateTimes used on the X axis in the report
    final List<DateTime> reportTimes = this.intervalHelper.getIntervalStartDateTimesBetween(interval,
            startDateTime, endDateTime, maxIntervals);

    final Map<D, SortedSet<T>> groupedAggregations = createColumnDiscriminatorMap(form);

    //Determine the ValueType of the date/time column. Use the most specific column type possible
    final ValueType dateTimeColumnType;
    if (interval.isHasTimePart()) {
        //If start/end are the same day just display the time
        if (startDateTime.toDateMidnight().equals(endDateTime.toDateMidnight())) {
            dateTimeColumnType = ValueType.TIMEOFDAY;
        }
        //interval has time data and start/end are on different days, show full date time
        else {
            dateTimeColumnType = ValueType.DATETIME;
        }
    }
    //interval is date only
    else {
        dateTimeColumnType = ValueType.DATE;
    }

    //Setup the date/time column description
    final ColumnDescription dateTimeColumn;
    switch (dateTimeColumnType) {
    case TIMEOFDAY: {
        dateTimeColumn = new ColumnDescription("time", dateTimeColumnType, "Time");
        break;
    }
    default: {
        dateTimeColumn = new ColumnDescription("date", dateTimeColumnType, "Date");
    }
    }

    final DataTable table = new JsonDataTable();
    table.addColumn(dateTimeColumn);

    //Setup columns in the DataTable 
    final Set<D> columnGroups = groupedAggregations.keySet();
    for (final D columnMapping : columnGroups) {
        final Collection<ColumnDescription> columnDescriptions = this.getColumnDescriptions(columnMapping,
                form);
        table.addColumns(columnDescriptions);
    }

    //Query for all aggregation data in the time range for all groups.  Only the
    //interval and discriminator data is used from the keys.
    final Set<K> keys = createAggregationsQueryKeyset(columnGroups, form);
    final BaseAggregationDao<T, K> baseAggregationDao = this.getBaseAggregationDao();
    final Collection<T> aggregations = baseAggregationDao.getAggregations(startDateTime, endDateTime, keys,
            extractGroupsArray(columnGroups));

    //Organize the results by group and sort them chronologically by adding them to the sorted set
    for (final T aggregation : aggregations) {
        final D discriminator = aggregation.getAggregationDiscriminator();
        final SortedSet<T> results = groupedAggregations.get(discriminator);
        results.add(aggregation);
    }

    //Build Map from discriminator column mapping to result iterator to allow putting results into
    //the correct column AND the correct time slot in the column
    Comparator<? super D> comparator = getDiscriminatorComparator();
    final Map<D, PeekingIterator<T>> groupedAggregationIterators = new TreeMap<D, PeekingIterator<T>>(
            (comparator));
    for (final Entry<D, SortedSet<T>> groupedAggregationEntry : groupedAggregations.entrySet()) {
        groupedAggregationIterators.put(groupedAggregationEntry.getKey(),
                Iterators.peekingIterator(groupedAggregationEntry.getValue().iterator()));
    }

    /*
     * Populate the data, filling in blank spots. The full list of interval DateTimes is used to create every row in
     * the query range. Then each column's iterator is peeked to see whether it has data for that row's time slot;
     * if not, the gap is filled with values from a null aggregation.
     */
    for (final DateTime rowTime : reportTimes) {
        // create the row
        final TableRow row = new TableRow();

        // add the date to the first cell
        final Value dateTimeValue;
        switch (dateTimeColumnType) {
        case DATE: {
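            // Joda-Time months are 1-based (January is 1), while the visualization DateValue and
            // DateTimeValue constructors below expect a 0-based month, hence the "- 1".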
            dateTimeValue = new DateValue(rowTime.getYear(), rowTime.getMonthOfYear() - 1,
                    rowTime.getDayOfMonth());
            break;
        }
        case TIMEOFDAY: {
            dateTimeValue = new TimeOfDayValue(rowTime.getHourOfDay(), rowTime.getMinuteOfHour(), 0);
            break;
        }
        default: {
            dateTimeValue = new DateTimeValue(rowTime.getYear(), rowTime.getMonthOfYear() - 1,
                    rowTime.getDayOfMonth(), rowTime.getHourOfDay(), rowTime.getMinuteOfHour(), 0, 0);
            break;
        }
        }
        row.addCell(new TableCell(dateTimeValue));

        for (final PeekingIterator<T> groupedAggregationIteratorEntry : groupedAggregationIterators.values()) {
            List<Value> values = null;

            if (groupedAggregationIteratorEntry.hasNext()) {
                final T aggr = groupedAggregationIteratorEntry.peek();
                if (rowTime.equals(aggr.getDateTime())) {
                    //Data is for the correct time slot, advance the iterator
                    groupedAggregationIteratorEntry.next();

                    values = createRowValues(aggr, form);
                }
            }

            //Gap in the data, fill it in using a null aggregation
            if (values == null) {
                values = createRowValues(null, form);
            }

            //Add the values to the row
            for (final Value value : values) {
                row.addCell(value);
            }
        }

        table.addRow(row);
    }

    return table;
}

From source file:org.jasig.portlet.calendar.adapter.ExchangeCalendarAdapter.java

License:Apache License

/**
 * Get an XMLGregorianCalendar for the specified date.
 *
 * @param date the Joda-Time DateTime to convert
 * @return an XMLGregorianCalendar populated with the same date and time fields
 * @throws DatatypeConfigurationException
 */
protected XMLGregorianCalendar getXmlDate(DateTime date) throws DatatypeConfigurationException {
    // construct an XMLGregorianCalendar
    DatatypeFactory datatypeFactory = DatatypeFactory.newInstance();
    XMLGregorianCalendar start = datatypeFactory.newXMLGregorianCalendar();
    start.setYear(date.getYear());
    start.setMonth(date.getMonthOfYear());
    start.setTime(date.getHourOfDay(), date.getMinuteOfHour(), date.getSecondOfMinute(),
            date.getMillisOfSecond());
    start.setDay(date.getDayOfMonth());
    return start;
}
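
Note that only the field values are copied here; no time zone offset is set on the resulting XMLGregorianCalendar, so the zone of the original DateTime is effectively dropped.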

From source file:org.jbpm.designer.web.server.SimulationServlet.java

License:Apache License

private String getDateString(long seDate) {
    Date d = new Date(seDate);
    DateTime dt = new DateTime(seDate);
    StringBuffer retBuf = new StringBuffer();
    retBuf.append(dt.getYear()).append(",");
    retBuf.append(dt.getMonthOfYear()).append(",");
    retBuf.append(dt.getDayOfMonth()).append(",");
    retBuf.append(dt.getHourOfDay()).append(",");
    retBuf.append(dt.getMinuteOfHour()).append(",");
    retBuf.append(dt.getSecondOfMinute()).append(",");
    retBuf.append(dt.getMillisOfSecond());
    return retBuf.toString();
}
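
For illustration (assuming the JVM's default time zone), a seDate falling on 29 February 2024 at 13:45:30.250 would be rendered as "2024,2,29,13,45,30,250"; note that getMonthOfYear() is 1-based, so February appears as 2.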

From source file:org.jgrasstools.hortonmachine.modules.hydrogeomorphology.energybalance.OmsEnergyBalance.java

License:Open Source License

@Execute
public void process() throws Exception {
    outPnet = new HashMap<Integer, double[]>();
    outPrain = new HashMap<Integer, double[]>();
    outPsnow = new HashMap<Integer, double[]>();
    outSwe = new HashMap<Integer, double[]>();
    outNetradiation = new HashMap<Integer, double[]>();
    outNetshortradiation = new HashMap<Integer, double[]>();

    if (safePoint == null)
        safePoint = new SafePoint();
    // retrieve number of bands
    num_EI = 0;
    for (EIEnergy energy : inEnergy) {
        int j = energy.energeticBandId;
        if (j > num_EI) {
            num_EI = j;
        }
    }
    num_EI++;
    num_ES = 0;
    for (EIAreas area : inAreas) {
        int j = area.altimetricBandId;
        if (j > num_ES) {
            num_ES = j;
        }
    }
    num_ES++;

    if (basinid2BasinindexMap == null) {
        // get basin features from feature link
        basinsFeatures = new ArrayList<SimpleFeature>();
        FeatureIterator<SimpleFeature> featureIterator = inBasins.features();

        basinNum = inBasins.size();
        SimpleFeatureType featureType = inBasins.getSchema();

        int basinIdFieldIndex = featureType.indexOf(fBasinid);
        if (basinIdFieldIndex == -1) {
            throw new IllegalArgumentException(
                    "The field of the basin id couldn't be found in the supplied basin data.");
        }
        if (fBasinlandcover != null) {
            usoFieldIndex = featureType.indexOf(fBasinlandcover);
            if (usoFieldIndex == -1) {
                throw new IllegalArgumentException(
                        "The field of the soil type (usofield) couldn't be found in the supplied basin data.");
            }
        }
        basinid2BasinindexMap = new HashMap<Integer, Integer>();
        basinindex2BasinidMap = new HashMap<Integer, Integer>();

        pm.beginTask("Read basins data.", inBasins.size());
        int index = 0;
        Abasin = new double[basinNum];
        while (featureIterator.hasNext()) {
            pm.worked(1);
            SimpleFeature feature = featureIterator.next();
            basinsFeatures.add(feature);
            basinid2BasinindexMap.put(((Number) feature.getAttribute(basinIdFieldIndex)).intValue(), index);
            basinindex2BasinidMap.put(index, ((Number) feature.getAttribute(basinIdFieldIndex)).intValue());
            Geometry basinGeometry = (Geometry) feature.getDefaultGeometry();
            Abasin[index] = basinGeometry.getArea() / 1000000.0; // area in km2 as the input
            // area for energetic and
            // altimetric bands
            index++;

            // read land cover if requested
            if (usoFieldIndex != -1) {
                if (usoList == null) {
                    usoList = new ArrayList<Integer>();
                }
                int uso = ((Number) feature.getAttribute(usoFieldIndex)).intValue();
                usoList.add(uso);
            }

        }
        featureIterator.close();
        pm.done();
    }

    // get rain from scalar link
    double[] rain = new double[basinNum];
    Set<Integer> basinIdSet = inRain.keySet();
    pm.beginTask("Read rain data.", basinIdSet.size());
    for (Integer basinId : basinIdSet) {
        pm.worked(1);
        Integer index = basinid2BasinindexMap.get(basinId);
        if (index == null) {
            continue;
        }
        double[] value = inRain.get(basinId);
        if (!isNovalue(value[0])) {
            rain[index] = value[0];
        } else {
            rain[index] = 0.0;
        }
    }
    pm.done();

    // get energy values from scalar link ([12][num_EI][basinNum]) 12 ==
    // 0,1,2,3,4,5,5,4,3,2,1,0 ones at the beginning of the simulation
    if (EI == null) {
        EI = new double[12][num_EI][basinNum];
        pm.beginTask("Read energy index data.", inEnergy.size());
        for (EIEnergy energy : inEnergy) {
            pm.worked(1);
            int tempId = energy.basinId;
            Integer index = basinid2BasinindexMap.get(tempId);
            if (index == null) {
                // basinid2BasinindexMap.remove(tempId);
                continue;
            }
            int j = energy.energeticBandId;
            int k = energy.virtualMonth;
            int kInverse = 11 - k;

            EI[k][j][index] = energy.energyValue;
            EI[kInverse][j][index] = energy.energyValue;
        }
        pm.done();
    }
    // get area bande fascie from scalar link ([num_ES][num_EI][basinNum]) ones at the
    // beginning of the simulation
    if (A == null) {
        A = new double[num_ES][num_EI][basinNum];

        pm.beginTask("Read area per heigth and band data.", inAreas.size());

        HashMap<Integer, HashMap<Integer, HashMap<Integer, Double>>> idbasinMap = new HashMap<Integer, HashMap<Integer, HashMap<Integer, Double>>>();
        for (EIAreas area : inAreas) {
            int idBasin = area.basinId;
            HashMap<Integer, HashMap<Integer, Double>> idfasceMap = idbasinMap.get(idBasin);
            if (idfasceMap == null) {
                idfasceMap = new HashMap<Integer, HashMap<Integer, Double>>();
                idbasinMap.put(idBasin, idfasceMap);
            }
            int idAltimetricBand = area.altimetricBandId;
            HashMap<Integer, Double> idbandeMap = idfasceMap.get(idAltimetricBand);
            if (idbandeMap == null) {
                idbandeMap = new HashMap<Integer, Double>();
                idfasceMap.put(idAltimetricBand, idbandeMap);
            }
            int idEnergeticBand = area.energyBandId;
            double areaValue = area.areaValue;
            idbandeMap.put(idEnergeticBand, areaValue);
            pm.worked(1);
        }
        pm.done();

        for (int i = 0; i < basinNum; i = i + 1) {
            Integer index = basinindex2BasinidMap.get(i);
            if (index == null) {
                basinid2BasinindexMap.remove(i);
                continue;
            }
            HashMap<Integer, HashMap<Integer, Double>> fasceMap = idbasinMap.get(index);

            for (int j = 0; j < num_ES; j++) {
                HashMap<Integer, Double> bandeMap = fasceMap.get(j);
                for (int k = 0; k < num_EI; k++) {
                    A[j][k][i] = bandeMap.get(k);
                }
            }
        }
    }

    // get T (temperatures per basin per band) from scalar input link at each time step
    double[][] T = null;
    if (inTemp != null) {
        T = new double[basinNum][num_ES];
        pm.beginTask("Read temperature data.", inTemp.size());
        Set<Integer> basinIdsSet = inTemp.keySet();
        for (Integer basinId : basinIdsSet) {
            pm.worked(1);
            Integer index = basinid2BasinindexMap.get(basinId);
            if (index == null) {
                // data for a basin that is not considered, ignore it
                continue;
            }
            double[] values = inTemp.get(basinId);
            T[index] = values;
        }
        pm.done();
    }

    // get V (wind speed per basin per band) from scalar link at each time step
    double[][] V = null;
    if (inWind != null) {
        V = new double[basinNum][num_ES];
        pm.beginTask("Read wind speed data.", inWind.size());
        Set<Integer> basinIdsSet = inWind.keySet();
        for (Integer basinId : basinIdsSet) {
            pm.worked(1);
            Integer index = basinid2BasinindexMap.get(basinId);
            if (index == null) {
                // data for a basin that is not considered, ignore it
                continue;
            }
            double[] values = inWind.get(basinId);
            V[index] = values;
        }
    }

    // get P (pressure per basin per band) from scalar link at each time step
    double[][] P = null;
    if (inPressure != null) {
        P = new double[basinNum][num_ES];
        pm.beginTask("Read pressure data.", inPressure.size());
        Set<Integer> basinIdsSet = inPressure.keySet();
        for (Integer basinId : basinIdsSet) {
            pm.worked(1);
            Integer index = basinid2BasinindexMap.get(basinId);
            if (index == null) {
                // data for a basin that is not considered, ignore it
                continue;
            }
            double[] values = inPressure.get(basinId);
            P[index] = values;
        }
        pm.done();
    }

    // get RH (relative humidity per basin per band) from scalar link at each time step
    double[][] RH = null;
    if (inRh != null) {
        RH = new double[basinNum][num_ES];
        pm.beginTask("Read humidity data.", inRh.size());
        Set<Integer> basinIdsSet = inRh.keySet();
        for (Integer basinId : basinIdsSet) {
            pm.worked(1);
            Integer index = basinid2BasinindexMap.get(basinId);
            if (index == null) {
                // data for a basin that is not considered, ignore it
                continue;
            }
            double[] values = inRh.get(basinId);
            RH[index] = values;
        }
        pm.done();
    }

    // get dtday (daily temperature range per basin per band) from scalar link at each time
    // step
    double[][] DTd = null;
    if (inDtday != null) {
        DTd = new double[basinNum][num_ES];
        pm.beginTask("Read dtday data.", inDtday.size());
        Set<Integer> basinIdsSet = inDtday.keySet();
        for (Integer basinId : basinIdsSet) {
            pm.worked(1);
            Integer index = basinid2BasinindexMap.get(basinId);
            if (index == null) {
                // data for a basin that is not considered, ignore it
                continue;
            }
            double[] values = inDtday.get(basinId);
            DTd[index] = values;
        }
        pm.done();
    }

    // get dtmonth (monthly temperature range per basin per band) from scalar link at each
    // time step
    double[][] DTm = null;
    if (inDtmonth != null) {
        DTm = new double[basinNum][num_ES];
        pm.beginTask("Read dtday data.", inDtmonth.size());
        Set<Integer> basinIdsSet = inDtmonth.keySet();
        for (Integer basinId : basinIdsSet) {
            pm.worked(1);
            Integer index = basinid2BasinindexMap.get(basinId);
            if (index == null) {
                // data for a basin that is not considered, ignore it
                continue;
            }
            double[] values = inDtmonth.get(basinId);
            DTm[index] = values;
        }
        pm.done();
    }

    /*
     * set the current time: day, month and hour
     */
    DateTime currentDatetime = formatter.parseDateTime(tCurrent);
    int currentMonth = currentDatetime.getMonthOfYear();
    int currentDay = currentDatetime.getDayOfMonth();
    int currentMinute = currentDatetime.getMinuteOfDay();
    double hour = currentMinute / 60.0;
    System.out.println("ora: " + hour);

    if (averageTemperature == null) {
        averageTemperature = new double[2 * basinNum];
    } else {
        Arrays.fill(averageTemperature, 0.0);
    }
    /*
     * these have to be taken from initial values 
     */
    if (safePoint.SWE == null) {
        if (pInitsafepoint != null && new File(pInitsafepoint).exists()) {
            safePoint = getSafePointData();
        } else {
            safePoint.SWE = new double[num_ES][num_EI][basinNum];
            if (pInitswe == -9999.0) {
                pInitswe = 0.0;
            }
            for (int i = 0; i < basinNum; i++) {
                double sweTmp = pInitswe;
                if (usoList != null) {
                    int usoTmp = usoList.get(i);
                    if (usoTmp == pGlacierid) {
                        sweTmp = GLACIER_SWE;
                    }
                }
                for (int k = 0; k < num_ES; k++) {
                    for (int j = 0; j < num_EI; j++) {
                        safePoint.SWE[j][k][i] = sweTmp;
                    }
                }
            }
            safePoint.U = new double[num_ES][num_EI][basinNum];
            safePoint.SnAge = new double[num_ES][num_EI][basinNum];
            safePoint.Ts = new double[num_ES][num_EI][basinNum];
        }
    }

    // this has to be taken from a file, scalarreader
    // TODO add the input canopyLink for the canopy height for each altimetric band
    /*
     * if there is no canopy input matrix for the model create an empty canopy matrix for each elevation band and for each basin
     */
    double[][] canopy = new double[num_ES][basinNum];
    for (int i = 0; i < canopy.length; i++) {
        for (int j = 0; j < canopy[0].length; j++) {
            canopy[i][j] = pCanopyh;
        }
    }
    checkParametersAndRunEnergyBalance(rain, T, V, P, RH, currentMonth, currentDay, hour, Abasin, A, EI, DTd,
            DTm, canopy);

}

From source file:org.jimcat.gui.histogram.image.DateTakenDimension.java

License:Open Source License

/**
 * Get a label text for the given index.
 * 
 * @param index the day index
 * @return label text for the given index, or null if the day should not be labeled
 */
private String getDayLabel(int index) {
    DateTime date = indexToDate(DAYS, index);
    int day = date.getDayOfMonth();
    if (day == 1 || day == 15) {
        return DAY_LABEL_FORMATTER.print(date);
    }
    return null;
}