Example usage for org.joda.time DateTimeZone forID

Introduction

On this page you can find example usages of org.joda.time DateTimeZone.forID, collected from open-source projects.

Prototype

@FromString
public static DateTimeZone forID(String id) 

Document

Gets a time zone instance for the specified time zone id.
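
Before the project examples below, here is a minimal, self-contained sketch of the method (the class name is illustrative; the zone ids are standard tz database identifiers):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class DateTimeZoneForIdDemo {
    public static void main(String[] args) {
        // Resolve zones by their tz database identifiers.
        DateTimeZone utc = DateTimeZone.forID("UTC");
        DateTimeZone tokyo = DateTimeZone.forID("Asia/Tokyo");

        // Render the same instant in both zones.
        DateTime now = new DateTime().withZone(utc);
        System.out.println("UTC   : " + now);
        System.out.println("Tokyo : " + now.withZone(tokyo));

        // A null id returns the default zone; an unrecognised id throws.
        try {
            DateTimeZone.forID("Not/AZone");
        } catch (IllegalArgumentException e) {
            System.out.println("Unknown zone id: " + e.getMessage());
        }
    }
}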

Usage

From source file: org.fao.geonet.kernel.harvest.harvester.AbstractHarvester.java

License: Open Source License

/**
 * Run the harvest process. This has to be protected or better for CGLib to proxy to it.
 */
protected OperResult harvest() {
    OperResult operResult = OperResult.OK;
    Boolean releaseLock = false;
    try {
        if (lock.isHeldByCurrentThread() || (releaseLock = lock.tryLock(LONG_WAIT, TimeUnit.SECONDS))) {
            long startTime = System.currentTimeMillis();
            running = true;
            cancelMonitor.set(false);
            try {

                String logfile = initializeLog();
                this.log.info("Starting harvesting of " + this.getParams().getName());
                error = null;
                errors.clear();
                final Logger logger = this.log;
                final String nodeName = getParams().getName() + " (" + getClass().getSimpleName() + ")";
                final String lastRun = new DateTime().withZone(DateTimeZone.forID("UTC")).toString();
                try {
                    login();

                    //--- update lastRun
                    harvesterSettingsManager.setValue("harvesting/id:" + id + "/info/lastRun", lastRun);

                    //--- proper harvesting
                    logger.info("Started harvesting from node : " + nodeName);
                    HarvestWithIndexProcessor h = new HarvestWithIndexProcessor(dataMan, logger);
                    // todo check (was: processwithfastindexing)
                    h.process();
                    logger.info("Ended harvesting from node : " + nodeName);

                    if (getParams().isOneRunOnly()) {
                        stop(Status.INACTIVE);
                    }
                } catch (InvalidParameterValueEx e) {
                    logger.error("The harvester " + this.getParams().getName() + "[" + this.getType()
                            + "] didn't accept some of the parameters sent.");

                    errors.add(new HarvestError(context, e));
                    error = e;
                    operResult = OperResult.ERROR;
                } catch (Throwable t) {
                    operResult = OperResult.ERROR;
                    logger.warning("Raised exception while harvesting from : " + nodeName);
                    logger.warning(" (C) Class   : " + t.getClass().getSimpleName());
                    logger.warning(" (C) Message : " + t.getMessage());
                    logger.error(t);
                    error = t;
                    errors.add(new HarvestError(context, t));
                } finally {
                    List<HarvestError> harvesterErrors = getErrors();
                    if (harvesterErrors != null) {
                        errors.addAll(harvesterErrors);
                    }
                }

                long elapsedTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime);

                logHarvest(logfile, logger, nodeName, lastRun, elapsedTime);
            } finally {
                cancelMonitor.set(false);
                running = false;
            }
        } else {
            log.error("Harvester '" + this.getID() + "' looks deadlocked.");
            log.error("Harvester '" + this.getID() + "' hasn't initiated.");
            operResult = OperResult.ERROR;
        }
    } catch (InterruptedException e) {
        log.error(e);
    } finally {
        if (lock.isHeldByCurrentThread() && releaseLock) {
            lock.unlock();
        }
    }

    return operResult;

}

From source file: org.fao.geonet.kernel.harvest.HarvestManagerImpl.java

License: Open Source License

public synchronized OperResult clearBatch(String id) throws Exception {
    if (Log.isDebugEnabled(Geonet.HARVEST_MAN))
        Log.debug(Geonet.HARVEST_MAN, "Clearing harvesting with id : " + id);

    AbstractHarvester<?> ah = hmHarvesters.get(id);

    if (ah == null) {
        return OperResult.NOT_FOUND;
    }

    long elapsedTime = System.currentTimeMillis();

    String harvesterUUID = ah.getParams().getUuid();

    final Specification<Metadata> specification = (Specification<Metadata>) MetadataSpecs
            .hasHarvesterUuid(harvesterUUID);
    int numberOfRecordsRemoved = dataMan.batchDeleteMetadataAndUpdateIndex(specification);
    ah.emptyResult();
    elapsedTime = (System.currentTimeMillis() - elapsedTime) / 1000;

    // clear last run info
    removeInfo(id, context.getUserSession().getUserId());
    ah.emptyResult();

    Element historyEl = new Element("result");
    historyEl.addContent(new Element("cleared").setAttribute("recordsRemoved", numberOfRecordsRemoved + ""));
    final String lastRun = new DateTime().withZone(DateTimeZone.forID("UTC")).toString();
    ISODate lastRunDate = new ISODate(lastRun);

    HarvestHistoryRepository historyRepository = context.getBean(HarvestHistoryRepository.class);
    HarvestHistory history = new HarvestHistory();
    history.setDeleted(true);
    history.setElapsedTime((int) elapsedTime);
    history.setHarvestDate(lastRunDate);
    history.setHarvesterName(ah.getParams().getName());
    history.setHarvesterType(ah.getType());
    history.setHarvesterUuid(ah.getParams().getUuid());
    history.setInfo(historyEl);
    history.setParams(ah.getParams().getNodeElement());

    historyRepository.save(history);
    return OperResult.OK;
}

From source file: org.fao.geonet.util.JODAISODate.java

License: Open Source License

public static String parseISODateTimes(String input1, String input2) {
    DateTimeFormatter dto = ISODateTimeFormat.dateTime();
    PeriodFormatter p = ISOPeriodFormat.standard();
    DateTime odt1;
    String odt = "";

    // input1 should be some sort of ISO time 
    // eg. basic: 20080909, full: 2008-09-09T12:21:00 etc
    // convert everything to UTC so that we remove any timezone
    // problems
    try {
        DateTime idt = parseBasicOrFullDateTime(input1);
        odt1 = dto.parseDateTime(idt.toString()).withZone(DateTimeZone.forID("UTC"));
        odt = odt1.toString();

    } catch (Exception e) {
        e.printStackTrace();
        return dt;
    }

    if (input2 == null || input2.equals(""))
        return odt;

    // input2 can be an ISO time as for input1 but also an ISO time period
    // eg. -P3D or P3D - if an ISO time period then it must be added to the
    // DateTime generated for input1 (odt1)
    // convert everything to UTC so that we remove any timezone
    // problems
    try {
        boolean minus = false;
        if (input2.startsWith("-P")) {
            input2 = input2.substring(1);
            minus = true;
        }

        if (input2.startsWith("P")) {
            Period ip = p.parsePeriod(input2);
            DateTime odt2;
            if (!minus)
                odt2 = odt1.plus(ip.toStandardDuration().getMillis());
            else
                odt2 = odt1.minus(ip.toStandardDuration().getMillis());
            odt = odt + "|" + odt2.toString();
        } else {
            DateTime idt = parseBasicOrFullDateTime(input2);
            DateTime odt2 = dto.parseDateTime(idt.toString()).withZone(DateTimeZone.forID("UTC"));
            odt = odt + "|" + odt2.toString();
        }
    } catch (Exception e) {
        e.printStackTrace();
        return odt + "|" + dt;
    }

    return odt;
}
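
A hedged illustration of how this helper might be called (the invocation below is hypothetical, and the exact output depends on parseBasicOrFullDateTime, which is not shown). Given a start instant and an ISO 8601 period, the method returns the two UTC instants joined by "|":

// Hypothetical invocation of the method above.
String range = JODAISODate.parseISODateTimes("2008-09-09T12:21:00Z", "P3D");
// Expected shape: two UTC instants joined by "|" (the start, and the start plus three days).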

From source file: org.flockdata.engine.track.service.EntityServiceNeo4J.java

License: Open Source License

/**
 * Creates a unique Entity for the fortress. FortressUserNode is automatically
 * created if it does not exist.
 *
 * @return unique primary key to be used for subsequent log calls
 */
public TrackResultBean createEntity(FortressSegment segment, DocumentType documentType,
        EntityInputBean entityInputBean, Collection<Tag> tags) throws FlockException {

    Entity entity = null;
    if (entityInputBean.getMetaKey() != null) {
        entity = getEntity(segment.getCompany(), entityInputBean.getMetaKey());
    }

    if (entity == null && (entityInputBean.getCode() != null && !entityInputBean.getCode().equals(EMPTY)))
        entity = findByCode(segment.getFortress(), documentType, entityInputBean.getCode());

    if (entity != null) {
        logger.trace("Existing entity found by Caller Ref [{}] found [{}]", entityInputBean.getCode(),
                entity.getMetaKey());
        //entityInputBean.setMetaKey(entity.getMetaKey());

        logger.trace("Existing entity [{}]", entity);
        TrackResultBean trackResult = new TrackResultBean(segment.getFortress(), entity, entityInputBean);
        trackResult.entityExisted();
        trackResult.setContentInput(entityInputBean.getContent());
        trackResult.setDocumentType(documentType);
        if (entityInputBean.getContent() != null && entityInputBean.getContent().getWhen() != null) {
            // Communicating the POTENTIAL last update so it can be recorded in the tag relationships
            entity.setFortressLastWhen(entityInputBean.getContent().getWhen().getTime());
        }
        boolean saveEntity = false;

        // Entity properties can be updated
        if (entityInputBean.getProperties() != null) {
            if (entity.setProperties(entityInputBean.getProperties())) {
                saveEntity = true;

            }
        }
        if (entityInputBean.getSegment() != null) {
            if (!entity.getSegment().getId().equals(segment.getId())) {
                entity.setSegment(segment);
                saveEntity = true;
                // ToDo - delete the search doc in the previous segment !!
            }
        }
        // We can update the entity name?
        if (entityInputBean.getName() != null && !entity.getName().equals(entityInputBean.getName())) {
            saveEntity = true;
            entity.setName(entityInputBean.getName());
        }

        if (saveEntity)
            entityDao.save(entity);
        // Could be rewriting tags
        // DAT-153 - move this to the end of the process?
        EntityLog entityLog = entityDao.getLastEntityLog(entity);
        trackResult.setTags(
                entityTagService.associateTags(segment.getCompany(), entity, entityLog, entityInputBean));
        return trackResult;
    }

    try {
        entity = makeEntity(segment, documentType, entityInputBean);
    } catch (FlockException e) {
        logger.error(e.getMessage());
        return new TrackResultBean(
                "Error processing entityInput [{}]" + entityInputBean + ". Error " + e.getMessage());
    }

    TrackResultBean trackResult = new TrackResultBean(segment.getFortress(), entity, entityInputBean);
    trackResult.setDocumentType(documentType);

    // Flag the entity as having been newly created. The flag is transient and
    // this saves on having to pass the property as a method variable when
    // associating the tags
    entity.setNew();
    trackResult.setNewEntity();

    if (tags != null)
        tags.clear();
    trackResult.setTags(entityTagService.associateTags(segment.getCompany(), entity, null, entityInputBean));

    trackResult.setContentInput(entityInputBean.getContent());
    if (entity.isNewEntity() && entityInputBean.getContent() != null) {
        // DAT-342
        // We prep the content up-front in order to get it distributed to other services
        // ASAP
        // Minimal defaults that are otherwise set in the LogService
        FortressUser contentUser = null;
        if (entityInputBean.getContent().getFortressUser() != null)
            contentUser = fortressService.getFortressUser(segment.getFortress(),
                    entityInputBean.getContent().getFortressUser());

        if (entityInputBean.getContent().getEvent() == null) {
            entityInputBean.getContent().setEvent(Log.CREATE);
        }
        Log log = entityDao.prepareLog(segment.getCompany(),
                (contentUser != null ? contentUser : entity.getCreatedBy()), trackResult, null, null);

        DateTime contentWhen = (trackResult.getContentInput().getWhen() == null
                ? new DateTime(DateTimeZone.forID(segment.getFortress().getTimeZone()))
                : new DateTime(trackResult.getContentInput().getWhen()));
        EntityLog entityLog = new EntityLog(entity, log, contentWhen);

        //if (trackResult.getContentInput().getWhen()!= null )

        logger.debug("Setting preparedLog for entity {}", entity);
        //LogResultBean logResult = new LogResultBean(trackResult.getContentInput());
        //logResult.setLogToIndex(entityLog);
        trackResult.setCurrentLog(entityLog);
    }

    return trackResult;

}

From source file: org.flockdata.engine.track.service.LogRetryService.java

License: Open Source License

/**
 * Event log record for the supplied entity from the supplied input
 *
 * @param trackResult          trackLog details containing the data to log
 * @param thisFortressUser User name in calling system that is making the change
 * @return populated log information with any error messages
 */
private LogResultBean createLog(TrackResultBean trackResult, FortressUser thisFortressUser)
        throws FlockException, IOException {
    Fortress fortress = trackResult.getEntity().getFortress();
    // ToDo: ??? noticed during tracking over AMQP
    if (thisFortressUser != null) {
        if (thisFortressUser.getFortress() == null)
            thisFortressUser.setFortress(fortress);
    }

    LogResultBean resultBean = new LogResultBean(trackResult.getContentInput());
    //ToDo: May want to track a "View" event which would not change the What data.
    if (!trackResult.getContentInput().hasData()) {
        trackResult.setLogStatus(ContentInputBean.LogStatus.IGNORE);
        trackResult.addServiceMessage("No content information provided. Ignoring this request");
        //logger.debug(trackResult.getServiceMessages());
        return resultBean;
    }

    // Transactions checks
    final TxRef txRef = txService.handleTxRef(trackResult.getContentInput(), fortress.getCompany());
    trackResult.setTxReference(txRef);

    EntityLog lastLog = getLastLog(trackResult.getEntity());

    logger.debug("createLog metaKey {}, ContentWhen {}, lastLogWhen {}, log {}",
            trackResult.getEntity().getMetaKey(), new DateTime(trackResult.getContentInput().getWhen()),
            (lastLog == null ? "[null]" : new DateTime(lastLog.getFortressWhen())), lastLog);

    DateTime contentWhen = (trackResult.getContentInput().getWhen() == null
            ? new DateTime(DateTimeZone.forID(fortress.getTimeZone()))
            : new DateTime(trackResult.getContentInput().getWhen()));

    // Is this content historic relative to what we know?
    lastLog = resolveHistoricLog(trackResult.getEntity(), lastLog, contentWhen);

    if (trackResult.getContentInput().getEvent() == null) {
        trackResult.getContentInput().setEvent(lastLog == null ? Log.CREATE : Log.UPDATE);
    }

    Log preparedLog = null;
    if (trackResult.getCurrentLog() != null)
        preparedLog = trackResult.getCurrentLog().getLog();

    if (preparedLog == null) // log is prepared during the entity process and stashed here ONLY if it is a brand new entity
        preparedLog = entityDao.prepareLog(fortress.getCompany(), thisFortressUser, trackResult, txRef,
                (lastLog != null ? lastLog.getLog() : null));
    else
        trackResult.setTxReference(txRef);

    if (lastLog != null) {
        logger.debug("createLog, existing log found {}", lastLog);
        boolean unchanged = kvService.isSame(trackResult.getEntity(), lastLog.getLog(), preparedLog);
        if (unchanged) {
            logger.debug("Ignoring a change we already have {}", trackResult);
            if (trackResult.getContentInput().isForceReindex()) { // Caller is recreating the search index
                trackResult.setLogStatus((ContentInputBean.LogStatus.REINDEX));
                resultBean.setLogToIndex(lastLog);
                trackResult
                        .addServiceMessage("Ignoring a change we already have. Honouring request to re-index");
            } else {
                trackResult.setLogStatus((ContentInputBean.LogStatus.IGNORE));
                trackResult.addServiceMessage("Ignoring a change we already have");
                trackResult.setLogIgnored();
            }

            return resultBean;
        }

    } else { // first ever log for the entity
        logger.debug("createLog - first log created {}", contentWhen);
        //if (!entity.getLastUser().getId().equals(thisFortressUser.getId())){
        trackResult.getEntity().setLastUser(thisFortressUser);
        trackResult.getEntity().setCreatedBy(thisFortressUser);
        if (trackResult.getEntity().getCreatedBy() == null)
            trackResult.getEntity().setCreatedBy(thisFortressUser);
    }

    // Prepares the change
    trackResult.getContentInput().setChangeEvent(preparedLog.getEvent());
    //resultBean.setLog(preparedLog);

    if (trackResult.getEntity().getId() == null)
        trackResult.setLogStatus(ContentInputBean.LogStatus.TRACK_ONLY);
    else
        trackResult.setLogStatus(ContentInputBean.LogStatus.OK);

    // This call also saves the entity
    EntityLog entityLog = entityDao.writeLog(trackResult.getEntity(), preparedLog, contentWhen);

    resultBean.setSysWhen(entityLog.getSysWhen());

    boolean moreRecent = (lastLog == null || lastLog.getFortressWhen().compareTo(contentWhen.getMillis()) <= 0);

    if (moreRecent)
        trackResult.setCurrentLog(entityLog); // Notional log to index.

    return resultBean;

}

From source file: org.flockdata.neo4j.service.EntityService.java

License: Open Source License

public TrackResultBean createEntity(EntityPayload payload, EntityInputBean entityInputBean)
        throws FlockException {

    Node entityNode = null;
    if (entityInputBean.getMetaKey() != null) {
        entityNode = findByMetaKey(entityInputBean.getMetaKey());
    }

    if (entityNode == null
            && (entityInputBean.getCallerRef() != null && !entityInputBean.getCallerRef().equals("")))
        entityNode = findByCallerRef(payload.getFortress(), payload.getDocumentType(),
                entityInputBean.getCallerRef());

    if (entityNode != null) {
        logger.trace("Existing entity found by Caller Ref [{}] found [{}]", entityInputBean.getCallerRef(),
                entityNode.getProperty(Entity.UUID_KEY));
        entityInputBean.setMetaKey(entityNode.getProperty(Entity.UUID_KEY).toString());

        logger.trace("Existing entity [{}]", entityNode);
        TrackResultBean trackResult = new TrackResultBean(payload.getFortress(), entityNode, entityInputBean);
        trackResult.entityExisted();
        trackResult.setContentInput(entityInputBean.getContent());
        trackResult.setDocumentType(payload.getDocumentType());

        // Process mutable properties for an entity
        if (entityInputBean.getContent() != null && entityInputBean.getContent().getWhen() != null) {
            // Communicating the POTENTIAL last update so it can be recorded in the tag relationships
            entityNode.setProperty("fortressLastWhen", entityInputBean.getContent().getWhen().getTime());
        }

        // Optimize? Remove existing properties and replace with the incoming payload
        if (entityInputBean.getProperties() != null) {
            for (String key : entityNode.getPropertyKeys()) {
                if (key.startsWith(TagResultBean.PROPS_PREFIX))
                    entityNode.removeProperty(key);
            }
            for (String key : entityInputBean.getProperties().keySet()) {
                entityNode.setProperty(TagResultBean.PROPS_PREFIX + key,
                        entityInputBean.getProperties().get(key));
            }
        }
        // We can update the entity name?
        if (entityInputBean.getName() != null
                && !entityNode.getProperty("name").equals(entityInputBean.getName())) {
            entityNode.setProperty("name", entityInputBean.getName());
        }

        // Could be rewriting tags
        // DAT-153 - move this to the end of the process?
        // FixMe
        EntityLog entityLog = logService.getLastLog(entityNode.getId());
        trackResult.setTags(entityTagService.associateTags(trackResult, entityNode, entityLog));
        return trackResult;
    }

    try {
        entityNode = saveEntityNode(payload, entityInputBean);
    } catch (FlockException e) {
        logger.error(e.getMessage());
        return new TrackResultBean(
                "Error processing entityInput [{}]" + entityInputBean + ". Error " + e.getMessage());
    }
    // Flag the entity as having been newly created. The flag is transient and
    // this saves on having to pass the property as a method variable when
    // associating the tags
    TrackResultBean trackResult = new TrackResultBean(payload.getFortress(), entityNode, entityInputBean);
    trackResult.setDocumentType(payload.getDocumentType());
    trackResult.setNew();

    // FixingMe
    trackResult.setTags(
            entityTagService.associateTags(trackResult, entityNode, logService.getLastLog(entityNode.getId())));

    trackResult.setContentInput(entityInputBean.getContent());

    if (trackResult.isNew() && entityInputBean.getContent() != null) {
        // DAT-342
        // We prep the content up-front in order to get it distributed to other services
        // ASAP
        // Minimal defaults that are otherwise set in the LogService

        // FixMe
        //            FortressUser contentUser = null;
        //            if (entityInputBean.getContent().getFortressUser() != null)
        //                contentUser = fortressService.getFortressUser(fortress, entityInputBean.getContent().getFortressUser());

        if (entityInputBean.getContent().getEvent() == null) {
            entityInputBean.getContent().setEvent(Log.CREATE);
        }
        // FixMe
        //Log log = entityDao.prepareLog(fortress.getCompany(), (contentUser != null ? contentUser : entity.getCreatedBy()), trackResult, null, null);

        DateTime contentWhen = (trackResult.getContentInput().getWhen() == null
                ? new DateTime(DateTimeZone.forID(payload.getFortress().getTimeZone()))
                : new DateTime(trackResult.getContentInput().getWhen()));
        //            EntityLog entityLog = new EntityLog(entity, log, contentWhen);

        //if (trackResult.getContentInput().getWhen()!= null )

        // FixMe
        //            logger.debug("Setting preparedLog for entity {}", entity);
        //            LogResultBean logResult = new LogResultBean(trackResult.getContentInput());

        //            logResult.setLogToIndex(entityLog);
        //trackResult.setEntityLog(logResult);
        //trackResult.setPreparedLog( entityLog );
    }

    return trackResult;

}

From source file: org.forgerock.openidm.util.DateUtil.java

License: CDDL License

/**
 * Creates a DateUtil using a specified timezone and generates ISO8601
 * timestamps.
 *
 * @param zone
 *            string representation of a timezone. i.e. "UTC" or
 *            "Asia/Tokyo"
 */
private DateUtil(String zone) {
    this(DateTimeZone.forID(zone));
}

From source file: org.geomesa.QuickStart.java

License: Apache License

static FeatureCollection createNewFeatures(SimpleFeatureType simpleFeatureType, int numNewFeatures) {
    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();

    String id;
    Object[] NO_VALUES = {};
    String[] PEOPLE_NAMES = { "Addams", "Bierce", "Clemens" };
    Long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
    Random random = new Random(5771);
    DateTime MIN_DATE = new DateTime(2014, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC"));
    Double MIN_X = -78.0;
    Double MIN_Y = -39.0;
    Double DX = 2.0;
    Double DY = 2.0;

    for (int i = 0; i < numNewFeatures; i++) {
        // create the new (unique) identifier and empty feature shell
        id = "Observation." + Integer.toString(i);
        SimpleFeature simpleFeature = SimpleFeatureBuilder.build(simpleFeatureType, NO_VALUES, id);

        // be sure to tell GeoTools explicitly that you want to use the ID you provided
        simpleFeature.getUserData().put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE);

        // populate the new feature's attributes

        // string value
        simpleFeature.setAttribute("Who", PEOPLE_NAMES[i % PEOPLE_NAMES.length]);

        // long value
        simpleFeature.setAttribute("What", i);

        // location:  construct a random point within a 2-degree-per-side square
        double x = MIN_X + random.nextDouble() * DX;
        double y = MIN_Y + random.nextDouble() * DY;
        Geometry geometry = WKTUtils$.MODULE$.read("POINT(" + x + " " + y + ")");

        // date-time:  construct a random instant within a year
        simpleFeature.setAttribute("Where", geometry);
        DateTime dateTime = MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR));
        simpleFeature.setAttribute("When", dateTime.toDate());

        // another string value
        // "Why"; left empty, showing that not all attributes need values

        // accumulate this new feature in the collection
        featureCollection.add(simpleFeature);
    }

    return featureCollection;
}

From source file: org.georchestra.analytics.StatisticsController.java

License: Open Source License

public StatisticsController(String localTimezone) {
    // Parser to convert from local time to DB time (UTC)
    this.localInputFormatter = DateTimeFormat.forPattern("yyyy-MM-dd")
            .withZone(DateTimeZone.forID(localTimezone));
    this.dbOutputFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
            .withZone(DateTimeZone.forID("UTC"));

    // Used to parse date from DB based on granularity
    this.dbHourInputFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH").withZone(DateTimeZone.forID("UTC"));
    this.dbHourOutputFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH")
            .withZone(DateTimeZone.forID(localTimezone));

    this.dbDayInputFormatter = DateTimeFormat.forPattern("y-M-d").withZone(DateTimeZone.forID("UTC"));
    this.dbDayOutputFormatter = DateTimeFormat.forPattern("yyyy-MM-dd")
            .withZone(DateTimeZone.forID(localTimezone));

    this.dbWeekInputFormatter = DateTimeFormat.forPattern("y-w").withZone(DateTimeZone.forID("UTC"));
    this.dbWeekOutputFormatter = DateTimeFormat.forPattern("yyyy-ww")
            .withZone(DateTimeZone.forID(localTimezone));

    this.dbMonthInputFormatter = DateTimeFormat.forPattern("y-M").withZone(DateTimeZone.forID("UTC"));
    this.dbMonthOutputFormatter = DateTimeFormat.forPattern("yyyy-MM")
            .withZone(DateTimeZone.forID(localTimezone));
}
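
As a sketch of how these paired formatters would be used (the date value and the Europe/Paris zone are made up for illustration), a local date arriving from the UI can be re-expressed in the database's UTC format:

// Hypothetical usage of the formatters built above, inside the same class.
DateTime localStart = localInputFormatter.parseDateTime("2024-03-01");
String dbStart = dbOutputFormatter.print(localStart);
// e.g. "2024-02-29 23:00:00" when localTimezone is "Europe/Paris" (UTC+1 in winter).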

From source file: org.gephi.graph.impl.Serialization.java

License: Apache License

private DateTimeZone deserializeTimeZone(final DataInput is) throws IOException, ClassNotFoundException {
    String id = (String) deserialize(is);

    DateTimeZone tz = DateTimeZone.forID(id);
    model.store.timeZone = tz;

    return tz;
}