Example usage for org.joda.time.format ISODateTimeFormat basicDateTime

List of usage examples for org.joda.time.format ISODateTimeFormat basicDateTime

Introduction

In this page you can find the example usage for org.joda.time.format ISODateTimeFormat basicDateTime.

Prototype

public static DateTimeFormatter basicDateTime() 

Source Link

Document

Returns a basic formatter that combines a basic date and time, separated by a 'T' (yyyyMMdd'T'HHmmss.SSSZ).

Usage

From source file:com.wlami.mibox.server.services.MetadataNotifier.java

License:Open Source License

/**
 * Get an array of all metadata filenames which have been updated since the
 * specified datetime. Used for synchronization of metadata.
 * /*ww w.  ja va 2  s.co m*/
 * @param datetimeAsString
 *            Only names of metadata files which have been updated since
 *            this datetime will be returned.
 * @return An array of new updated metadata filenames.
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
public JSONArray listUpdatedMetadataSince(@PathParam("datetime") String datetimeAsString,
        @Context HttpHeaders headers) {
    // Check whether user is properly logged in
    User user = HttpHeaderUtil.getUserFromHttpHeaders(headers, em);
    if (user == null) {
        throw new WebApplicationException(Response.status(Status.UNAUTHORIZED).build());
    }

    DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.basicDateTime();
    DateTime datetime = null;
    try {
        datetime = dateTimeFormatter.parseDateTime(datetimeAsString);
    } catch (IllegalArgumentException e) {
        log.warn("listUpdatedMetadataSince: Date time format not recognized!", e.getMessage());
        throw new WebApplicationException(
                Response.status(Status.BAD_REQUEST).entity("Could not parse date").build());
    }
    List<String> names = user.getByLastUpdatedSince(datetime, em);
    return new JSONArray(names);
}

From source file:de.raion.xmppbot.command.core.ScheduleCommand.java

License:Apache License

/**
 * Transfers the chat target and the scheduling options onto the given command.
 * The target is resolved in priority order: current multi-user chat, current
 * one-to-one chat, then the explicitly configured channel or participant name.
 *
 * @param context the bot context the command was received in
 * @param command the scheduled command to configure
 * @return the same {@code command} instance, for chaining
 */
private ScheduledCommand setParameters(XmppContext context, ScheduledCommand command) {

    // command received from a multiuserchannel
    if (context.getMultiUserChat() != null && (multiUserChatName == null)) {
        command.setMultiUserChatName(context.getMultiUserChat().getRoom());
    } else if (context.getChat() != null && (chatName == null)) {
        // command received from a one-to-one chat
        command.setParticipant(context.getChat().getParticipant());
    } else if (multiUserChatName != null) {
        // fall back to the explicitly configured multi-user chat name
        command.setMultiUserChatName(multiUserChatName);
    } else if (chatName != null) {
        // fall back to the explicitly configured participant name
        command.setParticipant(chatName);
    }

    // Pass through the optional scheduling flags, if set.
    if (daily != null) {
        command.putOption("-daily", daily);
    }
    if (workingDay != null) {
        command.putOption("-workingday", workingDay);
    }
    if (minutes != null) {
        command.putOption("-minutes", minutes.toString());
    }

    // Record the creation time in Joda basic ISO format (yyyyMMdd'T'HHmmss.SSSZ).
    command.putOption("-timestamp", ISODateTimeFormat.basicDateTime().print(creationDate.getTime()));

    return command;
}

From source file:de.zib.gndms.infra.grams.AbstractDirectoryAux.java

License:Apache License

/**
 * Collects basic statistics for the given file: canonical path, size in
 * bytes, and last-modified time rendered in Joda basic ISO date-time
 * format (yyyyMMdd'T'HHmmss.SSSZ).
 *
 * @param file the file to inspect
 * @return the populated stats, or {@code null} if the canonical path could
 *         not be resolved
 */
public FileStats stat(File file) {
    final FileStats fileStats = new FileStats();
    try {
        fileStats.path = file.getCanonicalPath();
        // Long.valueOf instead of the deprecated new Long(...) constructor.
        fileStats.size = Long.valueOf(file.length());
        fileStats.mtime = ISODateTimeFormat.basicDateTime().print(file.lastModified());
    } catch (IOException e) {
        logger.warn("Could not get file stats of file " + file, e);
        return null;
    }

    return fileStats;
}

From source file:edu.cornell.mannlib.vitro.webapp.servlet.setup.ApplicationModelSetup.java

License:Open Source License

/**
 * All of the list views should now reside in files in DISPLAY_MODEL_LOAD_AT_STARTUP_DIR.
 * This will check for custom list view annotation statements in the displayModel, check
 * if they exist in the files in DISPLAY_MODEL_LOAD_AT_STARTUP_DIR, and write any that don't
 * exist there to a file in DISPLAY_MODEL_LOAD_AT_STARTUP_DIR.  After that the statements
 * will be removed from the displayDBModel.
 *
 * NOTE(review): the original comment described a boolean result, but this method is
 * void; callers should reload displayLoadAlways from the file system after calling,
 * since this method may have changed the files.
 *
 * displayLoadAtStartup and displayModel may be modified.
 */
private void checkForOldListViews(ServletContext ctx, OntModel displayModel, Model displayLoadAtStartup) {
    // run construct for old custom list view statements from displayModel
    Model oldListViewModel = getOldListViewStatements(displayModel);
    if (log.isDebugEnabled()) {
        log.debug("Printing the old list view statements from the display model to System.out.");
        oldListViewModel.write(System.out, "N3-PP");
    }

    // find statements in old stmts that are not in loadedAtStartup and
    // save them in a new file in DISPLAY_MODEL_LOAD_AT_STARTUP_DIR
    // so that in the future they will be in loadedAtStartup
    Model stmtsInOldAndFiles = displayLoadAtStartup.intersection(displayModel);
    Model unhandledOldListViewStmts = oldListViewModel.difference(stmtsInOldAndFiles);

    boolean saved = false;
    boolean neededSave = false;

    if (unhandledOldListViewStmts != null && !unhandledOldListViewStmts.isEmpty()) {
        log.debug("need to deal with old list view statements from the display model");
        neededSave = true;
        try {
            // create a file for the old statements in the loadAtStartup directory;
            // basicDateTime() keeps the generated file name free of colons
            String newFileName = ctx.getRealPath(DISPLAY_MODEL_LOAD_AT_STARTUP_DIR + File.separator
                    + new DateTime().toString(ISODateTimeFormat.basicDateTime()) + ".n3");
            File file = new File(newFileName);
            file.createNewFile();

            log.info("Relocating " + unhandledOldListViewStmts.size()
                    + " custom list view statements from DB and saving to " + file.getAbsolutePath()
                    + File.separator + file.getName()
                    + ". These will be loaded from this file when the system starts up.");

            // try-with-resources: the original leaked the stream if write() threw
            try (FileOutputStream fileOut = new FileOutputStream(file)) {
                unhandledOldListViewStmts.write(fileOut, "N3-PP");
            }
            saved = true;
        } catch (Throwable th) {
            // deliberately broad: on any failure the statements simply stay in the DB
            log.warn("Could not save old list view statements.  Leaving them in the DB", th);
        }

        // need to reload displayLoadAlways because DISPLAY_MODEL_LOAD_AT_STARTUP_DIR may have changed
        displayLoadAtStartup.removeAll().add(readInDisplayModelLoadAtStartup(ctx));
    }

    if (oldListViewModel != null && !oldListViewModel.isEmpty()) {
        // At this point, there are old list view statements in the DB but they
        // should all be redundant with ones in DISPLAY_MODEL_LOAD_AT_STARTUP_DIR
        if ((neededSave && saved) || (!neededSave)) {
            // if there was nothing to save, just remove the old stuff
            // if there was stuff to save, only remove if it was saved.
            log.debug("removing old statements from displayModel");
            displayModel.remove(oldListViewModel);
        }
    }

}

From source file:io.druid.indexer.HadoopDruidIndexerConfig.java

License:Apache License

/**
 * Builds the path "&lt;intermediate&gt;/&lt;start&gt;_&lt;end&gt;/partitions.json" for the
 * given bucket interval, printing both endpoints in Joda basic ISO format
 * (yyyyMMdd'T'HHmmss.SSSZ).
 */
public Path makeSegmentPartitionInfoPath(Interval bucketInterval) {
    final String startTime = ISODateTimeFormat.basicDateTime().print(bucketInterval.getStart());
    final String endTime = ISODateTimeFormat.basicDateTime().print(bucketInterval.getEnd());
    final String partitionsPath = String.format("%s/%s_%s/partitions.json",
            makeIntermediatePath(), startTime, endTime);
    return new Path(partitionsPath);
}

From source file:io.druid.indexer.JobHelper.java

License:Apache License

/**
 * Builds the output path for a segment:
 * &lt;base&gt;/&lt;dataSource&gt;/&lt;start&gt;_&lt;end&gt;/&lt;version&gt;/&lt;partitionNum&gt;.
 * On HDFS the interval endpoints use the colon-free basic ISO format and
 * colons in the version are replaced, since HDFS forbids ':' in path names.
 */
public static Path makeSegmentOutputPath(Path basePath, FileSystem fileSystem, String dataSource,
        String version, Interval interval, int partitionNum) {
    Path outputPath = new Path(prependFSIfNullScheme(fileSystem, basePath), "./" + dataSource);
    final boolean onHdfs = "hdfs".equals(fileSystem.getScheme());
    if (onHdfs) {
        final String start = interval.getStart().toString(ISODateTimeFormat.basicDateTime());
        final String end = interval.getEnd().toString(ISODateTimeFormat.basicDateTime());
        outputPath = new Path(outputPath, String.format("./%s_%s", start, end));
        outputPath = new Path(outputPath, version.replace(":", "_"));
    } else {
        final String start = interval.getStart().toString();
        final String end = interval.getEnd().toString();
        outputPath = new Path(outputPath, String.format("./%s_%s", start, end));
        outputPath = new Path(outputPath, String.format("./%s", version));
    }
    return new Path(outputPath, Integer.toString(partitionNum));
}

From source file:io.druid.storage.azure.AzureDataSegmentPusher.java

License:Apache License

@Override
public String getStorageDir(DataSegment dataSegment) {
    String seg = JOINER.join(dataSegment.getDataSource(), StringUtils.format("%s_%s",
            // Use ISODateTimeFormat.basicDateTime() format, to avoid using colons in file path.
            dataSegment.getInterval().getStart().toString(ISODateTimeFormat.basicDateTime()),
            dataSegment.getInterval().getEnd().toString(ISODateTimeFormat.basicDateTime())),
            dataSegment.getVersion().replace(":", "_"), dataSegment.getShardSpec().getPartitionNum());

    log.info("DataSegment: [%s]", seg);

    // Replace colons with underscores, since they are not supported through wasb:// prefix
    return seg;/* w  w w .j  a  v  a2 s .c  o m*/
}

From source file:io.druid.storage.hdfs.HdfsDataSegmentPusher.java

License:Apache License

/**
 * Due to https://issues.apache.org/jira/browse/HDFS-13 ":" are not allowed in
 * path names. So we format paths differently for HDFS: interval endpoints use
 * the colon-free basic ISO format (yyyyMMdd'T'HHmmss.SSSZ) and colons in the
 * version are replaced with underscores.
 */

@Override
public String getStorageDir(DataSegment segment) {
    return JOINER.join(segment.getDataSource(),
            StringUtils.format("%s_%s",
                    segment.getInterval().getStart().toString(ISODateTimeFormat.basicDateTime()),
                    segment.getInterval().getEnd().toString(ISODateTimeFormat.basicDateTime())),
            // replace() instead of replaceAll(): ":" is a literal, no regex needed,
            // and this matches the sibling Azure pusher's implementation.
            segment.getVersion().replace(":", "_"));
}

From source file:io.spikex.core.helper.Variables.java

License:Apache License

/**
 * Resolves a variable reference to its value.
 *
 * Variables starting with BUILTIN_PREFIX are handled internally (dates,
 * environment/system properties, shared metrics/sensors, node/cluster/path
 * names, timestamps); any other non-empty variable is looked up as a field
 * of the given event. An unrecognized builtin is returned as-is.
 *
 * @param event the event to read plain (non-builtin) variables from; may be null
 * @param var   the variable reference to resolve
 * @return the resolved value, the variable itself for unknown builtins, or
 *         {@code null} if {@code var} is empty or the event lacks the field
 */
private Object resolveValue(final JsonObject event, final String var) {

    Object value = null;

    if (var.length() > 0) {

        if (var.startsWith(BUILTIN_PREFIX)) {

            DateTime dtNow;
            DateTimeFormatter fmt;

            if (var.startsWith(BUILTIN_SDF)) {
                // Simple date format: the rest of the variable is the pattern,
                // applied to the current UTC time
                String pattern = var.substring(BUILTIN_SDF.length());
                dtNow = new DateTime(DateTimeZone.UTC);
                fmt = DateTimeFormat.forPattern(pattern);
                value = fmt.print(dtNow);
            } else if (var.startsWith(BUILTIN_ENV)) {
                // env: look up an environment variable; empty string if unset
                Object val = m_systemEnv.get(var.substring(BUILTIN_ENV.length()));
                value = (val != null ? String.valueOf(val) : "");
            } else if (var.startsWith(BUILTIN_PROP)) {
                // prop: look up a system property; empty string if unset
                Object val = m_systemProps.get(var.substring(BUILTIN_PROP.length()));
                value = (val != null ? String.valueOf(val) : "");
            } else if (var.startsWith(BUILTIN_METRIC)) {
                // metrics: read from the vert.x shared metrics map (null without vert.x)
                if (m_vertx != null) {
                    Map<String, Object> values = m_vertx.sharedData().getMap(SHARED_METRICS_KEY);
                    value = values.get(var.substring(BUILTIN_METRIC.length()));
                }
            } else if (var.startsWith(BUILTIN_SENSOR)) {
                // sensor: read from the vert.x shared sensors map (null without vert.x)
                if (m_vertx != null) {
                    Map<String, Object> values = m_vertx.sharedData().getMap(SHARED_SENSORS_KEY);
                    value = values.get(var.substring(BUILTIN_SENSOR.length()));
                }
            } else if (var.startsWith(BUILTIN_NOW_EXTENDED)) {
                // now extended with timezone and time offset
                DateTime dt = Variables.createDateTimeNow(var);
                value = dt.getMillis();
            } else {

                // Exact-match builtins (no trailing argument)
                switch (var) {
                case BUILTIN_NODE:
                    value = m_nodeName;
                    break;
                case BUILTIN_CLUSTER:
                    value = m_clusterName;
                    break;
                case BUILTIN_SPIKEX_HOME:
                    value = m_spikexHome;
                    break;
                case BUILTIN_SPIKEX_CONF:
                    value = m_spikexConf;
                    break;
                case BUILTIN_SPIKEX_DATA:
                    value = m_spikexData;
                    break;
                case BUILTIN_SPIKEX_TMP:
                    value = m_spikexTmp;
                    break;
                case BUILTIN_CHAIN:
                    value = m_chainName;
                    break;
                case BUILTIN_HOST:
                    value = m_hostName;
                    break;
                case BUILTIN_DATE:
                    // current UTC date in basic ISO format (yyyyMMdd)
                    dtNow = new DateTime(DateTimeZone.UTC);
                    fmt = ISODateTimeFormat.basicDate();
                    value = fmt.print(dtNow);
                    break;
                case BUILTIN_TIMESTAMP:
                    // current UTC date-time in basic ISO format (yyyyMMdd'T'HHmmss.SSSZ)
                    dtNow = new DateTime(DateTimeZone.UTC);
                    fmt = ISODateTimeFormat.basicDateTime();
                    value = fmt.print(dtNow);
                    break;
                case BUILTIN_NOW:
                    value = System.currentTimeMillis();
                    break;
                default:
                    value = var; // Just return the variable def
                    break;
                }
            }
        } else {
            //
            // Retrieve value from existing field in the event
            //
            if (event != null) {
                value = event.getValue(var);
            }
        }
    }
    return value;
}

From source file:org.apache.druid.indexer.HadoopDruidIndexerConfig.java

License:Apache License

/**
 * Builds the path "&lt;intermediate&gt;/&lt;start&gt;_&lt;end&gt;/partitions.json" for the
 * given bucket interval, printing both endpoints in Joda basic ISO format
 * (yyyyMMdd'T'HHmmss.SSSZ).
 */
public Path makeSegmentPartitionInfoPath(Interval bucketInterval) {
    final String startTime = ISODateTimeFormat.basicDateTime().print(bucketInterval.getStart());
    final String endTime = ISODateTimeFormat.basicDateTime().print(bucketInterval.getEnd());
    final String partitionsPath = StringUtils.format("%s/%s_%s/partitions.json",
            makeIntermediatePath(), startTime, endTime);
    return new Path(partitionsPath);
}