Example usage for org.joda.time.format ISODateTimeFormat dateTime

List of usage examples for org.joda.time.format ISODateTimeFormat dateTime

Introduction

On this page you can find example usage for org.joda.time.format.ISODateTimeFormat.dateTime().

Prototype

public static DateTimeFormatter dateTime() 

Document

Returns a formatter that combines a full date and time, separated by a 'T' (yyyy-MM-dd'T'HH:mm:ss.SSSZZ).
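
A minimal standalone sketch of a print/parse round trip with this formatter (the example values are arbitrary):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

public class IsoDateTimeExample {
    public static void main(String[] args) {
        DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
        // Print a full date and time in the yyyy-MM-dd'T'HH:mm:ss.SSSZZ form
        String text = fmt.print(new DateTime(2024, 1, 15, 10, 30, 0, 0, DateTimeZone.UTC));
        // text is an ISO-8601 timestamp such as "2024-01-15T10:30:00.000Z"
        DateTime parsed = fmt.parseDateTime(text);
        System.out.println(text + " -> " + parsed);
    }
}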

Usage

From source file:gsonjodatime.DateTimeConverter.java

License:Open Source License

/**
 * Gson invokes this call-back method during deserialization when it encounters a field of the
 * specified type. <p>
 *
 * In the implementation of this call-back method, you should consider invoking
 * {@link JsonDeserializationContext#deserialize(JsonElement, Type)} method to create objects
 * for any non-trivial field of the returned object. However, you should never invoke it on the
 * same type passing {@code json} since that will cause an infinite loop (Gson will call your
 * call-back method again).
 * @param json The Json data being deserialized
 * @param typeOfT The type of the Object to deserialize to
 * @return a deserialized object of the specified type typeOfT which is a subclass of {@code T}
 * @throws JsonParseException if json is not in the expected format of {@code typeOfT}
 */
@Override
public DateTime deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
        throws JsonParseException {
    // Do not try to deserialize null or empty values
    if (json.getAsString() == null || json.getAsString().isEmpty()) {
        return null;
    }

    final DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
    return fmt.parseDateTime(json.getAsString());
}
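
To use this converter, it would typically be registered with Gson for the DateTime type. A minimal wiring sketch (assuming the DateTimeConverter class above is on the classpath, plus imports for com.google.gson.Gson, com.google.gson.GsonBuilder, and org.joda.time.DateTime):

Gson gson = new GsonBuilder()
        .registerTypeAdapter(DateTime.class, new DateTimeConverter())
        .create();
// Parsed via the deserialize method shown above
DateTime parsed = gson.fromJson("\"2024-01-15T10:30:00.000Z\"", DateTime.class);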

From source file:hk.mcc.utils.applog2es.Main.java

private static void post2ES(List<AppLog> appLogs) {
    try {
        // on startup
        DateTimeFormatter sdf = ISODateTimeFormat.dateTime();
        Settings settings = Settings.settingsBuilder().put("cluster.name", "my-application").build();
        //Add transport addresses and do something with the client...
        Client client = TransportClient.builder().settings(settings).build()
                .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), 9300));

        XContentBuilder mapping = jsonBuilder().startObject().startObject("applog").startObject("properties")
                .startObject("Url").field("type", "string").field("index", "not_analyzed").endObject()
                .startObject("Event").field("type", "string").field("index", "not_analyzed").endObject()
                .startObject("ClassName").field("type", "string").field("index", "not_analyzed").endObject()
                .startObject("UserId").field("type", "string").field("index", "not_analyzed").endObject()
                .startObject("Application").field("type", "string").field("index", "not_analyzed").endObject()
                .startObject("ecid").field("type", "string").field("index", "not_analyzed").endObject()
                .endObject().endObject().endObject();

        PutMappingResponse putMappingResponse = client.admin().indices().preparePutMapping("applog")
                .setType("applog").setSource(mapping).execute().actionGet();
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        for (AppLog appLog : appLogs) {
            // either use client#prepare, or use Requests# to directly build index/delete requests
            if (appLog != null) {
                if (StringUtils.contains(appLog.getMessage(), "[CIMS_INFO] Filter Processing time")) {
                    String[] split = StringUtils.split(appLog.getMessage(), ",");
                    int elapsedTime = 0;
                    String url = "";
                    String event = "";
                    for (String token : split) {
                        if (StringUtils.contains(token, "elapsedTime")) {
                            elapsedTime = Integer.parseInt(StringUtils.substringAfter(token, "="));
                        } else if (StringUtils.contains(token, "with URL")) {
                            url = StringUtils.substringAfter(token, "=");
                        } else if (StringUtils.contains(token, "event")) {
                            event = StringUtils.substringAfter(token, "=");
                        }
                    }

                    bulkRequest.add(client.prepareIndex("applog", "applog").setSource(jsonBuilder()
                            .startObject().field("className", appLog.getClassName())
                            .field("logTime", appLog.getLogTime()).field("application", appLog.getApplication())
                            .field("code", appLog.getCode()).field("message", appLog.getMessage())
                            .field("ecid", appLog.getEcid())
                            .field("level", appLog.getLevel()).field("server", appLog.getServer())
                            .field("tid", appLog.getTid()).field("userId", appLog.getUserId())
                            .field("urls", url).field("elapsedTime", elapsedTime).field("events", event)
                            .endObject()));
                }
            }
        }
        BulkResponse bulkResponse = bulkRequest.get();
        if (bulkResponse.hasFailures()) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, bulkResponse.buildFailureMessage());
            // process failures by iterating through each bulk response item
        }

        // on shutdown
        client.close();
    } catch (UnknownHostException ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:hu.akarnokd.utils.xml.XElementBase.java

License:Apache License

/**
 * Convert the given date to string.
 * Always contains the milliseconds and timezone.
 * @param date the date, not null
 * @return the formatted date
 */
public static String formatDateTime(Date date) {
    return ISODateTimeFormat.dateTime().print(date.getTime());
}

From source file:hu.akarnokd.utils.xml.XElementBase.java

License:Apache License

/**
 * Parse an XSD dateTime.
 * @param date the date string
 * @return the date
 * @throws ParseException format exception
 */
public static Date parseDateTime(String date) throws ParseException {
    return ISODateTimeFormat.dateTime().parseDateTime(date).toDate();
}
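
Taken together, the two helpers above give a millisecond-precision round trip between java.util.Date and the XSD dateTime string form. A short usage sketch (parseDateTime declares a checked ParseException, so the caller handles or rethrows it; imports of java.util.Date and java.text.ParseException assumed):

public static void roundTripDemo() throws ParseException {
    Date original = new Date();
    String text = XElementBase.formatDateTime(original);   // e.g. "2024-01-15T10:30:00.000+01:00"
    Date restored = XElementBase.parseDateTime(text);      // restores the same instant
    System.out.println(text + " -> " + restored);
}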

From source file:io.apiman.manager.api.core.metrics.AbstractMetricsAccessor.java

License:Apache License

/**
 * Formats the given date including milliseconds and timezone offset.
 * @param date the date to format
 * @return the ISO-8601 formatted timestamp
 */
protected static String formatDateWithMillis(DateTime date) {
    return ISODateTimeFormat.dateTime().print(date);
}

From source file:io.apiman.manager.api.rest.impl.OrganizationResourceImpl.java

License:Apache License

/**
 * Parses a query param representing a date into an actual date object.
 * @param dateStr the date string from the query param
 * @param defaultDate the date to return if the string does not match a known format
 * @param floor whether a date-only value resolves to the start (floor) or the end of that day
 */
private static DateTime parseDate(String dateStr, DateTime defaultDate, boolean floor) {
    if ("now".equals(dateStr)) { //$NON-NLS-1$
        return DateTime.now();
    }
    if (dateStr.length() == 10) {
        DateTime parsed = ISODateTimeFormat.date().withZoneUTC().parseDateTime(dateStr);
        // If what we want is the floor, then just return it.  But if we want the
        // ceiling of the date, then we need to set the right params.
        if (!floor) {
            parsed = parsed.plusDays(1).minusMillis(1);
        }
        return parsed;
    }
    if (dateStr.length() == 20) {
        return ISODateTimeFormat.dateTimeNoMillis().withZoneUTC().parseDateTime(dateStr);
    }
    if (dateStr.length() == 24) {
        return ISODateTimeFormat.dateTime().withZoneUTC().parseDateTime(dateStr);
    }
    return defaultDate;
}
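
For reference, these are the three input shapes the method distinguishes by string length, plus the "now" shortcut (hypothetical values; parseDate is private to OrganizationResourceImpl, and defaultDate stands for any fallback DateTime):

parseDate("2024-01-15", defaultDate, true);               // 10 chars: date only, floor of the day in UTC
parseDate("2024-01-15T10:30:00Z", defaultDate, false);    // 20 chars: seconds precision, no millis
parseDate("2024-01-15T10:30:00.000Z", defaultDate, true); // 24 chars: full ISO timestamp with millis
parseDate("now", defaultDate, true);                      // the literal "now" returns DateTime.now()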

From source file:io.druid.query.expression.TimestampFormatExprMacro.java

License:Apache License

@Override
public Expr apply(final List<Expr> args) {
    if (args.size() < 1 || args.size() > 3) {
        throw new IAE("Function[%s] must have 1 to 3 arguments", name());
    }

    final Expr arg = args.get(0);
    final String formatString;
    final DateTimeZone timeZone;

    if (args.size() > 1) {
        Preconditions.checkArgument(args.get(1).isLiteral(), "Function[%s] format arg must be a literal",
                name());
        formatString = (String) args.get(1).getLiteralValue();
    } else {
        formatString = null;
    }

    if (args.size() > 2) {
        timeZone = ExprUtils.toTimeZone(args.get(2));
    } else {
        timeZone = DateTimeZone.UTC;
    }

    final DateTimeFormatter formatter = formatString == null ? ISODateTimeFormat.dateTime()
            : DateTimeFormat.forPattern(formatString).withZone(timeZone);

    class TimestampFormatExpr implements Expr {
        @Nonnull
        @Override
        public ExprEval eval(final ObjectBinding bindings) {
            return ExprEval.of(formatter.print(arg.eval(bindings).asLong()));
        }

        @Override
        public void visit(final Visitor visitor) {
            arg.visit(visitor);
            visitor.visit(this);
        }
    }

    return new TimestampFormatExpr();
}

From source file:io.druid.server.lookup.namespace.URIExtractionNamespaceCacheFactory.java

License:Apache License

@Override
public Callable<String> getCachePopulator(final String id, final URIExtractionNamespace extractionNamespace,
        final String lastVersion, final Map<String, String> cache) {
    final long lastCached = lastVersion == null ? JodaUtils.MIN_INSTANT : Long.parseLong(lastVersion);
    return new Callable<String>() {
        @Override
        public String call() {
            final boolean doSearch = extractionNamespace.getUriPrefix() != null;
            final URI originalUri = doSearch ? extractionNamespace.getUriPrefix()
                    : extractionNamespace.getUri();
            final SearchableVersionedDataFinder<URI> pullerRaw = pullers.get(originalUri.getScheme());
            if (pullerRaw == null) {
                throw new IAE("Unknown loader type[%s].  Known types are %s", originalUri.getScheme(),
                        pullers.keySet());
            }
            if (!(pullerRaw instanceof URIDataPuller)) {
                throw new IAE("Cannot load data from location [%s]. Data pulling from [%s] not supported",
                        originalUri, originalUri.getScheme());
            }
            final URIDataPuller puller = (URIDataPuller) pullerRaw;
            final URI uri;
            if (doSearch) {
                final Pattern versionRegex;

                if (extractionNamespace.getFileRegex() != null) {
                    versionRegex = Pattern.compile(extractionNamespace.getFileRegex());
                } else {
                    versionRegex = null;
                }
                uri = pullerRaw.getLatestVersion(extractionNamespace.getUriPrefix(), versionRegex);

                if (uri == null) {
                    throw new RuntimeException(new FileNotFoundException(
                            String.format("Could not find match for pattern `%s` in [%s] for %s", versionRegex,
                                    originalUri, extractionNamespace)));
                }
            } else {
                uri = extractionNamespace.getUri();
            }

            final String uriPath = uri.getPath();

            try {
                return RetryUtils.retry(new Callable<String>() {
                    @Override
                    public String call() throws Exception {
                        final String version = puller.getVersion(uri);
                        try {
                            long lastModified = Long.parseLong(version);
                            if (lastModified <= lastCached) {
                                final DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
                                log.debug(
                                        "URI [%s] for namespace [%s] was last modified [%s] but was last cached [%s]. Skipping ",
                                        uri.toString(), id, fmt.print(lastModified), fmt.print(lastCached));
                                return version;
                            }
                        } catch (NumberFormatException ex) {
                            log.debug(ex, "Failed to get last modified timestamp. Assuming no timestamp");
                        }
                        final ByteSource source;
                        if (CompressionUtils.isGz(uriPath)) {
                            // Simple gzip stream
                            log.debug("Loading gz");
                            source = new ByteSource() {
                                @Override
                                public InputStream openStream() throws IOException {
                                    return CompressionUtils.gzipInputStream(puller.getInputStream(uri));
                                }
                            };
                        } else {
                            source = new ByteSource() {
                                @Override
                                public InputStream openStream() throws IOException {
                                    return puller.getInputStream(uri);
                                }
                            };
                        }
                        final long lineCount = new MapPopulator<>(
                                extractionNamespace.getNamespaceParseSpec().getParser()).populate(source,
                                        cache);
                        log.info("Finished loading %d lines for namespace [%s]", lineCount, id);
                        return version;
                    }
                }, puller.shouldRetryPredicate(), DEFAULT_NUM_RETRIES);
            } catch (Exception e) {
                throw Throwables.propagate(e);
            }
        }
    };
}

From source file:io.druid.server.namespace.URIExtractionNamespaceFunctionFactory.java

License:Apache License

@Override
public Callable<String> getCachePopulator(final URIExtractionNamespace extractionNamespace,
        final String lastVersion, final Map<String, String> cache) {
    final long lastCached = lastVersion == null ? JodaUtils.MIN_INSTANT : Long.parseLong(lastVersion);
    return new Callable<String>() {
        @Override
        public String call() {
            final URI originalUri = extractionNamespace.getUri();
            final SearchableVersionedDataFinder<URI> pullerRaw = pullers.get(originalUri.getScheme());
            if (pullerRaw == null) {
                throw new IAE("Unknown loader type[%s].  Known types are %s", originalUri.getScheme(),
                        pullers.keySet());
            }
            if (!(pullerRaw instanceof URIDataPuller)) {
                throw new IAE("Cannot load data from location [%s]. Data pulling from [%s] not supported",
                        originalUri.toString(), originalUri.getScheme());
            }
            final URIDataPuller puller = (URIDataPuller) pullerRaw;
            final String versionRegex = extractionNamespace.getVersionRegex();
            final URI uri = pullerRaw.getLatestVersion(originalUri,
                    versionRegex == null ? null : Pattern.compile(versionRegex));
            if (uri == null) {
                throw new RuntimeException(new FileNotFoundException(
                        String.format("Could not find match for pattern `%s` in [%s] for %s", versionRegex,
                                originalUri, extractionNamespace)));
            }
            final String uriPath = uri.getPath();

            try {
                return RetryUtils.retry(new Callable<String>() {
                    @Override
                    public String call() throws Exception {
                        final String version = puller.getVersion(uri);
                        try {
                            long lastModified = Long.parseLong(version);
                            if (lastModified <= lastCached) {
                                final DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
                                log.debug(
                                        "URI [%s] for namespace [%s] was last modified [%s] but was last cached [%s]. Skipping ",
                                        uri.toString(), extractionNamespace.getNamespace(),
                                        fmt.print(lastModified), fmt.print(lastCached));
                                return version;
                            }
                        } catch (NumberFormatException ex) {
                            log.debug(ex, "Failed to get last modified timestamp. Assuming no timestamp");
                        }
                        final ByteSource source;
                        if (CompressionUtils.isGz(uriPath)) {
                            // Simple gzip stream
                            log.debug("Loading gz");
                            source = new ByteSource() {
                                @Override
                                public InputStream openStream() throws IOException {
                                    return CompressionUtils.gzipInputStream(puller.getInputStream(uri));
                                }
                            };
                        } else {
                            source = new ByteSource() {
                                @Override
                                public InputStream openStream() throws IOException {
                                    return puller.getInputStream(uri);
                                }
                            };
                        }
                        final long lineCount = new MapPopulator<>(
                                extractionNamespace.getNamespaceParseSpec().getParser()).populate(source,
                                        cache);
                        log.info("Finished loading %d lines for namespace [%s]", lineCount,
                                extractionNamespace.getNamespace());
                        return version;
                    }
                }, puller.shouldRetryPredicate(), DEFAULT_NUM_RETRIES);
            } catch (Exception e) {
                throw Throwables.propagate(e);
            }
        }
    };
}

From source file:io.druid.sql.http.SqlResource.java

License:Apache License

@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response doPost(final SqlQuery sqlQuery) throws SQLException, IOException {
    // This is not integrated with the experimental authorization framework.
    // (Non-trivial since we don't know the dataSources up-front)

    final PlannerResult plannerResult;
    final DateTimeZone timeZone;

    try (final DruidPlanner planner = plannerFactory.createPlanner(sqlQuery.getContext())) {
        plannerResult = planner.plan(sqlQuery.getQuery());
        timeZone = planner.getPlannerContext().getTimeZone();

        // Remember which columns are time-typed, so we can emit ISO8601 instead of millis values.
        final List<RelDataTypeField> fieldList = plannerResult.rowType().getFieldList();
        final boolean[] timeColumns = new boolean[fieldList.size()];
        final boolean[] dateColumns = new boolean[fieldList.size()];
        for (int i = 0; i < fieldList.size(); i++) {
            final SqlTypeName sqlTypeName = fieldList.get(i).getType().getSqlTypeName();
            timeColumns[i] = sqlTypeName == SqlTypeName.TIMESTAMP;
            dateColumns[i] = sqlTypeName == SqlTypeName.DATE;
        }

        final Yielder<Object[]> yielder0 = Yielders.each(plannerResult.run());

        try {
            return Response.ok(new StreamingOutput() {
                @Override
                public void write(final OutputStream outputStream) throws IOException, WebApplicationException {
                    Yielder<Object[]> yielder = yielder0;

                    try (final JsonGenerator jsonGenerator = jsonMapper.getFactory()
                            .createGenerator(outputStream)) {
                        jsonGenerator.writeStartArray();

                        while (!yielder.isDone()) {
                            final Object[] row = yielder.get();
                            jsonGenerator.writeStartObject();
                            for (int i = 0; i < fieldList.size(); i++) {
                                final Object value;

                                if (timeColumns[i]) {
                                    value = ISODateTimeFormat.dateTime()
                                            .print(Calcites.calciteTimestampToJoda((long) row[i], timeZone));
                                } else if (dateColumns[i]) {
                                    value = ISODateTimeFormat.dateTime()
                                            .print(Calcites.calciteDateToJoda((int) row[i], timeZone));
                                } else {
                                    value = row[i];
                                }

                                jsonGenerator.writeObjectField(fieldList.get(i).getName(), value);
                            }
                            jsonGenerator.writeEndObject();
                            yielder = yielder.next(null);
                        }

                        jsonGenerator.writeEndArray();
                        jsonGenerator.flush();

                        // End with CRLF
                        outputStream.write('\r');
                        outputStream.write('\n');
                    } finally {
                        yielder.close();
                    }
                }
            }).build();
        } catch (Throwable e) {
            // make sure to close yielder if anything happened before starting to serialize the response.
            yielder0.close();
            throw Throwables.propagate(e);
        }
    } catch (Exception e) {
        log.warn(e, "Failed to handle query: %s", sqlQuery);

        final Exception exceptionToReport;

        if (e instanceof RelOptPlanner.CannotPlanException) {
            exceptionToReport = new ISE("Cannot build plan for query: %s", sqlQuery.getQuery());
        } else {
            exceptionToReport = e;
        }

        return Response.serverError().type(MediaType.APPLICATION_JSON_TYPE)
                .entity(jsonMapper.writeValueAsBytes(QueryInterruptedException.wrapIfNeeded(exceptionToReport)))
                .build();
    }
}