Example usage for org.joda.time.format DateTimeFormat forPattern

List of usage examples for org.joda.time.format DateTimeFormat forPattern

Introduction

On this page you can find example usage for org.joda.time.format DateTimeFormat forPattern.

Prototype

public static DateTimeFormatter forPattern(String pattern) 

Document

Factory to create a formatter from a pattern string.
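
A minimal sketch of the factory in use (the pattern and values here are illustrative, not taken from the examples below):

DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm");
String printed = fmt.print(new DateTime(2014, 6, 30, 12, 30)); // "2014-06-30 12:30"
DateTime parsed = fmt.parseDateTime("2014-06-30 12:30");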

Usage

From source file: com.github.pockethub.android.util.TimeUtils.java

License: Apache License

public static String dateToString(Date value) {
    DateTimeFormatter formats = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
    return formats.print(value.getTime());
}
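
Because the 'Z' in this pattern is a quoted literal rather than a time-zone field, the printed fields are rendered in the JVM's default time zone even though the output ends in "Z". A sketch of one way to make the output genuinely UTC:

DateTimeFormatter utcFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZoneUTC();
String iso = utcFormat.print(value.getTime()); // fields now rendered in UTC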

From source file: com.github.pockethub.util.TimeUtils.java

License: Apache License

public static Date stringToDate(String value) {
    DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
    DateTime t = format.parseDateTime(value);
    return t.toDate();
}
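
Parsing has the mirror-image caveat: the literal 'Z' is consumed as plain text, so the remaining fields are interpreted in the default zone. A sketch of parsing the same format as UTC instead:

DateTime utc = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZoneUTC().parseDateTime(value);
Date date = utc.toDate();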

From source file: com.github.terma.logb.node.timestamper.RealTimestamper.java

License: Apache License

public RealTimestamper(String timestampFormat, String pattern) {
    this.pattern = Pattern.compile(pattern);
    this.formatter = DateTimeFormat.forPattern(timestampFormat).withZoneUTC();
}
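
DateTimeFormatter is immutable, so withZoneUTC() returns a new instance rather than modifying the receiver, and the formatter stored here can safely be shared across threads. A minimal sketch of that behavior:

DateTimeFormatter base = DateTimeFormat.forPattern("HH:mm:ss");
DateTimeFormatter utc = base.withZoneUTC(); // base itself is unchanged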

From source file: com.gooddata.util.ResultSetHelperService.java

License: Apache License

private String handleDate(ResultSet rs, int columnIndex) throws SQLException {
    java.sql.Date date = rs.getDate(columnIndex);
    String value = null;
    if (date != null) {
        DateTimeFormatter dateFormat = DateTimeFormat.forPattern("dd-MMM-yyyy");
        value = dateFormat.print(new DateTime(date));
    }
    return value;
}
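
The month abbreviation produced by MMM is locale-sensitive, defaulting to the JVM's locale. A sketch pinning the locale so output such as "30-Jun-2014" stays stable across environments (the fixed locale is an assumption, not part of the original code):

DateTimeFormatter dateFormat = DateTimeFormat.forPattern("dd-MMM-yyyy").withLocale(Locale.ENGLISH);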

From source file: com.gooddata.util.ResultSetHelperService.java

License: Apache License

private String handleTimestamp(Timestamp timestamp) {
    DateTimeFormatter timeFormat = DateTimeFormat.forPattern("dd-MMM-yyyy HH:mm:ss");
    return timestamp == null ? null : timeFormat.print(new DateTime(timestamp));
}
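
Since Joda-Time formatters are immutable and thread-safe, a common refactor (a sketch, not from the original class) is to build the formatter once instead of on every call:

private static final DateTimeFormatter TIME_FORMAT = DateTimeFormat.forPattern("dd-MMM-yyyy HH:mm:ss");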

From source file: com.goodhuddle.huddle.web.site.handlebars.helper.DateTimeHelper.java

License: Open Source License

private String format(ReadableInstant value, Options options, DateTimeFormatter defaultFormat) {
    DateTimeFormatter formatter = defaultFormat;
    String pattern = options.param(0, null);
    if (pattern != null) {
        formatter = DateTimeFormat.forPattern(pattern);
    }
    return formatter.print(value);
}
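
forPattern throws IllegalArgumentException for an invalid pattern, which matters here because the pattern comes from template parameters. A sketch of guarding the call (hypothetical error handling, not from the original helper):

try {
    formatter = DateTimeFormat.forPattern(pattern);
} catch (IllegalArgumentException e) {
    formatter = defaultFormat; // fall back rather than failing the render
}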

From source file: com.google.cloud.dataflow.sdk.runners.DataflowPipelineRunner.java

License: Apache License

@Override
public DataflowPipelineJob run(Pipeline pipeline) {
    logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline);

    LOG.info("Executing pipeline on the Dataflow Service, which will have billing implications "
            + "related to Google Compute Engine usage and other Google Cloud Services.");

    List<DataflowPackage> packages = options.getStager().stageFiles();

    // Set a unique client_request_id in the CreateJob request.
    // This is used to ensure idempotence of job creation across retried
    // attempts to create a job. Specifically, if the service returns a job with
    // a different client_request_id, it means the returned one is a different
    // job previously created with the same job name, and that the job creation
    // has been effectively rejected. The SDK should return
    // Error::Already_Exists to user in that case.
    int randomNum = new Random().nextInt(9000) + 1000;
    String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssmmm").withZone(DateTimeZone.UTC)
            .print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum;
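    // Note: in Joda-Time 'Y' is year-of-era (equal to the calendar year for CE
    // dates) and the trailing "mmm" repeats minute-of-hour rather than printing
    // milliseconds ('SSS' would); the id only needs to be reasonably unique per
    // request, so the quirk is harmless here.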

    // Try to create a debuggee ID. This must happen before the job is translated since it may
    // update the options.
    DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
    maybeRegisterDebuggee(dataflowOptions, requestId);

    JobSpecification jobSpecification = translator.translate(pipeline, this, packages);
    Job newJob = jobSpecification.getJob();
    newJob.setClientRequestId(requestId);

    String version = DataflowReleaseInfo.getReleaseInfo().getVersion();
    System.out.println("Dataflow SDK version: " + version);

    newJob.getEnvironment().setUserAgent(DataflowReleaseInfo.getReleaseInfo());
    // The Dataflow Service may write to the temporary directory directly, so
    // must be verified.
    if (!Strings.isNullOrEmpty(options.getTempLocation())) {
        newJob.getEnvironment()
                .setTempStoragePrefix(dataflowOptions.getPathValidator().verifyPath(options.getTempLocation()));
    }
    newJob.getEnvironment().setDataset(options.getTempDatasetId());
    newJob.getEnvironment().setExperiments(options.getExperiments());

    // Set the Docker container image that executes Dataflow worker harness, residing in Google
    // Container Registry. Translator is guaranteed to create a worker pool prior to this point.
    String workerHarnessContainerImage = options.as(DataflowPipelineWorkerPoolOptions.class)
            .getWorkerHarnessContainerImage();
    for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) {
        workerPool.setWorkerHarnessContainerImage(workerHarnessContainerImage);
    }

    // Requirements about the service.
    Map<String, Object> environmentVersion = new HashMap<>();
    environmentVersion.put(PropertyNames.ENVIRONMENT_VERSION_MAJOR_KEY, ENVIRONMENT_MAJOR_VERSION);
    newJob.getEnvironment().setVersion(environmentVersion);
    // Default jobType is JAVA_BATCH_AUTOSCALING: A Java job with workers that the job can
    // autoscale if specified.
    String jobType = "JAVA_BATCH_AUTOSCALING";

    if (options.isStreaming()) {
        jobType = "STREAMING";
    }
    environmentVersion.put(PropertyNames.ENVIRONMENT_VERSION_JOB_TYPE_KEY, jobType);

    if (hooks != null) {
        hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment());
    }

    if (!Strings.isNullOrEmpty(options.getDataflowJobFile())) {
        runJobFileHooks(newJob);
    }
    if (hooks != null && !hooks.shouldActuallyRunJob()) {
        return null;
    }

    String jobIdToUpdate = null;
    if (options.getUpdate()) {
        jobIdToUpdate = getJobIdFromName(options.getJobName());
        newJob.setTransformNameMapping(options.getTransformNameMapping());
        newJob.setReplaceJobId(jobIdToUpdate);
    }
    Job jobResult;
    try {
        jobResult = dataflowClient.projects().jobs().create(options.getProject(), newJob).execute();
    } catch (GoogleJsonResponseException e) {
        String errorMessages = "Unexpected errors";
        if (e.getDetails() != null) {
            if (Utf8.encodedLength(newJob.toString()) >= CREATE_JOB_REQUEST_LIMIT_BYTES) {
                errorMessages = "The size of the serialized JSON representation of the pipeline "
                        + "exceeds the allowable limit. "
                        + "For more information, please check the FAQ link below:\n"
                        + "https://cloud.google.com/dataflow/faq";
            } else {
                errorMessages = e.getDetails().getMessage();
            }
        }
        throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e);
    } catch (IOException e) {
        throw new RuntimeException("Failed to create a workflow job", e);
    }

    // Obtain all of the extractors from the PTransforms used in the pipeline so the
    // DataflowPipelineJob has access to them.
    AggregatorPipelineExtractor aggregatorExtractor = new AggregatorPipelineExtractor(pipeline);
    Map<Aggregator<?, ?>, Collection<PTransform<?, ?>>> aggregatorSteps = aggregatorExtractor
            .getAggregatorSteps();

    DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(aggregatorSteps,
            jobSpecification.getStepNames());

    // Use a raw client for post-launch monitoring, as status calls may fail
    // regularly and need not be retried automatically.
    DataflowPipelineJob dataflowPipelineJob = new DataflowPipelineJob(options.getProject(), jobResult.getId(),
            Transport.newRawDataflowClient(options).build(), aggregatorTransforms);

    // If the service returned client request id, the SDK needs to compare it
    // with the original id generated in the request, if they are not the same
    // (i.e., the returned job is not created by this request), throw
    // DataflowJobAlreadyExistsException or DataflowJobAlreadyUpdatedException
    // depending on whether this is a reload or not.
    if (jobResult.getClientRequestId() != null && !jobResult.getClientRequestId().isEmpty()
            && !jobResult.getClientRequestId().equals(requestId)) {
        // If updating a job.
        if (options.getUpdate()) {
            throw new DataflowJobAlreadyUpdatedException(dataflowPipelineJob,
                    String.format(
                            "The job named %s with id: %s has already been updated into job id: %s "
                                    + "and cannot be updated again.",
                            newJob.getName(), jobIdToUpdate, jobResult.getId()));
        } else {
            throw new DataflowJobAlreadyExistsException(dataflowPipelineJob,
                    String.format("There is already an active job named %s with id: %s. If you want "
                            + "to submit a second job, try again by setting a different name using --jobName.",
                            newJob.getName(), jobResult.getId()));
        }
    }

    LOG.info("To access the Dataflow monitoring console, please navigate to {}",
            MonitoringUtil.getJobMonitoringPageURL(options.getProject(), jobResult.getId()));
    System.out.println("Submitted job: " + jobResult.getId());

    LOG.info("To cancel the job using the 'gcloud' tool, run:\n> {}",
            MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId()));

    return dataflowPipelineJob;
}

From source file: com.google.gerrit.server.config.ScheduleConfig.java

License: Apache License

private static long initialDelay(Config rc, String section, String subsection, String keyStartTime,
        DateTime now, long interval) {
    long delay = MISSING_CONFIG;
    String start = rc.getString(section, subsection, keyStartTime);
    try {
        if (start != null) {
            DateTimeFormatter formatter;
            MutableDateTime startTime = now.toMutableDateTime();
            try {
                formatter = ISODateTimeFormat.hourMinute();
                LocalTime firstStartTime = formatter.parseLocalTime(start);
                startTime.hourOfDay().set(firstStartTime.getHourOfDay());
                startTime.minuteOfHour().set(firstStartTime.getMinuteOfHour());
            } catch (IllegalArgumentException e1) {
                formatter = DateTimeFormat.forPattern("E HH:mm").withLocale(Locale.US);
                LocalDateTime firstStartDateTime = formatter.parseLocalDateTime(start);
                startTime.dayOfWeek().set(firstStartDateTime.getDayOfWeek());
                startTime.hourOfDay().set(firstStartDateTime.getHourOfDay());
                startTime.minuteOfHour().set(firstStartDateTime.getMinuteOfHour());
            }
            startTime.secondOfMinute().set(0);
            startTime.millisOfSecond().set(0);
            long s = startTime.getMillis();
            long n = now.getMillis();
            delay = (s - n) % interval;
            if (delay <= 0) {
                delay += interval;
            }
        } else {
            log.info(MessageFormat.format("{0} schedule parameter \"{0}.{1}\" is not configured", section,
                    keyStartTime));
        }
    } catch (IllegalArgumentException e2) {
        log.error(MessageFormat.format("Invalid {0} schedule parameter \"{0}.{1}\"", section, keyStartTime),
                e2);
        delay = INVALID_CONFIG;
    }
    return delay;
}
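
A sketch of the two start-time formats the fallback above accepts (the values are illustrative):

LocalTime daily = ISODateTimeFormat.hourMinute().parseLocalTime("00:30");
LocalDateTime weekly = DateTimeFormat.forPattern("E HH:mm").withLocale(Locale.US).parseLocalDateTime("Sat 23:45");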

From source file: com.google.maps.internal.LocalTimeAdapter.java

License: Open Source License

/**
 * Read a time from the Places API and convert to a {@link LocalTime}.
 */
@Override
public LocalTime read(JsonReader reader) throws IOException {
    if (reader.peek() == JsonToken.NULL) {
        reader.nextNull();
        return null;
    }

    if (reader.peek() == JsonToken.STRING) {
        DateTimeFormatter dtf = DateTimeFormat.forPattern("HHmm");
        return LocalTime.parse(reader.nextString(), dtf);
    }

    throw new UnsupportedOperationException("Unsupported format");
}
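
A sketch of the accepted input, assuming the zero-padded 24-hour strings the Places API returns:

LocalTime opens = LocalTime.parse("0800", DateTimeFormat.forPattern("HHmm")); // 08:00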

From source file: com.google.samples.apps.iosched.explore.ExploreSessionsFragment.java

License: Open Source License

@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
    DateTimeFormatter formatter = DateTimeFormat.forPattern("E dd.MMMM yyyy");
    DateTime dateToUse = null;
    DateTime startDate = null;
    DateTime endDate = null;
    if (mSessionDate != null) {
        dateToUse = formatter.parseDateTime(mSessionDate);
        startDate = dateToUse.plusHours(0);
        endDate = dateToUse.plusHours(23);
    }
    switch (id) {
    case ExploreSessionsQuery.NORMAL_TOKEN:
        return new CursorLoader(getActivity(), mCurrentUri, ExploreSessionsQuery.NORMAL_PROJECTION,
                mSessionDate != null ? ScheduleContract.Sessions.STARTING_AT_TIME_INTERVAL_SELECTION : null,
                mSessionDate != null ? new String[] { startDate.getMillis() + "", endDate.getMillis() + "" }
                        : null,
                ScheduleContract.Sessions.SORT_BY_TYPE_THEN_TIME);
    case ExploreSessionsQuery.SEARCH_TOKEN:
        return new CursorLoader(getActivity(), mCurrentUri, ExploreSessionsQuery.SEARCH_PROJECTION,
                mSessionDate != null ? ScheduleContract.Sessions.STARTING_AT_TIME_INTERVAL_SELECTION : null,
                mSessionDate != null ? new String[] { startDate.getMillis() + "", endDate.getMillis() + "" }
                        : null,
                ScheduleContract.Sessions.SORT_BY_TYPE_THEN_TIME);
    case TAG_METADATA_TOKEN:
        return TagMetadata.createCursorLoader(getActivity());
    default:
        return null;
    }
}
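
Two details worth noting above: plusHours(0) is a no-op, and the resulting window stops at 23:00 rather than covering the whole day. A sketch of an alternative using Joda-Time's day arithmetic (assuming a full-day window is intended):

startDate = dateToUse.withTimeAtStartOfDay();
endDate = startDate.plusDays(1).minusMillis(1);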