Example usage for org.joda.time Duration Duration

Introduction

On this page you can find example usages of the org.joda.time.Duration constructor, Duration(Object).

Prototype

public Duration(Object duration) 

Document

Creates a duration from the specified object using the org.joda.time.convert.ConverterManager.
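
A minimal usage sketch: the ConverterManager recognizes, among others, Long millisecond counts, ISO 8601 duration strings, and other ReadableDuration instances, and null converts to a zero-length duration.

    // from a millisecond count boxed as an Object
    Duration fromMillis = new Duration((Object) Long.valueOf(1500L));

    // from an ISO 8601 seconds-format string ("PTa.bS")
    Duration fromString = new Duration("PT1.5S");

    // null converts to a zero-length duration
    Duration zero = new Duration((Object) null);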

Usage

From source file:com.example.geomesa.kafka08.KafkaQuickStart.java

License:Open Source License

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart08";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // at this point the state of the two SimpleFeatures reflects real time
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // run the Java command with -Dclear=true to remove all features
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file:com.example.geomesa.kafka09.KafkaQuickStart.java

License:Open Source License

(This example is identical to the kafka08 example above, except that the feature type name is "KafkaQuickStart09".)

From source file:com.example.geomesa.kafka10.KafkaQuickStart.java

License:Open Source License

(This example is identical to the kafka08 example above, except that the feature type name is "KafkaQuickStart10".)

From source file:com.facebook.config.ConfigAccessor.java

License:Apache License

public Duration getDuration(String key, String defaultValue) {
    return new Duration(getDurationMillis(key, defaultValue));
}

From source file:com.facebook.stats.AbstractCompositeCounter.java

License:Apache License

public AbstractCompositeCounter(ReadableDuration maxLength) {
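    // the second Duration, one tenth of the window length, is presumably the bucket granularity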
    this(maxLength, new Duration(maxLength.getMillis() / 10));
}

From source file:com.fengduo.bee.commons.util.StringFormatter.java

License:Open Source License

public static String formatDuration(long durationMs, String prefix) {
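    // 'formatter' is presumably a PeriodFormatter field defined elsewhere in the class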
    Duration duration = new Duration(durationMs);
    String print = formatter.print(duration.toPeriod());
    return (prefix == null ? "" : prefix) + print + " (" + durationMs + ")ms";
}

From source file:com.funambol.common.pim.utility.TimeUtils.java

License:Open Source License

/**
 * Returns the given minutes in ISO 8601 duration format
 * @param minutes the number of minutes as a String
 * @return the ISO 8601 duration string, or null if minutes is -1
 */
public static String getIso8601Duration(String minutes) {

    if (minutes == null || minutes.equals("")) {
        return minutes;
    }

    int min = Integer.parseInt(minutes);

    if (min == -1) {
        return null;
    }

    long millis = min * 60L * 1000L;
    Duration d = new Duration(millis);

    PeriodFormatter formatter = ISOPeriodFormat.standard();
    return formatter.print(d.toPeriod());
}
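
For example, a usage sketch (not part of the original source):

    // 30 minutes = 1,800,000 ms; Duration.toPeriod() normalizes the
    // milliseconds into hours/minutes/seconds, so this prints "PT30M"
    System.out.println(TimeUtils.getIso8601Duration("30"));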

From source file:com.github.aneveux.euptime.EUptimeWidget.java

License:Open Source License

/**
 * Gets Eclipse's uptime in a pretty format using Joda-Time
 *
 * @return something like 1d4h3m2s describing Eclipse's uptime
 */
protected String getPrettyDuration() {
    final Duration duration = new Duration(System.currentTimeMillis() - Activator.getDefault().getStartTime());
    final PeriodFormatter formatter = new PeriodFormatterBuilder().appendDays().appendSuffix("d").appendHours()
            .appendSuffix("h").appendMinutes().appendSuffix("m").appendSeconds().appendSuffix("s")
            .toFormatter();
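    // note: Duration.toPeriod() fills only the precise time fields (hours,
    // minutes, seconds), so days stay zero and uptimes over 24 hours print
    // as e.g. "28h3m2s" rather than "1d4h3m2s"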
    return formatter.print(duration.toPeriod());
}

From source file:com.github.vase4kin.teamcityapp.utils.DateUtils.java

License:Apache License

/**
 * Formats the date for the Overview screen as "20 Apr 15 16:14:28 - 16:14:46 (18s)"
 */
public String formatDateToOverview() {
    String formattedStartDate = formatStartDateToBuildTitle();
    String formattedFinishedDate = formatFinishedDate();

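    // equivalent to new Duration(finishedDate.getTime() - startDate.getTime())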
    Duration duration = new Duration(new DateTime(finishedDate).minus(startDate.getTime()).getMillis());
    PeriodFormatter formatter = new PeriodFormatterBuilder().appendDays().appendSuffix("d:").appendHours()
            .appendSuffix("h:").appendMinutes().appendSuffix("m:").appendSeconds().appendSuffix("s")
            .toFormatter();
    String formatted = formatter.print(duration.toPeriod());
    return String.format("%s - %s (%s)", formattedStartDate, formattedFinishedDate, formatted);
}

From source file:com.google.cloud.examples.coinflow.sources.CoinbaseSocket.java

License:Open Source License

@Override
public Instant getWatermark() {
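    // Duration(1) is one millisecond: the watermark trails the most recent timestamp by 1 ms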
    return currentTimestamp.minus(new Duration(1));
}