List of usage examples for the org.joda.time Duration constructor
public Duration(Object duration)
From source file:org.jadira.usertype.dateandtime.joda.columnmapper.BigIntegerColumnDurationMapper.java
License:Apache License
/** Parses the ISO8601 string form of a duration into a Joda-Time {@link Duration}. */
@Override
public Duration fromNonNullString(String s) {
    // Duration(Object) dispatches to Joda's string converter for ISO8601 input.
    final Duration parsed = new Duration(s);
    return parsed;
}
From source file:org.jadira.usertype.dateandtime.joda.columnmapper.IntegerColumnDurationMapper.java
License:Apache License
/**
 * Converts a database integer column value (whole seconds) to a Joda-Time
 * {@link Duration}.
 *
 * @param value the number of seconds; per the method's name this should never be
 *              null, but a null is defensively mapped to a zero-length duration
 * @return the corresponding duration
 */
@Override
public Duration fromNonNullValue(Integer value) {
    // The multiplier is 1000L so the seconds value is widened to long before
    // multiplying, avoiding int overflow for large column values.
    // Duration.ZERO replaces the former `new Duration(null)`: Joda's null
    // converter also yields a zero-length duration, but the constant says so
    // explicitly instead of relying on that obscure conversion rule.
    return (value == null) ? Duration.ZERO : Duration.millis(1000L * value.intValue());
}
From source file:org.jadira.usertype.dateandtime.joda.columnmapper.LongColumnDurationMapper.java
License:Apache License
/**
 * Converts a database long column value (milliseconds) to a Joda-Time
 * {@link Duration}.
 *
 * @param value the number of milliseconds; per the method's name this should
 *              never be null, but a null is defensively mapped to a zero-length
 *              duration
 * @return the corresponding duration
 */
@Override
public Duration fromNonNullValue(Long value) {
    // Duration.ZERO replaces the former `new Duration(null)`: Joda's null
    // converter also yields a zero-length duration, but the constant makes that
    // intent explicit rather than relying on an obscure conversion rule.
    return (value == null) ? Duration.ZERO : Duration.millis(value);
}
From source file:org.jadira.usertype.dateandtime.joda.columnmapper.StringColumnDurationMapper.java
License:Apache License
/** Converts the ISO8601 string column value into a Joda-Time {@link Duration}. */
@Override
public Duration fromNonNullValue(String s) {
    // Duration(Object) parses the ISO8601 representation via Joda's converters.
    final Duration result = new Duration(s);
    return result;
}
From source file:org.kalypso.ui.rrm.internal.timeseries.view.imports.ValidateTimestepsVisitor.java
License:Open Source License
/**
 * Creates a validator for the given expected time step.
 *
 * @param timestep the expected interval between timeseries entries; must be
 *                 convertible to a standard number of seconds
 */
public ValidateTimestepsVisitor(final Period timestep) {
    // Multiply by 1000L (not int 1000): getSeconds() returns an int, and
    // int * 1000 overflows for periods longer than ~24.8 days
    // (Integer.MAX_VALUE milliseconds).
    m_duration = new Duration(timestep.toStandardSeconds().getSeconds() * 1000L);
}
From source file:org.kitodo.config.ConfigCore.java
License:Open Source License
/**
 * Requests a Duration parameter from the configuration.
 *
 * @param key parameter whose value is to be returned
 * @param timeUnit the unit in which the configured numeric value is expressed
 * @return the configured value as a Joda-Time {@link Duration}
 */
public static Duration getDurationParameter(ParameterCore key, TimeUnit timeUnit) {
    long configuredValue = getLongParameterOrDefaultValue(key);
    // Normalize the configured amount to milliseconds, which is what the
    // Duration(long) constructor expects.
    long millis = TimeUnit.MILLISECONDS.convert(configuredValue, timeUnit);
    return new Duration(millis);
}
From source file:org.kitodo.production.helper.tasks.EmptyTask.java
License:Open Source License
/** * The function getDurationDead() returns the duration the task is dead. If * a time of death has not yet been recorded, null is returned. * * @return the duration since the task died *///from w w w . j av a 2s . c o m Duration getDurationDead() { if (Objects.isNull(passedAway)) { return null; } long elapsed = System.nanoTime() - passedAway; return new Duration(TimeUnit.MILLISECONDS.convert(elapsed, TimeUnit.NANOSECONDS)); }
From source file:org.ldp4j.application.data.Literals.java
License:Apache License
/**
 * Creates a {@link DurationLiteral} for the given amount of time.
 *
 * @param time the amount of time, expressed in {@code unit}
 * @param unit the unit of {@code time}; may not be null
 * @return a literal wrapping the equivalent Joda-Time duration
 */
public static DurationLiteral duration(long time, TimeUnit unit) {
    // `time` is a primitive long and can never be null, so the former
    // checkNotNull(time, TIME_CANNOT_BE_NULL) was dead code — it merely
    // autoboxed the value and could never fail.
    checkNotNull(unit, TIME_UNIT_CANNOT_BE_NULL);
    return of(new Duration(TimeUnit.MILLISECONDS.convert(time, unit)));
}
From source file:org.locationtech.geomesa.examples.KafkaQuickStart.java
License:Open Source License
public static void main(String[] args) throws Exception { // read command line args for a connection to Kafka CommandLineParser parser = new BasicParser(); Options options = getCommonRequiredOptions(); CommandLine cmd = parser.parse(options, args); // create the producer and consumer KafkaDataStore objects Map<String, String> dsConf = getKafkaDataStoreConf(cmd); dsConf.put("isProducer", "true"); DataStore producerDS = DataStoreFinder.getDataStore(dsConf); dsConf.put("isProducer", "false"); DataStore consumerDS = DataStoreFinder.getDataStore(dsConf); // verify that we got back our KafkaDataStore objects properly if (producerDS == null) { throw new Exception("Null producer KafkaDataStore"); }/*from w w w. j a v a 2 s. co m*/ if (consumerDS == null) { throw new Exception("Null consumer KafkaDataStore"); } // create the schema which creates a topic in Kafka // (only needs to be done once) final String sftName = "KafkaQuickStart"; final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326"; SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema); // set zkPath to default if not specified String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH); SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath); // only create the schema if it hasn't been created already if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName)) producerDS.createSchema(preppedOutputSft); System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)"); System.in.read(); // the live consumer must be created before the producer writes features // in order to read streaming data. // i.e. 
the live consumer will only read data written after its instantiation SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName); SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName); // creates and adds SimpleFeatures to the producer every 1/5th of a second System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes"); Instant replayStart = new Instant(); addSimpleFeatures(sft, producerFS); Instant replayEnd = new Instant(); // read from Kafka after writing all the features. // LIVE CONSUMER - will obtain the current state of SimpleFeatures System.out.println("\nConsuming with the live consumer..."); SimpleFeatureCollection featureCollection = consumerFS.getFeatures(); System.out.println(featureCollection.size() + " features were written to Kafka"); // the state of the two SimpleFeatures is real time here System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:"); SimpleFeatureIterator featureIterator = featureCollection.features(); SimpleFeature feature1 = featureIterator.next(); SimpleFeature feature2 = featureIterator.next(); featureIterator.close(); printFeature(feature1); printFeature(feature2); // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time // Replay consumer requires a ReplayConfig which takes a time range and a // duration of time to process System.out.println("\nConsuming with the replay consumer..."); Duration readBehind = new Duration(1000); // 1 second readBehind ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind); SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc); producerDS.createSchema(replaySFT); SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName()); // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd. 
// the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures // by processing all of the messages that were sent in between queryTime-readBehind and queryTime. // only the messages in between replayStart and replayEnd are cached. Instant queryTime = replayEnd.minus(5000); featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime)); System.out.println(featureCollection.size() + " features were written to Kafka"); System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:"); featureIterator = featureCollection.features(); feature1 = featureIterator.next(); feature2 = featureIterator.next(); featureIterator.close(); printFeature(feature1); printFeature(feature2); System.exit(0); }
From source file:org.opencastproject.metadata.dublincore.EncodingSchemeUtils.java
License:Educational Community License
/**
 * Encodes a duration measured in milliseconds into a Dublin Core string using
 * the {@link DublinCore#ENC_SCHEME_ISO8601} encoding scheme
 * <code>PTnHnMnS</code>. The language of the returned value is
 * {@link DublinCore#LANGUAGE_UNDEFINED}. See
 * <a href="http://en.wikipedia.org/wiki/ISO_8601#Durations">ISO8601
 * Durations</a> for details.
 *
 * @param duration the duration in milliseconds
 */
public static DublinCoreValue encodeDuration(long duration) {
    // Convert millis -> Duration -> Period, then print as an ISO8601 period.
    String iso8601 = ISOPeriodFormat.standard().print(new Duration(duration).toPeriod());
    return new DublinCoreValue(iso8601, DublinCore.LANGUAGE_UNDEFINED, DublinCore.ENC_SCHEME_ISO8601);
}