List of usage examples for the org.joda.time.Period constructor
public Period(Object period)
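A minimal standalone sketch of the two constructor uses that recur in the examples below: parsing an ISO-8601 string (which dispatches to Period(Object)) and wrapping a millisecond value. Class name and sample values are illustrative, not taken from any project listed here.

import org.joda.time.Period;
import org.joda.time.format.ISOPeriodFormat;

public class PeriodConstructionExample {
    public static void main(String[] args) {
        // From an ISO-8601 duration string (a String argument resolves to Period(Object)).
        Period fromText = new Period("PT1H30M");
        System.out.println(fromText.getHours() + "h " + fromText.getMinutes() + "m"); // 1h 30m

        // From a millisecond value; with the standard period type only the
        // time fields (hours, minutes, seconds, millis) are populated.
        Period fromMillis = new Period(5400000L);
        System.out.println(ISOPeriodFormat.standard().print(fromMillis)); // PT1H30M
    }
}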
From source file:nu.yona.app.utils.AppUtils.java
/**
 * Gets time for otp.
 *
 * @param time the time
 * @return the time for otp
 */
public static Pair<String, Long> getTimeForOTP(String time) {
    try {
        StringBuffer buffer = new StringBuffer();
        long totalTime = 0;
        Period period = new Period(time);
        if (period.getHours() > 0) {
            totalTime += period.getHours() * AppConstant.ONE_SECOND * 60 * 60;
            buffer.append(YonaApplication.getAppContext().getString(R.string.hours, period.getHours() + ""));
        }
        if (period.getMinutes() > 0) {
            totalTime += period.getMinutes() * AppConstant.ONE_SECOND * 60;
            buffer.append(YonaApplication.getAppContext().getString(R.string.minute, period.getMinutes() + ""));
        }
        if (period.getSeconds() > 0) {
            totalTime += period.getSeconds() * AppConstant.ONE_SECOND;
            buffer.append(YonaApplication.getAppContext().getString(R.string.seconds, period.getSeconds() + ""));
        }
        return Pair.create(buffer.toString(), totalTime);
    } catch (Exception e) {
        AppUtils.reportException(AppUtils.class.getSimpleName(), e, Thread.currentThread());
    }
    return Pair.create(time, (long) 0);
}
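The helper above both formats the parsed period for display and converts it to milliseconds. A standalone sketch of just the conversion step, assuming AppConstant.ONE_SECOND equals 1000 ms (the constant's actual value is not shown in this listing); class and constant names are illustrative.

import org.joda.time.Period;

public class OtpTimeExample {
    private static final long ONE_SECOND = 1000L; // assumed value of AppConstant.ONE_SECOND

    public static void main(String[] args) {
        Period period = new Period("PT1H30M"); // e.g. an OTP validity window
        long totalMillis = period.getHours() * ONE_SECOND * 60 * 60
                + period.getMinutes() * ONE_SECOND * 60
                + period.getSeconds() * ONE_SECOND;
        System.out.println(totalMillis); // 5400000
    }
}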
From source file:org.addhen.birudo.util.AppUtil.java
License:Apache License
public static String formattedDuration(long duration) {
    PeriodFormatter hoursMinutes = new PeriodFormatterBuilder()
            .appendHours().appendSuffix(" hr", " hrs").appendSeparator(" ")
            .appendMinutes().appendSuffix(" min", " mins").appendSeparator(" ")
            .appendSeconds().appendSuffix(" sec", " secs")
            .toFormatter();
    Period p = new Period(duration);
    return hoursMinutes.print(p);
}
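A hedged usage sketch: new Period(long) with the standard period type fills only the time fields, so a 90-minute millisecond value should print as hours and minutes. The demo class is illustrative and assumes the AppUtil class above is on the classpath.

import org.addhen.birudo.util.AppUtil;

public class FormattedDurationDemo {
    public static void main(String[] args) {
        // 5,400,000 ms = 1 hour 30 minutes; zero-valued seconds are normally not printed,
        // so the output should be along the lines of "1 hr 30 mins".
        System.out.println(AppUtil.formattedDuration(5400000L));
    }
}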
From source file:org.agilewiki.jasocket.cluster.WhoerAgent.java
License:Open Source License
@Override
public void start(final RP<Jid> rp) throws Exception {
    PrintJid out = PrintJid.newPrintJid(this);
    ConcurrentHashMap<String, Interpreter> interpreters = agentChannelManager().interpreters;
    Iterator<String> it = interpreters.keySet().iterator();
    while (it.hasNext()) {
        String id = it.next();
        Interpreter interpreter = interpreters.get(id);
        out.println(interpreter.getOperatorName() + " " + agentChannelManager().agentChannelManagerAddress()
                + " " + id + " " + ISOPeriodFormat.standard().print(new Period(interpreter.getLogonTime()))
                + " " + interpreter.getCommandCount() + " "
                + ISOPeriodFormat.standard().print(new Period(interpreter.getIdleTime())));
    }
    rp.processResponse(out);
}
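This entry and the two that follow share one pattern: wrap an elapsed-millisecond value in a Period and print it with ISOPeriodFormat.standard(). A minimal sketch of just that pattern (the elapsed value and class name are illustrative):

import org.joda.time.Period;
import org.joda.time.format.ISOPeriodFormat;

public class ElapsedTimeExample {
    public static void main(String[] args) {
        long elapsedMillis = 7530000L; // e.g. time since logon: 2 h 5 m 30 s
        // Period(long) with the standard period type populates only hours, minutes, seconds, millis.
        System.out.println(ISOPeriodFormat.standard().print(new Period(elapsedMillis))); // PT2H5M30S
    }
}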
From source file:org.agilewiki.jasocket.commands.LocalServersAgent.java
License:Open Source License
@Override
public void process(final RP<PrintJid> rp) throws Exception {
    GetLocalServers.req.send(this, agentChannelManager(), new RP<TreeMap<String, Server>>() {
        @Override
        public void processResponse(TreeMap<String, Server> response) throws Exception {
            Iterator<String> it = response.keySet().iterator();
            while (it.hasNext()) {
                String name = it.next();
                Server server = response.get(name);
                out.println(name + " " + server.getOperatorName() + " "
                        + ISOPeriodFormat.standard().print(new Period(System.currentTimeMillis() - server.startTime))
                        + " " + server.startupArgs());
            }
            rp.processResponse(out);
        }
    });
}
From source file:org.agilewiki.jasocket.sshd.SSHAgent.java
License:Open Source License
@Override
public void start(final RP<Jid> rp) throws Exception {
    final PrintJid out = PrintJid.newPrintJid(this);
    (new GetLocalServer(getNameJid().getValue())).send(this, agentChannelManager(), new RP<Server>() {
        @Override
        public void processResponse(Server response) throws Exception {
            if (response != null) {
                SSHServer sshServer = (SSHServer) response;
                ConcurrentHashMap<String, Interpreter> interpreters = agentChannelManager().interpreters;
                Iterator<String> it = interpreters.keySet().iterator();
                int count = 0;
                while (it.hasNext()) {
                    String id = it.next();
                    Interpreter interpreter = interpreters.get(id);
                    if (interpreter.isSSH())
                        count += 1;
                }
                out.println(agentChannelManager().agentChannelManagerAddress() + " " + sshServer.sshPort()
                        + " " + ISOPeriodFormat.standard().print(new Period(sshServer.runTime()))
                        + " " + count);
            }
            rp.processResponse(out);
        }
    });
}
From source file:org.apache.druid.indexing.common.actions.TaskActionTestKit.java
License:Apache License
@Override
public void before() {
    taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(new Period("PT24H")));
    testDerbyConnector = new TestDerbyConnector(Suppliers.ofInstance(new MetadataStorageConnectorConfig()),
            Suppliers.ofInstance(metadataStorageTablesConfig));
    final ObjectMapper objectMapper = new TestUtils().getTestObjectMapper();
    metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(objectMapper,
            metadataStorageTablesConfig, testDerbyConnector);
    taskLockbox = new TaskLockbox(taskStorage, metadataStorageCoordinator);
    metadataSegmentManager = new SQLMetadataSegmentManager(objectMapper,
            Suppliers.ofInstance(new MetadataSegmentManagerConfig()),
            Suppliers.ofInstance(metadataStorageTablesConfig), testDerbyConnector);
    taskActionToolbox = new TaskActionToolbox(taskLockbox, taskStorage, metadataStorageCoordinator,
            new NoopServiceEmitter(), EasyMock.createMock(SupervisorManager.class));
    testDerbyConnector.createDataSourceTable();
    testDerbyConnector.createPendingSegmentsTable();
    testDerbyConnector.createSegmentTable();
    testDerbyConnector.createRulesTable();
    testDerbyConnector.createConfigTable();
    testDerbyConnector.createTaskTables();
    testDerbyConnector.createAuditTable();
}
From source file:org.apache.druid.indexing.seekablestream.supervisor.SeekableStreamSupervisorTuningConfig.java
License:Apache License
static Duration defaultDuration(final Period period, final String theDefault) {
    return (period == null ? new Period(theDefault) : period).toStandardDuration();
}
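A hedged usage sketch of the same null-fallback idea, wrapped in an illustrative class so it runs outside Druid (the helper mirrors the source method; the surrounding class and values are assumptions):

import org.joda.time.Duration;
import org.joda.time.Period;

public class DefaultDurationExample {
    static Duration defaultDuration(final Period period, final String theDefault) {
        return (period == null ? new Period(theDefault) : period).toStandardDuration();
    }

    public static void main(String[] args) {
        // No period configured: fall back to the ISO-8601 default string.
        System.out.println(defaultDuration(null, "PT30M").getMillis());                // 1800000
        // Period configured: the default string is ignored.
        System.out.println(defaultDuration(new Period("PT5S"), "PT30M").getMillis());  // 5000
    }
}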
From source file:org.apache.druid.query.expression.ExprUtils.java
License:Apache License
static PeriodGranularity toPeriodGranularity(final Expr periodArg, @Nullable final Expr originArg,
        @Nullable final Expr timeZoneArg, final Expr.ObjectBinding bindings) {
    final Period period = new Period(periodArg.eval(bindings).asString());
    final DateTime origin;
    final DateTimeZone timeZone;

    if (timeZoneArg == null) {
        timeZone = null;
    } else {
        final String value = timeZoneArg.eval(bindings).asString();
        timeZone = value != null ? DateTimes.inferTzFromString(value) : null;
    }

    if (originArg == null) {
        origin = null;
    } else {
        Chronology chronology = timeZone == null ? ISOChronology.getInstanceUTC() : ISOChronology.getInstance(timeZone);
        final Object value = originArg.eval(bindings).value();
        if (value instanceof String && NullHandling.isNullOrEquivalent((String) value)) {
            // We get a blank string here when SQL-compatible null handling is enabled
            // and the expression contains an empty string for the origin,
            // e.g. timestamp_floor("__time",'PT1M','','UTC')
            origin = null;
        } else {
            origin = value != null ? new DateTime(value, chronology) : null;
        }
    }

    return new PeriodGranularity(period, origin, timeZone);
}
From source file:org.apache.druid.query.lookup.namespace.JdbcExtractionNamespace.java
License:Apache License
@JsonCreator
public JdbcExtractionNamespace(
        @NotNull @JsonProperty(value = "connectorConfig", required = true) final MetadataStorageConnectorConfig connectorConfig,
        @NotNull @JsonProperty(value = "table", required = true) final String table,
        @NotNull @JsonProperty(value = "keyColumn", required = true) final String keyColumn,
        @NotNull @JsonProperty(value = "valueColumn", required = true) final String valueColumn,
        @JsonProperty(value = "tsColumn", required = false) @Nullable final String tsColumn,
        @JsonProperty(value = "filter", required = false) @Nullable final String filter,
        @Min(0) @JsonProperty(value = "pollPeriod", required = false) @Nullable final Period pollPeriod) {
    this.connectorConfig = Preconditions.checkNotNull(connectorConfig, "connectorConfig");
    Preconditions.checkNotNull(connectorConfig.getConnectURI(), "connectorConfig.connectURI");
    this.table = Preconditions.checkNotNull(table, "table");
    this.keyColumn = Preconditions.checkNotNull(keyColumn, "keyColumn");
    this.valueColumn = Preconditions.checkNotNull(valueColumn, "valueColumn");
    this.tsColumn = tsColumn;
    this.filter = filter;
    this.pollPeriod = pollPeriod == null ? new Period(0L) : pollPeriod;
}
From source file:org.apache.hadoop.hive.druid.DruidStorageHandler.java
License:Apache License
private static HttpClient makeHttpClient(Lifecycle lifecycle) {
    final int numConnection = HiveConf.getIntVar(SessionState.getSessionConf(),
            HiveConf.ConfVars.HIVE_DRUID_NUM_HTTP_CONNECTION);
    final Period readTimeout = new Period(
            HiveConf.getVar(SessionState.getSessionConf(), HiveConf.ConfVars.HIVE_DRUID_HTTP_READ_TIMEOUT));
    LOG.info("Creating Druid HTTP client with {} max parallel connections and {}ms read timeout",
            numConnection, readTimeout.toStandardDuration().getMillis());
    return HttpClientInit.createClient(HttpClientConfig.builder().withNumConnections(numConnection)
            .withReadTimeout(new Period(readTimeout).toStandardDuration()).build(), lifecycle);
}