List of usage examples for org.joda.time DateTimeZone UTC
DateTimeZone UTC
To view the source code for org.joda.time DateTimeZone UTC, click the Source Link shown with each example.
From source file:com.alliander.osgp.webdevicesimulator.service.OslpChannelHandler.java
License:Open Source License
private static DateTime correctUsageUntilDate(final DateTime dateTimeUntil, final HistoryTermType termType) { final DateTime now = new DateTime(); if (dateTimeUntil.isAfter(now)) { if (termType == HistoryTermType.Short) { return now.hourOfDay().roundCeilingCopy(); } else {//from w w w. j a v a 2s . co m return now.withZone(localTimeZone).dayOfWeek().roundCeilingCopy().withZone(DateTimeZone.UTC); } } return dateTimeUntil; }
From source file:com.altoukhov.svsync.engines.Analyzer.java
License:Apache License
/**
 * Loads a snapshot from the on-disk cache when it is still fresh, otherwise scans the
 * file space and (when caching is enabled) persists the fresh snapshot back to disk.
 *
 * @param infoParams configuration map; the "cache-days" key (integer string) enables
 *        caching and sets its lifetime in days
 * @param snapshotFilePath path of the cached snapshot file
 * @param fileSpace the file space to scan when no fresh cache exists
 * @param filters filters applied during the scan
 * @return the cached or freshly scanned snapshot; may be null if the scan yields none
 */
private static Snapshot loadFromCacheOrScan(Map<String, String> infoParams, String snapshotFilePath,
        IScannableFileSpace fileSpace, Collection<String> filters) {
    final boolean cachingEnabled = infoParams.containsKey("cache-days");
    Snapshot snapshot = Snapshot.fromFile(snapshotFilePath);

    // Default to scanning; a still-valid cached snapshot suppresses the scan.
    boolean needsScan = true;
    if (snapshot != null && cachingEnabled) {
        final int cacheDays = Integer.parseInt(infoParams.get("cache-days"));
        final DateTime expiresAt = snapshot.getTimestamp().plusDays(cacheDays);
        needsScan = expiresAt.isBefore(DateTime.now(DateTimeZone.UTC));
    }

    if (needsScan) {
        snapshot = fileSpace.scan(filters);
        // Persist the fresh result so subsequent runs can reuse it.
        if (snapshot != null && cachingEnabled) {
            snapshot.toFile(snapshotFilePath);
        }
    }
    return snapshot;
}
From source file:com.altoukhov.svsync.Snapshot.java
License:Apache License
/**
 * Creates a snapshot over the given files and directories, stamped with the
 * current UTC time.
 *
 * @param files map of file path to file snapshot (stored as given, not copied)
 * @param dirs set of directory paths (stored as given, not copied)
 */
public Snapshot(Map<String, FileSnapshot> files, Set<String> dirs) {
    // Record creation time first so the timestamp reflects construction, in UTC.
    this.timestamp = DateTime.now(DateTimeZone.UTC);
    this.files = files;
    this.directories = dirs;
}
From source file:com.anrisoftware.simplerest.oanda.rest.AbstractInstrumentHistory.java
License:Open Source License
/**
 * Formats the given instant as an RFC 3339 / ISO 8601 date-time string in UTC.
 *
 * @param date the instant to format
 * @return the ISO date-time representation of {@code date}, e.g. 2020-01-01T00:00:00.000Z
 */
private String toRfc3339Date(DateTime date) {
    // Re-anchor the same instant in UTC before printing.
    final DateTime utcDate = new DateTime(date, DateTimeZone.UTC);
    return ISODateTimeFormat.dateTime().print(utcDate);
}
From source file:com.antsoft.framework.datetime.DateTimeTypeHandler.java
License:Apache License
/**
 * Converts a SQL timestamp to a Joda-Time {@code DateTime} in UTC.
 *
 * @param timestamp the SQL timestamp, possibly null (SQL NULL)
 * @return the equivalent UTC {@code DateTime}, or null when {@code timestamp} is null
 */
private static DateTime toDateTime(Timestamp timestamp) {
    // Null-safe: a SQL NULL column maps to a null DateTime.
    return timestamp == null ? null : new DateTime(timestamp.getTime(), DateTimeZone.UTC);
}
From source file:com.apm4all.tracy.RouteBuilder.java
License:Apache License
/**
 * Configures the Camel context for the Tracy web services: tracing, the REST DSL
 * endpoints (measurement, config, analysis, registry, capabilities, demo/simulation),
 * and the internal routes that generate, flush and ingest Tracy events into
 * Elasticsearch.
 *
 * <p>NOTE(review): the route ids/endpoints named "toogle..." look like a typo for
 * "toggle", but the strings are load-bearing (REST DSL targets reference them), so
 * they must not be renamed without updating every reference.</p>
 *
 * @throws Exception if route setup fails (Camel RouteBuilder contract)
 */
@Override
public void configure() throws Exception {
    // Tracing of exchanges is wired up but disabled by default.
    Tracer tracer = new Tracer();
    tracer.setTraceOutExchanges(true);
    tracer.setEnabled(false);
    // we configure the default trace formatter where we can
    // specify which fields we want in the output
    DefaultTraceFormatter formatter = new DefaultTraceFormatter();
    // formatter.setShowOutBody(true);
    // formatter.setShowOutBodyType(true);
    formatter.setShowBody(true);
    formatter.setShowBodyType(true);
    // set to use our formatter
    tracer.setFormatter(formatter);
    getContext().addInterceptStrategy(tracer);

    // configure we want to use servlet as the component for the rest DSL
    // and we enable json binding mode
    //netty4-http
    restConfiguration().component("servlet").bindingMode(RestBindingMode.json)
            // and output using pretty print
            .dataFormatProperty("prettyPrint", "true")
            // setup context path and port number that netty will use
            .contextPath("tws").port(8080)
            // add swagger api-doc out of the box
            .apiContextPath("/api-doc").apiProperty("api.title", "Tracy Web Services API")
            .apiProperty("api.version", "1.0.0")
            // and enable CORS
            .apiProperty("cors", "true");

    // REST API surface. Each verb+path below routes to a direct: endpoint or bean.
    rest().description("Tracy Web Service").consumes("application/json").produces("application/json")
            .get("/applications/{application}/tasks/{task}/measurement")
            .description("Get measurement for a Task").outType(TaskMeasurement.class).param()
            .name("application").type(path).description("The application to measure").dataType("string")
            .endParam().param().name("task").type(path).description("The task to measure").dataType("string")
            .endParam().to("direct:taskMeasurement")

            .get("/applications/{application}/measurement").description("Get measurement for an Application")
            .outType(ApplicationMeasurement.class).param().name("application").type(path)
            .description("The application to measure").dataType("string").endParam()
            .to("bean:applicationMeasurementService?method=getApplicationMeasurement(${header.application})")

            .post("/applications/{application}/tasks/{task}/config").description("Set Task config")
            .type(TaskConfig.class).param().name("application").type(path).description("The application")
            .dataType("string").endParam().param().name("task").type(path).description("The task")
            .dataType("string").endParam().to("bean:esTaskConfig?method=setTaskConfig")

            .get("/applications/{application}/tasks/{task}/config").description("Get Task config")
            .outType(TaskConfig.class).param().name("application").type(path).description("The application")
            .dataType("string").endParam().param().name("task").type(path).description("The task")
            .dataType("string").endParam().to("bean:esTaskConfig?method=getTaskConfig")

            // CORS preflight for the config endpoint is swallowed by direct:trash.
            .options("/applications/{application}/tasks/{task}/config").to("direct:trash")

            .get("/registry").description("Get Tracy Registry containing supported environments")
            .to("direct:registry")

            .get("/capabilities")
            .description("Get Server capabilities (Applications/Tasks supported and associated views)")
            .to("direct:capabilities")

            .get("/applications/{application}/tasks/{task}/analysis").description("Get analysis for a Task")
            .outType(TaskAnalysisFake.class).param().name("application").type(path)
            .description("The application to analyse").dataType("string").endParam().param().name("task")
            .type(path).description("The task to analyse").dataType("string").endParam().param()
            .name("earliest").type(query).description("The earliest time (in epoch msec)").dataType("integer")
            .endParam().param().name("latest").type(query).description("The latest time (in epoch msec)")
            .dataType("integer").endParam().param().name("filter").type(query)
            .description("The expression to filter analysis").dataType("string").endParam().param().name("sort")
            .type(query).description("The fields to sort by").dataType("string").endParam().param()
            .name("limit").type(query).defaultValue("20")
            .description("The number of records to analyse, i.e. page size, default is 20").dataType("integer")
            .endParam().param().name("offset").type(query).description("The page number").defaultValue("1")
            .dataType("integer").endParam().to("direct:taskAnalysis")

            .delete("/tracy").description("Delete all Tracy events stored in backed")
            .to("direct:flushTracyRequest")

            .post("/tracySimulation").description("Produce Tracy for simulation purposes")
            .to("direct:toogleTracySimulation")

            .get("/demo").to("direct:getSimulation")
            .post("/demo").to("direct:setSimulation");

    // Sink route for requests we deliberately ignore (e.g. OPTIONS preflight).
    from("direct:trash").stop();

    // GET /demo: report whether the Tracy demo simulation is currently enabled.
    from("direct:getSimulation").routeId("getSimulation").setBody(simple("")).process(new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            Map<String, Boolean> state = new HashMap<String, Boolean>();
            state.put("demo", tracySimulationEnabled);
            exchange.getIn().setBody(state);
        }
    });

    // POST /demo: set the simulation flag from the request body and echo the state back.
    from("direct:setSimulation").routeId("setSimulation")
            // .log("${body}")
            .process(new Processor() {
                @Override
                public void process(Exchange exchange) throws Exception {
                    // NOTE(review): unchecked cast — body is assumed to be {"demo": bool}.
                    Map<String, Boolean> state = (Map<String, Boolean>) exchange.getIn().getBody();
                    tracySimulationEnabled = state.get("demo");
                    state.put("demo", tracySimulationEnabled);
                    exchange.getIn().setBody(state);
                }
            });

    // POST /tracySimulation: flip the simulation flag and describe the new state.
    from("direct:toogleTracySimulation").routeId("toogleTracySimulation").setBody(simple(""))
            .process(new Processor() {
                @Override
                public void process(Exchange exchange) throws Exception {
                    String response;
                    tracySimulationEnabled = !tracySimulationEnabled;
                    if (tracySimulationEnabled) {
                        response = "Tracy simulation enabled";
                    } else {
                        response = "Tracy simulation disabled";
                    }
                    exchange.getIn().setBody(response);
                }
            });

    // Every second: propagate the simulation flag as a header, trigger flush handling,
    // and (when simulation is on) generate one synthetic Tracy event.
    from("quartz://everySecond?cron=0/1+*+*+*+*+?").routeId("everySecondTimer").process(new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            Map<String, Object> headers = exchange.getIn().getHeaders();
            // NOTE(review): new Boolean(...) is deprecated; Boolean.TRUE/FALSE would avoid allocation.
            if (tracySimulationEnabled) {
                headers.put(TRACY_SIMULATION_ENABLED, new Boolean(true));
            } else {
                headers.put(TRACY_SIMULATION_ENABLED, new Boolean(false));
            }
        }
    }).to("seda:flushTracy").choice().when(simple("${in.header.TRACY_SIMULATION_ENABLED} == true"))
            // .loop(100).to("seda:generateTracy")
            .to("seda:generateTracy") // To not loop
            .end();

    // Produces one synthetic Tracy trace (outer + inner span) with a weighted-random
    // HTTP status annotation and randomized latencies, then hands it to ingestion.
    from("seda:generateTracy").routeId("generateTracy").setBody(simple("")).process(new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            //TODO: Extract Tracy generation to a separate thread
            final String COMPONENT = "hello-tracy";
            final String OUTER = "serviceEndpoint";
            final String INNER = "dodgyBackend";
            int status = 200;
            // random is uniform over [1, 100]; the chain below carves it into buckets.
            long random = new Double(Math.random() * 100).longValue() + 1;
            if (random <= 80) {
                status = 200;
            } // 80% 200: OK
            else if (random > 99) {
                status = 202;
            } // 1% 202: Accepted
            else if (random > 97) {
                status = 429;
            } // 2% 429: Too many requests
            else if (random > 87) {
                status = 404;
            } // 10% 404: Not found
            else if (random > 84) {
                status = 401;
            } // 3% 401: Unauthorized
            else if (random > 82) {
                status = 400;
            } // 2% 400: Bad request
            else if (random > 81) {
                status = 307;
            } // 1% 307: Temp redirect
            else if (random > 80) {
                status = 500;
            } // 1% 500: Internal server error
            Tracy.setContext(null, null, COMPONENT);
            Tracy.before(OUTER);
            Tracy.annotate("status", status);
            Tracy.before(INNER);
            // long delayInMsec = new Double(Math.random() * 2).longValue() + 2;
            long delayInMsec = new Double(Math.random() * 200).longValue() + 100; // inner span: 100-300 ms
            Thread.sleep(delayInMsec);
            Tracy.after(INNER);
            // delayInMsec = new Double(Math.random() * 2).longValue() + 2;
            delayInMsec = new Double(Math.random() * 10).longValue() + 10; // outer tail: 10-20 ms
            Thread.sleep(delayInMsec);
            Tracy.after(OUTER);
            exchange.getIn().setBody(Tracy.getEventsAsJson());
            Tracy.clearContext();
        }
    }).to("seda:ingestTracy");

    // DELETE /tracy: just arm the flush flag; the quartz-driven flushTracy route acts on it.
    from("direct:flushTracyRequest").routeId("flushTracyRequest").process(new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            flushTracy = true;
        }
    }).setBody(simple("Flushed all Tracy events")).log("Flush request accepted");

    // Consumes the armed flush flag (clearing it) and, when set, issues an HTTP DELETE
    // against the tracy-* indices in Elasticsearch.
    from("seda:flushTracy").routeId("flushTracy")
            // .log("Flush request processing started")
            .process(new Processor() {
                @Override
                public void process(Exchange exchange) throws Exception {
                    Map<String, Object> headers = exchange.getIn().getHeaders();
                    if (flushTracy) {
                        headers.clear();
                        headers.put(FLUSH_TRACY, new Boolean(true));
                        flushTracy = false; // one-shot: reset so only a single flush fires
                    } else {
                        headers.clear();
                        headers.put(FLUSH_TRACY, new Boolean(false));
                    }
                    exchange.getIn().setBody("");
                }
            }).setHeader(Exchange.HTTP_METHOD, constant(org.apache.camel.component.http4.HttpMethods.DELETE))
            // .log("Flush request ready to be sent")
            .choice().when(simple("${in.header.FLUSH_TRACY} == true"))
            //TODO: Hanle 404 status (nothing to delete) gracefully
            .to("http4://localhost:9200/tracy-*/tracy")
            //TODO: Investigate why Camel ES Delete is not working
            // .setHeader(ElasticsearchConstants.PARAM_INDEX_NAME, simple("tracy-hello-tracy-*"))
            // .setHeader(ElasticsearchConstants.PARAM_INDEX_TYPE, simple("tracy"))
            // .to("elasticsearch://local?operation=DELETE");
            .log("Flush request sent").end();

    // Splits a batch of Tracy JSON frames, enriches each with @timestamp and ES index
    // metadata (daily index per component), and indexes it into Elasticsearch.
    from("seda:ingestTracy").routeId("ingestTracy")
            //TODO: If tracySegment instead of tracyFrame, split into Tracy frames (not required for MVC)
            .split(body())
            // .setHeader(ElasticsearchConstants.PARAM_INDEX_NAME, "tracy-" + simple("${body[component]}")
            .process(new Processor() {
                @Override
                public void process(Exchange exchange) throws Exception {
                    ObjectMapper m = new ObjectMapper();
                    JsonNode rootNode = m.readTree((String) exchange.getIn().getBody());
                    // Event time comes from the frame's msecBefore field, interpreted as UTC.
                    DateTime dt = new DateTime(rootNode.path("msecBefore").asLong(), DateTimeZone.UTC);
                    String esTimestamp = dt.toString("yyyy-MM-dd'T'HH:mm:ss.SSS");
                    ((ObjectNode) rootNode).put("@timestamp", esTimestamp);
                    // Index name: tracy-<component>-<yyyy.MM.dd> (one index per component per day).
                    StringBuilder index = new StringBuilder();
                    DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy.MM.dd");
                    String dateString = fmt.print(dt);
                    index.append("tracy-").append(rootNode.path("component").textValue()).append("-")
                            .append(dateString);
                    exchange.getIn().setHeader(ElasticsearchConstants.PARAM_INDEX_NAME, index.toString());
                    exchange.getIn().setHeader(ElasticsearchConstants.PARAM_INDEX_TYPE, "tracy");
                    // Document id: taskId_optId, presumably unique per frame — TODO confirm.
                    String indexId = rootNode.path("taskId").textValue() + "_"
                            + rootNode.path("optId").textValue();
                    exchange.getIn().setHeader(ElasticsearchConstants.PARAM_INDEX_ID, indexId);
                    exchange.getIn().setBody(m.writer().writeValueAsString(rootNode));
                }
            })
            // .log("${body}")
            // .log("${headers}")
            .to("elasticsearch://local?operation=INDEX");

    // GET /registry: hard-coded environment registry payload.
    from("direct:registry").routeId("registry").process(new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            ObjectMapper m = new ObjectMapper();
            Map<String, Object> registry = m.readValue(
                    "{\"environments\":[{\"name\":\"Local1\",\"servers\":[{\"url\":\"http://localhost:8080/tws/v1\"}]},{\"name\":\"Local2\",\"servers\":[{\"url\":\"http://localhost:8080/tws/v1\"}]}]}",
                    Map.class);
            exchange.getIn().setBody(registry);
        }
    });

    // GET /capabilities: hard-coded capabilities payload.
    from("direct:capabilities").routeId("capabilities").process(new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            ObjectMapper m = new ObjectMapper();
            Map<String, Object> capabilities = m.readValue(
                    "{\"capabilities\":{\"applications\":[{\"name\":\"appX\",\"views\":[{\"label\":\"Measurement\",\"name\":\"measurement\"}],\"tasks\":[{\"name\":\"taskX1\",\"views\":[{\"label\":\"Measurement\",\"name\":\"measurement\"}]}]}]}}",
                    Map.class);
            exchange.getIn().setBody(capabilities);
        }
    });

    // Measurement requests: 'demo-live' goes to Elasticsearch-backed beans,
    // 'demo-static' to canned data services.
    from("direct:taskMeasurement").routeId("taskMeasurement").choice()
            .when(simple("${in.header.application} contains 'demo-live'"))
            .bean("esTaskMeasurement", "getTaskMeasurement")
            .when(simple("${in.header.application} contains 'demo-static'"))
            .to("bean:taskMeasurementService?method=getTaskMeasurement(${header.application}, ${header.task})")
            .end();

    // Analysis requests: same live/static dispatch, with the full query-parameter set.
    from("direct:taskAnalysis").routeId("taskAnalysis")
            // .log("${headers}")
            .choice().when(simple("${in.header.application} contains 'demo-live'"))
            .bean("esTaskAnalysis", "getTaskAnalysis")
            .when(simple("${in.header.application} contains 'demo-static'"))
            .to("bean:taskAnalysisService?method=getTaskAnalysis"
                    + "(${header.application}, ${header.task}, ${header.earliest}, ${header.latest}, ${header.filter}, ${header.sort}, ${header.limit}, ${header.offset})")
            .end();
}
From source file:com.appdynamics.extensions.cloudwatch.metricsmanager.MetricsManager.java
License:Apache License
/** * Create a GetMetricStatisticsRequest for a particular namespace * @param namespace Name of the Namespace * @param metricName Name of the Metric * @param statisticsType Type of Statistics (i.e. Average, Sum) * @param dimensions List of dimensions used to filter metrics * @return GetMetricStatisticsRequest/*from www .ja v a 2 s .c om*/ */ protected GetMetricStatisticsRequest createGetMetricStatisticsRequest(String namespace, String metricName, String statisticsType, List<Dimension> dimensions) { GetMetricStatisticsRequest getMetricStatisticsRequest = new GetMetricStatisticsRequest() .withStartTime(DateTime.now(DateTimeZone.UTC).minusMinutes(10).toDate()).withNamespace(namespace) .withDimensions(dimensions).withPeriod(60).withMetricName(metricName).withStatistics(statisticsType) .withEndTime(DateTime.now(DateTimeZone.UTC).minusMinutes(5).toDate()); return getMetricStatisticsRequest; }
From source file:com.appdynamics.monitors.azure.statsCollector.AzureServiceBusStatsCollector.java
License:Apache License
/**
 * Builds the parameter map used to query Azure Service Bus metrics: subscription,
 * namespace, resource type, a comma-joined list of requested stats, and a
 * one-minute time window ending 15 minutes ago (formatted with DATE_FORMAT).
 *
 * @param azure the Azure connection descriptor supplying the subscription id
 * @param namespaceName the Service Bus namespace
 * @param resourceType the resource type being queried (e.g. queue/topic)
 * @param queueStats the stat names to request; nulls are skipped when joining
 * @return a map of template values keyed by placeholder name
 */
private Map<String, String> createValueMap(Azure azure, String namespaceName, String resourceType,
        Set<String> queueStats) {
    Map<String, String> valueMap = new HashMap<String, String>();
    valueMap.put("SubscriptionId", azure.getSubscriptionId());
    valueMap.put("NameSpace", namespaceName);
    valueMap.put("ResourceType", resourceType);
    String stats = Joiner.on(",").skipNulls().join(queueStats);
    valueMap.put("Stats", stats);
    // Build the formatter once; the original parsed DATE_FORMAT twice.
    // (Fully qualified to avoid requiring a new import.)
    final org.joda.time.format.DateTimeFormatter formatter = DateTimeFormat.forPattern(DATE_FORMAT);
    // Metrics lag: query a 1-minute window ending 15 minutes in the past.
    DateTime dateTime = new DateTime(DateTimeZone.UTC).minusMinutes(15);
    valueMap.put("StartTime", dateTime.minusMinutes(1).toString(formatter));
    valueMap.put("EndTime", dateTime.toString(formatter));
    return valueMap;
}
From source file:com.arpnetworking.clusteraggregator.aggregation.Aggregator.java
License:Apache License
/**
 * {@inheritDoc}
 *
 * <p>Handles the aggregator's mailbox: data samples are folded into buckets;
 * periodic BucketCheck ticks close out any buckets whose period plus timeout has
 * elapsed, compute the statistic over the unified data, and emit the result;
 * UpdateBookkeeper ticks re-announce this aggregator's lifecycle.</p>
 */
@Override
public void onReceive(final Object message) throws Exception {
    if (message instanceof AggregatedData) {
        // New sample: delegate to the bucketing logic.
        processAggregationMessage((AggregatedData) message);
    } else if (message instanceof BucketCheck) {
        if (_initialized) {
            // Buckets are ordered oldest-first; drain from the front while expired.
            while (_aggBuckets.size() > 0) {
                final AggregationBucket bucket = _aggBuckets.getFirst();
                // A bucket is closeable once its period plus the grace timeout has passed.
                if (bucket.getPeriodStart().plus(_period).plus(AGG_TIMEOUT).isBeforeNow()) {
                    _aggBuckets.removeFirst();
                    //The units may be different coming from different machines
                    //Need to unify them
                    final List<AggregatedData> aggData = AggDataUnifier.unify(bucket.getAggregatedData());
                    final Quantity computed = _statistic.calculateAggregations(aggData);
                    _log.info(String.format("Computed %s %s %s %s %s (%s) = %s", _cluster, _service, _metric,
                            _statistic, _period, bucket.getPeriodStart().withZone(DateTimeZone.UTC), computed));
                    // Publish the closed bucket's result to both downstream actors.
                    final AggregatedData result = _resultBuilder.setStart(bucket.getPeriodStart())
                            .setValue(computed).build();
                    _emitter.tell(result, getSelf());
                    _metricsListener.tell(result, getSelf());
                } else {
                    //Walk of the list is complete
                    break;
                }
            }
        }
    } else if (message instanceof UpdateBookkeeper) {
        // Only announce once we have a result builder to describe ourselves with.
        if (_resultBuilder != null) {
            _lifecycleTracker.tell(new AggregatorLifecycle.NotifyAggregatorStarted(_resultBuilder.build()),
                    getSelf());
        }
    } else {
        unhandled(message);
    }
}
From source file:com.arpnetworking.logback.serialization.BaseSerializationStrategy.java
License:Apache License
/**
 * Start writing the Steno JSON wrapper: opens the envelope object and writes the
 * "time" (ISO-8601, UTC), "name" and "level" fields.
 *
 * @param event Instance of <code>ILoggingEvent</code>.
 * @param eventName The name of the event.
 * @param jsonGenerator <code>JsonGenerator</code> instance.
 * @param objectMapper <code>ObjectMapper</code> instance.
 * @throws java.io.IOException If writing JSON fails.
 */
protected void startStenoWrapper(
        final ILoggingEvent event,
        final String eventName,
        final JsonGenerator jsonGenerator,
        final ObjectMapper objectMapper)
        throws IOException {
    // Resolve the Steno level and render the event timestamp in UTC up front.
    final StenoLevel level = StenoLevel.findByLogbackLevel(event.getLevel());
    final DateTime eventTime = new DateTime(event.getTimeStamp(), DateTimeZone.UTC);
    final String formattedTime = ISO_DATE_TIME_FORMATTER.print(eventTime);

    jsonGenerator.writeStartObject();
    jsonGenerator.writeObjectField("time", formattedTime);
    jsonGenerator.writeObjectField("name", eventName);
    jsonGenerator.writeObjectField("level", level.name());
}