List of usage examples for org.joda.time Instant getMillis
public long getMillis()
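Before the project-specific examples below, here is a minimal self-contained sketch of what getMillis() returns: the instant expressed as milliseconds since the Unix epoch (1970-01-01T00:00:00Z, UTC). The class name GetMillisExample is illustrative, not from any of the projects listed here.

import org.joda.time.Instant;

public class GetMillisExample {
    public static void main(String[] args) {
        // Capture the current instant and read it as epoch milliseconds.
        Instant now = Instant.now();
        long millis = now.getMillis();
        System.out.println("Epoch millis: " + millis);

        // getMillis() round-trips with the millisecond constructor.
        Instant copy = new Instant(millis);
        System.out.println("Same instant: " + copy.equals(now));
    }
}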
From source file:app.sunstreak.yourpisd.net.Student.java
License:Open Source License
public JSONObject getClassGrade(int classIndex, int termIndex) throws JSONException {
    String html = "";
    int classId = gradeSummary[classIndex][0];
    int termIndexOffset = 0;
    if (gradeSummary[classIndex][3] == CLASS_DISABLED_DURING_TERM)
        termIndexOffset = 4;
    termIndex -= termIndexOffset;
    if (hasClassGrade(classIndex, termIndex + termIndexOffset))
        return classGrades.get(classIndex).optJSONArray("terms").optJSONObject(termIndex);
    try {
        int termId = getTermIds(classId)[termIndex];
        html = getDetailedReport(classId, termId, studentId);
    } catch (IOException e) {
        e.printStackTrace();
    } catch (JSONException e) {
        e.printStackTrace();
    }
    // Parse the teacher name if not already there.
    try {
        classList.getJSONObject(classIndex).getString("teacher");
    } catch (JSONException e) {
        // Teacher was not found.
        String[] teacher = Parser.teacher(html);
        try {
            classList.getJSONObject(classIndex).put("teacher", teacher[0]);
            classList.getJSONObject(classIndex).put("teacherEmail", teacher[1]);
        } catch (JSONException f) {
            e.printStackTrace();
        }
    }
    JSONObject classGrade;
    try {
        classGrade = new JSONObject(classList.getJSONObject(getClassMatch()[classIndex]).toString());
        JSONArray termGrades = Parser.detailedReport(html);
        Object[] termCategory = Parser.termCategoryGrades(html);
        JSONArray termCategoryGrades = (JSONArray) termCategory[0];
        if ((Integer) termCategory[1] != -1)
            classGrade.getJSONArray("terms").getJSONObject(termIndex).put("average", termCategory[1]);
        classGrade.getJSONArray("terms").getJSONObject(termIndex).put("grades", termGrades);
        classGrade.getJSONArray("terms").getJSONObject(termIndex).put("categoryGrades", termCategoryGrades);
        Instant in = new Instant();
        classGrade.getJSONArray("terms").getJSONObject(termIndex).put("lastUpdated", in.getMillis());
        if (classGrades.indexOfKey(classIndex) < 0)
            classGrades.put(classIndex, classGrade);
        return classGrade.getJSONArray("terms").getJSONObject(termIndex);
    } catch (JSONException e) {
        System.err.println("Error: Class index = " + classIndex + "; JSON index = " + getClassMatch()[classIndex]
                + "; Term index = " + termIndex + ".");
        e.printStackTrace();
        return null;
    }
}
From source file:be.fedict.trust.xkms2.WSSecurityServerHandler.java
License:Open Source License
/**
 * Handles the inbound SOAP message. If a WS-Security header is present,
 * validates that the body and timestamp are signed. No validation of the
 * embedded certificate is performed.
 */
@SuppressWarnings("unchecked")
private void handleInboundDocument(SOAPPart document, SOAPMessageContext soapMessageContext) {
    LOG.debug("handle inbound document");
    WSSecurityEngine securityEngine = new WSSecurityEngine();
    WSSConfig wssConfig = WSSConfig.getNewInstance();
    securityEngine.setWssConfig(wssConfig);
    List<WSSecurityEngineResult> wsSecurityEngineResults;
    try {
        wsSecurityEngineResults = securityEngine.processSecurityHeader(document, null, null, null);
    } catch (WSSecurityException e) {
        LOG.debug("WS-Security error: " + e.getMessage(), e);
        throw createSOAPFaultException("The signature or decryption was invalid", "FailedCheck");
    }
    LOG.debug("results: " + wsSecurityEngineResults);
    if (null == wsSecurityEngineResults) {
        LOG.debug("No WS-Security header present");
        return;
    }
    LOG.debug("WS-Security header validation");
    // WS-Security timestamp validation
    WSSecurityEngineResult timeStampActionResult = WSSecurityUtil.fetchActionResult(wsSecurityEngineResults,
            WSConstants.TS);
    if (null == timeStampActionResult) {
        throw new SecurityException("no WS-Security timestamp result");
    }
    Timestamp receivedTimestamp = (Timestamp) timeStampActionResult.get(WSSecurityEngineResult.TAG_TIMESTAMP);
    if (null == receivedTimestamp) {
        throw new SecurityException("missing WS-Security timestamp");
    }
    Date created = receivedTimestamp.getCreated();
    DateTime createdDateTime = new DateTime(created);
    Instant createdInstant = createdDateTime.toInstant();
    Instant nowInstant = new DateTime().toInstant();
    long offset = Math.abs(createdInstant.getMillis() - nowInstant.getMillis());
    if (offset > maxWsSecurityTimestampOffset) {
        LOG.debug("timestamp offset: " + offset);
        LOG.debug("maximum allowed offset: " + maxWsSecurityTimestampOffset);
        throw createSOAPFaultException("WS-Security Created Timestamp offset exceeded", "FailedCheck");
    }
}
From source file:com.cloudera.api.ApiUtils.java
License:Apache License
/**
 * Calculate the fromDate.
 * If fromString is not provided, the fromDate is calculated
 * from the toDate and the window.
 * @param fromString A string representation of the from date.
 * @param toDate The to date for this period
 * @param window The duration of this period
 * @return the Date object that corresponds to the from date
 */
public static Date getFromDate(String fromString, Date toDate, Duration window) {
    Date fromDate = null;
    if (fromString != null) {
        fromDate = newDateFromString(fromString);
        Preconditions.checkArgument(fromDate.getTime() < toDate.getTime(),
                "Invalid period specified: 'to' must be later than 'from'.");
    } else {
        Instant fromInstant = new Instant(toDate.getTime()).minus(window);
        fromDate = new Date(fromInstant.getMillis());
    }
    return fromDate;
}
From source file:com.dataartisans.flink.dataflow.translation.wrappers.streaming.FlinkGroupAlsoByWindowWrapper.java
License:Apache License
@Override
public void processWatermark(Watermark mark) throws Exception {
    context.setCurrentInputWatermark(new Instant(mark.getTimestamp()));
    Multimap<K, TimerInternals.TimerData> timers = getTimersReadyToProcess(mark.getTimestamp());
    if (!timers.isEmpty()) {
        for (K key : timers.keySet()) {
            processKeyedWorkItem(KeyedWorkItems.<K, VIN>timersWorkItem(key, timers.get(key)));
        }
    }
    /*
     * This is to take into account the different semantics of the Watermark in Flink and
     * in Dataflow. To understand the reasoning behind the Dataflow semantics and its
     * watermark holding logic, see the documentation of
     * {@link WatermarkHold#addHold(ReduceFn.ProcessValueContext, boolean)}
     */
    long millis = Long.MAX_VALUE;
    for (FlinkStateInternals state : perKeyStateInternals.values()) {
        Instant watermarkHold = state.getWatermarkHold();
        if (watermarkHold != null && watermarkHold.getMillis() < millis) {
            millis = watermarkHold.getMillis();
        }
    }
    if (mark.getTimestamp() < millis) {
        millis = mark.getTimestamp();
    }
    context.setCurrentOutputWatermark(new Instant(millis));
    // Don't forget to re-emit the watermark for further operators down the line.
    // This is critical for jobs with multiple aggregation steps.
    // Imagine a job with a groupByKey() on key K1, followed by a map() that changes
    // the key K1 to K2, and another groupByKey() on K2. In this case, if the watermark
    // is not re-emitted, the second aggregation would never be triggered, and no result
    // will be produced.
    output.emitWatermark(new Watermark(millis));
}
From source file:com.dataartisans.flink.dataflow.translation.wrappers.streaming.io.UnboundedSourceWrapper.java
License:Apache License
@Override
public void run(SourceContext<WindowedValue<T>> ctx) throws Exception {
    if (!(ctx instanceof StreamSource.ManualWatermarkContext)) {
        throw new RuntimeException("We assume that all sources in Dataflow are EventTimeSourceFunction. "
                + "Apparently " + this.name + " is not. "
                + "Probably you should consider writing your own Wrapper for this source.");
    }
    context = (StreamSource.ManualWatermarkContext<WindowedValue<T>>) ctx;
    runtime = (StreamingRuntimeContext) getRuntimeContext();
    isRunning = true;

    reader = source.createReader(pipelineOptions, null);
    boolean inputAvailable = reader.start();

    setNextWatermarkTimer(this.runtime);

    try {
        while (isRunning) {
            if (!inputAvailable && isRunning) {
                // wait a bit until we retry to pull more records
                Thread.sleep(50);
                inputAvailable = reader.advance();
            }
            if (inputAvailable) {
                // get it and its timestamp from the source
                T item = reader.getCurrent();
                Instant timestamp = reader.getCurrentTimestamp();
                // write it to the output collector
                synchronized (ctx.getCheckpointLock()) {
                    context.collectWithTimestamp(makeWindowedValue(item, timestamp), timestamp.getMillis());
                }
                inputAvailable = reader.advance();
            }
        }
    } finally {
        reader.close();
    }
}
From source file:com.dataartisans.flink.dataflow.translation.wrappers.streaming.state.StateCheckpointWriter.java
License:Apache License
public StateCheckpointWriter setTimestamp(Instant timestamp) throws IOException {
    validate();
    output.writeLong(TimeUnit.MILLISECONDS.toMicros(timestamp.getMillis()));
    return this;
}
From source file:com.datastax.driver.extras.codecs.joda.InstantCodec.java
License:Apache License
@Override
public ByteBuffer serialize(Instant value, ProtocolVersion protocolVersion) {
    return value == null ? null : bigint().serializeNoBoxing(value.getMillis(), protocolVersion);
}
From source file:com.google.cloud.dataflow.examples.complete.game.HourlyTeamScore.java
License:Apache License
/**
 * Run a batch pipeline to do windowed analysis of the data.
 */
// [START DocInclude_HTSMain]
public static void main(String[] args) throws Exception {
    // Begin constructing a pipeline configured by commandline flags.
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    Pipeline pipeline = Pipeline.create(options);

    final Instant stopMinTimestamp = new Instant(minFmt.parseMillis(options.getStopMin()));
    final Instant startMinTimestamp = new Instant(minFmt.parseMillis(options.getStartMin()));

    // Read 'gaming' events from a text file.
    pipeline.apply(TextIO.Read.from(options.getInput()))
            // Parse the incoming data.
            .apply(ParDo.named("ParseGameEvent").of(new ParseEventFn()))
            // Filter out data before and after the given times so that it is not included
            // in the calculations. As we collect data in batches (say, by day), the batch for the day
            // that we want to analyze could potentially include some late-arriving data from the previous
            // day. If so, we want to weed it out. Similarly, if we include data from the following day
            // (to scoop up late-arriving events from the day we're analyzing), we need to weed out events
            // that fall after the time period we want to analyze.
            // [START DocInclude_HTSFilters]
            .apply("FilterStartTime", Filter.byPredicate(
                    (GameActionInfo gInfo) -> gInfo.getTimestamp() > startMinTimestamp.getMillis()))
            .apply("FilterEndTime", Filter.byPredicate(
                    (GameActionInfo gInfo) -> gInfo.getTimestamp() < stopMinTimestamp.getMillis()))
            // [END DocInclude_HTSFilters]
            // [START DocInclude_HTSAddTsAndWindow]
            // Add an element timestamp based on the event log, and apply fixed windowing.
            .apply("AddEventTimestamps", WithTimestamps.of((GameActionInfo i) -> new Instant(i.getTimestamp())))
            .apply(Window.named("FixedWindowsTeam").<GameActionInfo>into(
                    FixedWindows.of(Duration.standardMinutes(options.getWindowDuration()))))
            // [END DocInclude_HTSAddTsAndWindow]
            // Extract and sum teamname/score pairs from the event data.
            .apply("ExtractTeamScore", new ExtractAndSumScore("team"))
            .apply("WriteTeamScoreSums", new WriteWindowedToBigQuery<KV<String, Integer>>(
                    options.getTableName(), configureWindowedTableWrite()));

    pipeline.run();
}
From source file:com.google.cloud.dataflow.examples.opinionanalysis.IndexerPipelineUtils.java
License:Apache License
public static String buildBigQueryProcessedUrlsQuery(IndexerPipelineOptions options) {
    String timeWindow = null;
    if (options.getProcessedUrlHistorySec() != null) {
        if (options.getProcessedUrlHistorySec() != Integer.MAX_VALUE) {
            Instant fromTime = Instant.now();
            fromTime = fromTime.minus(options.getProcessedUrlHistorySec() * 1000L);
            Integer fromDateId = IdConverterUtils.getDateIdFromTimestamp(fromTime.getMillis());
            timeWindow = "PublicationDateId >= " + fromDateId;
        }
    }
    if (timeWindow != null)
        timeWindow = "WHERE " + timeWindow;
    String result = "SELECT Url, MAX(ProcessingTime) AS ProcessingTime\n"
            + "FROM " + options.getBigQueryDataset() + "." + WEBRESOURCE_TABLE + "\n"
            + timeWindow + "\n"
            + "GROUP BY Url";
    return result;
}
From source file:com.google.cloud.dataflow.examples.opinionanalysis.IndexerPipelineUtils.java
License:Apache License
public static String buildBigQueryProcessedDocsQuery(IndexerPipelineOptions options) {
    String timeWindow = null;
    if (options.getProcessedUrlHistorySec() != null) {
        if (options.getProcessedUrlHistorySec() != Integer.MAX_VALUE) {
            Instant fromTime = Instant.now();
            fromTime = fromTime.minus(options.getProcessedUrlHistorySec() * 1000L);
            Integer fromDateId = IdConverterUtils.getDateIdFromTimestamp(fromTime.getMillis());
            timeWindow = "PublicationDateId >= " + fromDateId;
        }
    }
    if (timeWindow != null)
        timeWindow = "WHERE " + timeWindow;
    String result = "SELECT DocumentHash, MAX(ProcessingTime) AS ProcessingTime\n"
            + "FROM " + options.getBigQueryDataset() + "." + DOCUMENT_TABLE + "\n"
            + timeWindow + "\n"
            + "GROUP BY DocumentHash";
    return result;
}