Usage examples for org.joda.time.Interval.toDuration(), collected from open-source projects.
public Duration toDuration()
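Before the project examples below, here is a minimal, self-contained sketch of the basic call (class name, dates, and printed values are illustrative and not taken from any of the listed projects): an Interval between two instants is converted into a Duration, which can then be read in standard units or in milliseconds.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.joda.time.Interval;

public class IntervalToDurationExample {
    public static void main(String[] args) {
        // Illustrative instants, 30.5 hours apart (UTC avoids daylight-saving surprises).
        DateTime start = new DateTime(2024, 1, 1, 0, 0, DateTimeZone.UTC);
        DateTime end = new DateTime(2024, 1, 2, 6, 30, DateTimeZone.UTC);

        // Convert the interval between the two instants into a Duration.
        Duration duration = new Interval(start, end).toDuration();

        System.out.println(duration.getStandardHours());   // 30
        System.out.println(duration.getStandardMinutes()); // 1830
        System.out.println(duration.getMillis());          // 109800000
    }
}

The "standard" accessors divide the duration's millisecond length by fixed-length units (60-second minutes, 60-minute hours), which is exactly what toDuration() produces from the interval's endpoints.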
From source file: org.jgrasstools.gears.io.timeseries.OmsTimeSeriesWriter.java
License: Open Source License
@Execute
public void write() throws IOException {
    ensureOpen();
    Set<Entry<DateTime, double[]>> entrySet = inData.entrySet();
    if (entrySet.isEmpty()) {
        throw new ModelsIllegalargumentException("The data to write are empty.", this);
    }
    Entry<DateTime, double[]> firstItem = entrySet.iterator().next();
    int cols = firstItem.getValue().length + 1;
    if (columns != null) {
        String[] colNames = columns.split(",");
        for (int i = 0; i < colNames.length; i++) {
            colNames[i] = colNames[i].trim();
        }
        memoryTable.setColumns(colNames);
    } else {
        String[] colNames = new String[cols];
        colNames[0] = "date";
        for (int i = 1; i < colNames.length; i++) {
            colNames[i] = "value_" + i;
        }
        memoryTable.setColumns(colNames);
    }

    if (inMetadata != null && inMetadata.size() > 0) {
        for (List<String> metadataRecord : inMetadata) {
            String metadataName = metadataRecord.get(0);
            for (int i = 1; i < metadataRecord.size(); i++) {
                memoryTable.getColumnInfo(i).put(metadataName, metadataRecord.get(i - 1));
            }
        }
    }

    if (doDates) {
        // add date metadata if they are not already provided
        boolean hasFormat = false;
        boolean hasType = false;
        if (inMetadata != null && inMetadata.size() > 0) {
            hasFormat = false;
            for (List<String> metadataRecord : inMetadata) {
                if (metadataRecord.contains("Format")) {
                    hasFormat = true;
                    break;
                }
            }
            hasType = false;
            for (List<String> metadataRecord : inMetadata) {
                if (metadataRecord.contains("Type")) {
                    hasType = true;
                    break;
                }
            }
        }
        if (!hasFormat) {
            memoryTable.getColumnInfo(1).put("Format", formatterPattern);
            for (int i = 2; i <= cols; i++) {
                memoryTable.getColumnInfo(i).put("Format", "");
            }
        }
        if (!hasType) {
            memoryTable.getColumnInfo(1).put("Type", "Date");
            for (int i = 2; i <= cols; i++) {
                memoryTable.getColumnInfo(i).put("Type", "");
            }
        }
    }

    for (Entry<DateTime, double[]> entry : entrySet) {
        Object[] valuesRow = new Object[cols];
        DateTime dateTime = entry.getKey();
        if (doDates) {
            valuesRow[0] = dateTime.toString(formatter);
        } else {
            Interval interval = new Interval(firstItem.getKey(), dateTime);
            long dt = interval.toDuration().getStandardSeconds();
            valuesRow[0] = dt;
        }
        double[] valuesArray = entry.getValue();
        for (int j = 0; j < valuesArray.length; j++) {
            valuesRow[j + 1] = valuesArray[j];
        }
        memoryTable.addRow(valuesRow);
    }
}
From source file: org.jgrasstools.hortonmachine.modules.hydrogeomorphology.peakflow.core.jeff.RealJeff.java
License: Open Source License
/**
 * @param rainfallMap the sorted map of rainfall values in time. <b>This has to be a sorted map.</b>
 */
public RealJeff(HashMap<DateTime, double[]> rainfallMap) {
    this.rainfallMap = rainfallMap;

    Set<Entry<DateTime, double[]>> entrySet = rainfallMap.entrySet();
    for (Entry<DateTime, double[]> entry : entrySet) {
        if (first == null) {
            first = entry.getKey();
        } else if (second == null) {
            second = entry.getKey();
            break;
        }
    }
    Interval interval = new Interval(first, second);
    rain_timestep = interval.toDuration().getStandardSeconds();
}
From source file: org.kalypso.ui.rrm.internal.calccase.CatchmentModelHelper.java
License: Open Source License
/**
 * This function compares the validity ranges of the generators.
 *
 * @param compareGenerator
 *            The compare generator.
 * @param generators
 *            All generators the compare generator will be compared against. If the compare generator is contained, it
 *            will be ignored.
 * @return <ul>
 *         <li>True: The validity range of the compare generator does not overlap the validity ranges of the other
 *         generators.</li>
 *         <li>False: The validity range of the compare generator overlaps one validity range of the other generators.</li>
 *         </ul>
 */
private static boolean compareGeneratorValidityOverlap(final IRainfallGenerator compareGenerator,
        final IRainfallGenerator[] generators) {
    /* No generators available, to compare to. */
    if (generators.length == 0)
        return true;

    /* The interval of the compare generator. */
    final Interval compareInterval = new Interval(new DateTime(compareGenerator.getValidFrom()),
            new DateTime(compareGenerator.getValidTo()));

    /* Check if the interval overlaps one of the other intervals. */
    for (final IRainfallGenerator generator : generators) {
        /* Do not compare the compare generator with itself. */
        if (compareGenerator.getId().equals(generator.getId()))
            continue;

        /* The interval of the generator. */
        final Interval interval = new Interval(new DateTime(generator.getValidFrom()),
                new DateTime(generator.getValidTo()));
        if (compareInterval.overlaps(interval)) {
            final Interval overlapInterval = compareInterval.overlap(interval);
            final Duration overlapDuration = overlapInterval.toDuration();
            final long standardMinutes = overlapDuration.getStandardMinutes();
            if (standardMinutes > 0)
                return false;
        }
    }

    return true;
}
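Distilled from the Kalypso example above, a minimal, self-contained sketch of the overlaps()/overlap()/toDuration() pattern (class name and interval endpoints are illustrative):

import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.Interval;

public class OverlapDurationSketch {
    public static void main(String[] args) {
        Interval a = new Interval(new DateTime(2024, 1, 1, 0, 0), new DateTime(2024, 1, 10, 0, 0));
        Interval b = new Interval(new DateTime(2024, 1, 8, 0, 0), new DateTime(2024, 1, 20, 0, 0));

        if (a.overlaps(b)) {
            // overlap() returns the shared interval, here 2024-01-08 to 2024-01-10 (2 days).
            Duration overlapDuration = a.overlap(b).toDuration();
            System.out.println(overlapDuration.getStandardMinutes()); // 2880
        }
    }
}

Guarding with overlaps() matters because overlap() returns null when the intervals do not overlap, so calling toDuration() on the result unconditionally could throw a NullPointerException.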
From source file: org.kuali.kpme.tklm.time.rules.shiftdifferential.service.ShiftDifferentialRuleServiceImpl.java
License: Educational Community License
@Override public void processShiftDifferentialRules(TimesheetDocument timesheetDocument, TkTimeBlockAggregate aggregate) { DateTimeZone zone = HrServiceLocator.getTimezoneService().getUserTimezoneWithFallback(); List<List<TimeBlock>> blockDays = aggregate.getDayTimeBlockList(); DateTime periodStartDateTime = timesheetDocument.getCalendarEntry().getBeginPeriodLocalDateTime() .toDateTime(zone);// w ww . ja v a2 s .c om Map<Long, Set<ShiftDifferentialRule>> jobNumberToShifts = getJobNumberToShiftRuleMap(timesheetDocument); // If there are no shift differential rules, we have an early exit. if (jobNumberToShifts.isEmpty()) { return; } // Get the last day of the previous pay period. We need this to determine // if there are hours from the previous pay period that will effect the // shift rule on the first day of the currently-being-processed pay period. // // Will be set to null if not applicable. boolean previousPayPeriodPrevDay = true; Map<Long, List<TimeBlock>> jobNumberToTimeBlocksPreviousDay = getPreviousPayPeriodLastDayJobToTimeBlockMap( timesheetDocument, jobNumberToShifts); // We are going to look at the time blocks grouped by Days. // // This is a very large outer loop. for (int pos = 0; pos < blockDays.size(); pos++) { List<TimeBlock> blocks = blockDays.get(pos); // Timeblocks for this day. if (blocks.isEmpty()) continue; // No Time blocks, no worries. DateTime currentDay = periodStartDateTime.plusDays(pos); Interval virtualDay = new Interval(currentDay, currentDay.plusHours(24)); // Builds our JobNumber to TimeBlock for Current Day List. // // Shift Differential Rules are also grouped by Job number, this // provides a quick way to do the lookup / reference. // We don't need every time block, only the ones that will be // applicable to the shift rules. Map<Long, List<TimeBlock>> jobNumberToTimeBlocks = new HashMap<Long, List<TimeBlock>>(); for (TimeBlock block : blocks) { Long jobNumber = block.getJobNumber(); if (jobNumberToShifts.containsKey(jobNumber)) { List<TimeBlock> jblist = jobNumberToTimeBlocks.get(jobNumber); if (jblist == null) { jblist = new ArrayList<TimeBlock>(); jobNumberToTimeBlocks.put(jobNumber, jblist); } jblist.add(block); } } // Large Outer Loop to look at applying the Shift Rules based on // the current JobNumber. // // This loop will handle previous day boundary time as well as the // current day. // // There is room for refactoring here! for (Map.Entry<Long, Set<ShiftDifferentialRule>> entry : jobNumberToShifts.entrySet()) { Set<ShiftDifferentialRule> shiftDifferentialRules = entry.getValue(); // Obtain and sort our previous and current time blocks. List<TimeBlock> ruleTimeBlocksPrev = null; List<TimeBlock> ruleTimeBlocksCurr = jobNumberToTimeBlocks.get(entry.getKey()); if (ruleTimeBlocksCurr != null && ruleTimeBlocksCurr.size() > 0) { if (jobNumberToTimeBlocksPreviousDay != null) ruleTimeBlocksPrev = jobNumberToTimeBlocksPreviousDay.get(entry.getKey()); if (ruleTimeBlocksPrev != null && ruleTimeBlocksPrev.size() > 0) this.sortTimeBlocksInverse(ruleTimeBlocksPrev); this.sortTimeBlocksNatural(ruleTimeBlocksCurr); } else { // Skip to next job, there is nothing for this job // on this day, and because of this we don't care // about the previous day either. 
continue; } for (ShiftDifferentialRule rule : shiftDifferentialRules) { Set<String> fromEarnGroup = HrServiceLocator.getEarnCodeGroupService() .getEarnCodeListForEarnCodeGroup(rule.getFromEarnGroup(), timesheetDocument .getCalendarEntry().getBeginPeriodFullDateTime().toLocalDate()); LocalTime ruleStart = new LocalTime(rule.getBeginTime(), zone); LocalTime ruleEnd = new LocalTime(rule.getEndTime(), zone); DateTime shiftEnd = ruleEnd.toDateTime(currentDay); DateTime shiftStart = ruleStart.toDateTime(currentDay); if (shiftEnd.isBefore(shiftStart) || shiftEnd.isEqual(shiftStart)) { shiftEnd = shiftEnd.plusDays(1); } Interval shiftInterval = new Interval(shiftStart, shiftEnd); // Set up buckets to handle previous days time accumulations BigDecimal hoursBeforeVirtualDay = BigDecimal.ZERO; // Check current day first block to see if start time gap from virtual day start is greater than max gap // if so, we can skip the previous day checks. TimeBlock firstBlockOfCurrentDay = null; for (TimeBlock b : ruleTimeBlocksCurr) { if (timeBlockHasEarnCode(fromEarnGroup, b)) { firstBlockOfCurrentDay = b; break; } } // Previous Day :: We have prior block container of nonzero size, and the previous day is active. Interval previousDayShiftInterval = new Interval(shiftStart.minusDays(1), shiftEnd.minusDays(1)); // Blank initialization pointer for picking which interval to pass to applyPremium() Interval evalInterval = null; if (ruleTimeBlocksPrev != null && ruleTimeBlocksPrev.size() > 0 && dayIsRuleActive(currentDay.minusDays(1), rule)) { // Simple heuristic to see if we even need to worry about // the Shift rule for this set of data. if (shiftEnd.isAfter(virtualDay.getEnd())) { // Compare first block of previous day with first block of current day for max gaptitude. TimeBlock firstBlockOfPreviousDay = null; for (TimeBlock b : ruleTimeBlocksPrev) { if (timeBlockHasEarnCode(fromEarnGroup, b)) { firstBlockOfPreviousDay = b; break; } } // Only if we actually have at least one block. // Adding Assumption: We must have both a valid current and previous block. Max Gap can not be more than a virtual day. // If this assumption does not hold, additional logic will be needed to iteratively go back in time to figure out which // blocks are valid. if ((firstBlockOfPreviousDay != null) && (firstBlockOfCurrentDay != null)) { Interval previousBlockInterval = new Interval( firstBlockOfPreviousDay.getEndDateTime().withZone(zone), firstBlockOfCurrentDay.getBeginDateTime().withZone(zone)); Duration blockGapDuration = previousBlockInterval.toDuration(); BigDecimal bgdHours = TKUtils.convertMillisToHours(blockGapDuration.getMillis()); // if maxGap is 0, ignore gaps and assign shift to time blocks within the hours if (rule.getMaxGap().compareTo(BigDecimal.ZERO) == 0 || bgdHours.compareTo(rule.getMaxGap()) <= 0) { // If we are here, we know we have at least one valid time block to pull some hours forward from. // These are inversely sorted. for (int i = 0; i < ruleTimeBlocksPrev.size(); i++) { TimeBlock b = ruleTimeBlocksPrev.get(i); if (timeBlockHasEarnCode(fromEarnGroup, b)) { Interval blockInterval = new Interval( b.getBeginDateTime().withZone(zone), b.getEndDateTime().withZone(zone)); // Calculate Block Gap, the duration between clock outs and clock ins of adjacent time blocks. 
if (previousBlockInterval != null) { blockGapDuration = new Duration(b.getEndDateTime().withZone(zone), previousBlockInterval.getStart()); bgdHours = TKUtils .convertMillisToHours(blockGapDuration.getMillis()); } // Check Gap, if good, sum hours, if maxGap is 0, ignore gaps if (rule.getMaxGap().compareTo(BigDecimal.ZERO) == 0 || bgdHours.compareTo(rule.getMaxGap()) <= 0) { // Calculate Overlap and add it to hours before virtual day bucket. if (blockInterval.overlaps(previousDayShiftInterval)) { BigDecimal hrs = TKUtils.convertMillisToHours(blockInterval .overlap(previousDayShiftInterval).toDurationMillis()); hoursBeforeVirtualDay = hoursBeforeVirtualDay.add(hrs); } } else { // Time blocks are reverse sorted, we can jump out as soon as the max gap is exceeded. break; } previousBlockInterval = blockInterval; } } } else { // DO NOTHING! } } } } BigDecimal hoursToApply = BigDecimal.ZERO; BigDecimal hoursToApplyPrevious = BigDecimal.ZERO; // If the hours before virtual day are less than or equal to // min hours, we have already applied the time, so we don't // set hoursToApplyPrevious if (hoursBeforeVirtualDay.compareTo(rule.getMinHours()) <= 0) { // we need to apply these hours. hoursToApplyPrevious = hoursBeforeVirtualDay; } // Current Day TimeBlock previous = null; // Previous Time Block List<TimeBlock> accumulatedBlocks = new ArrayList<TimeBlock>(); // TimeBlocks we MAY or MAY NOT apply Shift Premium to. List<Interval> accumulatedBlockIntervals = new ArrayList<Interval>(); // To save recompute time when checking timeblocks for application we store them as we create them. // Iterate over sorted list, checking time boundaries vs Shift Intervals. long accumulatedMillis = TKUtils.convertHoursToMillis(hoursBeforeVirtualDay); boolean previousDayOnly = false; // IF the rule is not active today, but was on the previous day, we need to still look at time blocks. if (!dayIsRuleActive(currentDay, rule)) { if (dayIsRuleActive(currentDay.minusDays(1), rule)) { previousDayOnly = true; } else { // Nothing to see here, move to next rule. continue; } } /* * We will touch each time block and accumulate time blocks that are applicable to * the current rule we are on. */ // These blocks are only used for detail application // We don't want to pass along the previous pay period, // because we don't want to modify the time blocks on that // period. If null is passed, time will be placed on the // first block of the first period if the previous period // block had influence. List<TimeBlock> previousBlocksFiltered = (previousPayPeriodPrevDay) ? null : filterBlocksByApplicableEarnGroup(fromEarnGroup, ruleTimeBlocksPrev); for (TimeBlock current : ruleTimeBlocksCurr) { if (!timeBlockHasEarnCode(fromEarnGroup, current)) { // TODO: WorkSchedule considerations somewhere in here? continue; } Interval blockInterval = new Interval(current.getBeginDateTime().withZone(zone), current.getEndDateTime().withZone(zone)); // Check both Intervals, since the time blocks could still // be applicable to the previous day. These two intervals should // not have any overlap. if (previousDayShiftInterval.overlaps(shiftInterval)) { LOG.error("Interval of greater than 24 hours created in the rules processing."); return; // throw new RuntimeException("Interval of greater than 24 hours created in the rules processing."); } // This block of code handles cases where you have time // that spills to multiple days and a shift rule that // has a valid window on multiple consecutive days. Time // must be applied with the correct shift interval. 
Interval overlap = previousDayShiftInterval.overlap(blockInterval); evalInterval = previousDayShiftInterval; if (overlap == null) { if (hoursToApplyPrevious.compareTo(BigDecimal.ZERO) > 0) { // we have hours from previous day, and the shift // window is going to move to current day. // Need to apply this now, and move window forward // for current time block. BigDecimal accumHours = TKUtils.convertMillisToHours(accumulatedMillis); this.applyAccumulatedWrapper(accumHours, evalInterval, accumulatedBlockIntervals, accumulatedBlocks, previousBlocksFiltered, hoursToApplyPrevious, hoursToApply, rule); accumulatedMillis = 0L; // reset accumulated hours.. hoursToApply = BigDecimal.ZERO; hoursToApplyPrevious = BigDecimal.ZERO; } // Because of our position in the loop, when we are at this point, // we know we've passed any previous day shift intervals, so we can // determine if we should skip the current day based on the boolean // we set earlier. if (previousDayOnly) { continue; } overlap = shiftInterval.overlap(blockInterval); evalInterval = shiftInterval; } // Time bucketing and application as normal: // if (overlap != null) { // There IS overlap. if (previous != null) { // only check max gap if max gap of rule is not 0 if (rule.getMaxGap().compareTo(BigDecimal.ZERO) != 0 && exceedsMaxGap(previous, current, rule.getMaxGap())) { BigDecimal accumHours = TKUtils.convertMillisToHours(accumulatedMillis); this.applyAccumulatedWrapper(accumHours, evalInterval, accumulatedBlockIntervals, accumulatedBlocks, previousBlocksFiltered, hoursToApplyPrevious, hoursToApply, rule); accumulatedMillis = 0L; // reset accumulated hours.. hoursToApply = BigDecimal.ZERO; hoursToApplyPrevious = BigDecimal.ZERO; } else { long millis = overlap.toDurationMillis(); accumulatedMillis += millis; hoursToApply = hoursToApply.add(TKUtils.convertMillisToHours(millis)); } } else { // Overlap shift at first time block. long millis = overlap.toDurationMillis(); accumulatedMillis += millis; hoursToApply = hoursToApply.add(TKUtils.convertMillisToHours(millis)); } accumulatedBlocks.add(current); accumulatedBlockIntervals.add(blockInterval); previous = current; // current can still apply to next. } else { // No Overlap / Outside of Rule if (previous != null) { BigDecimal accumHours = TKUtils.convertMillisToHours(accumulatedMillis); this.applyAccumulatedWrapper(accumHours, evalInterval, accumulatedBlockIntervals, accumulatedBlocks, previousBlocksFiltered, hoursToApplyPrevious, hoursToApply, rule); accumulatedMillis = 0L; // reset accumulated hours.. hoursToApply = BigDecimal.ZERO; hoursToApplyPrevious = BigDecimal.ZERO; } } } // All time blocks are iterated over, check for remainders. // Check containers for time, and apply if needed. BigDecimal accumHours = TKUtils.convertMillisToHours(accumulatedMillis); this.applyAccumulatedWrapper(accumHours, evalInterval, accumulatedBlockIntervals, accumulatedBlocks, previousBlocksFiltered, hoursToApplyPrevious, hoursToApply, rule); } } // Keep track of previous as we move day by day. jobNumberToTimeBlocksPreviousDay = jobNumberToTimeBlocks; previousPayPeriodPrevDay = false; } }
From source file: org.sleuthkit.autopsy.timeline.db.EventDB.java
License: Open Source License
/**
 * merge the events in the given list if they are within the same period
 * General algorithm is as follows:
 *
 * 1) sort them into a map from (type, description) -> List<aggevent>
 * 2) for each key in map, merge the events and accumulate them in a list to
 *    return
 *
 * @param timeUnitLength
 * @param preMergedEvents
 *
 * @return
 */
static private List<EventStripe> mergeClustersToStripes(Period timeUnitLength,
        List<EventCluster> preMergedEvents) {

    //effectively map from type to (map from description to events)
    Map<EventType, SetMultimap<String, EventCluster>> typeMap = new HashMap<>();

    for (EventCluster aggregateEvent : preMergedEvents) {
        typeMap.computeIfAbsent(aggregateEvent.getEventType(), eventType -> HashMultimap.create())
                .put(aggregateEvent.getDescription(), aggregateEvent);
    }

    //result list to return
    ArrayList<EventCluster> aggEvents = new ArrayList<>();

    //For each (type, description) key, merge agg events
    for (SetMultimap<String, EventCluster> descrMap : typeMap.values()) {
        //for each description ...
        for (String descr : descrMap.keySet()) {
            //run through the sorted events, merging together adjacent events
            Iterator<EventCluster> iterator = descrMap.get(descr).stream()
                    .sorted(Comparator.comparing(event -> event.getSpan().getStartMillis())).iterator();
            EventCluster current = iterator.next();
            while (iterator.hasNext()) {
                EventCluster next = iterator.next();
                Interval gap = current.getSpan().gap(next.getSpan());

                //if they overlap or gap is less one quarter timeUnitLength
                //TODO: 1/4 factor is arbitrary. review! -jm
                if (gap == null || gap.toDuration()
                        .getMillis() <= timeUnitLength.toDurationFrom(gap.getStart()).getMillis() / 4) {
                    //merge them
                    current = EventCluster.merge(current, next);
                } else {
                    //done merging into current, set next as new current
                    aggEvents.add(current);
                    current = next;
                }
            }
            aggEvents.add(current);
        }
    }

    //merge clusters to stripes
    Map<ImmutablePair<EventType, String>, EventStripe> stripeDescMap = new HashMap<>();
    for (EventCluster eventCluster : aggEvents) {
        stripeDescMap.merge(ImmutablePair.of(eventCluster.getEventType(), eventCluster.getDescription()),
                new EventStripe(eventCluster), EventStripe::merge);
    }

    return stripeDescMap.values().stream().sorted(Comparator.comparing(EventStripe::getStartMillis))
            .collect(Collectors.toList());
}
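The merge step above measures the gap between adjacent clusters; here is a minimal, self-contained sketch of that pattern (class name and instants are illustrative):

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class GapDurationSketch {
    public static void main(String[] args) {
        Interval first = new Interval(new DateTime(2024, 1, 1, 10, 0), new DateTime(2024, 1, 1, 11, 0));
        Interval second = new Interval(new DateTime(2024, 1, 1, 11, 30), new DateTime(2024, 1, 1, 12, 0));

        // gap() is null if the intervals overlap or abut; otherwise it is the interval between them.
        Interval gap = first.gap(second);
        if (gap != null) {
            System.out.println(gap.toDuration().getMillis()); // 1800000 (30 minutes)
        }
    }
}

gap() returning null for overlapping or abutting intervals is why the merge loop above treats a null gap as "merge them".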
From source file: org.sleuthkit.autopsy.timeline.events.db.EventDB.java
License: Open Source License
/** * //TODO: update javadoc //TODO: split this into helper methods * * get a list of {@link AggregateEvent}s. * * General algorithm is as follows://w w w. j av a 2 s .c o m * * - get all aggregate events, via one db query. * - sort them into a map from (type, description)-> aggevent * - for each key in map, merge the events and accumulate them in a list * to return * * * @param timeRange the Interval within in which all returned aggregate * events will be. * @param filter only events that pass the filter will be included in * aggregates events returned * @param zoomLevel only events of this level will be included * @param lod description level of detail to use when grouping events * * * @return a list of aggregate events within the given timerange, that pass * the supplied filter, aggregated according to the given event type and * description zoom levels */ private List<AggregateEvent> getAggregatedEvents(Interval timeRange, Filter filter, EventTypeZoomLevel zoomLevel, DescriptionLOD lod) { String descriptionColumn = getDescriptionColumn(lod); final boolean useSubTypes = (zoomLevel.equals(EventTypeZoomLevel.SUB_TYPE)); //get some info about the time range requested RangeDivisionInfo rangeInfo = RangeDivisionInfo.getRangeDivisionInfo(timeRange); //use 'rounded out' range long start = timeRange.getStartMillis() / 1000;//.getLowerBound(); long end = timeRange.getEndMillis() / 1000;//Millis();//rangeInfo.getUpperBound(); if (Objects.equals(start, end)) { end++; } //get a sqlite srtftime format string String strfTimeFormat = getStrfTimeFormat(rangeInfo.getPeriodSize()); //effectively map from type to (map from description to events) Map<EventType, SetMultimap<String, AggregateEvent>> typeMap = new HashMap<>(); //get all agregate events in this time unit dbReadLock(); String query = "select strftime('" + strfTimeFormat + "',time , 'unixepoch'" + (TimeLineController.getTimeZone().get().equals(TimeZone.getDefault()) ? ", 'localtime'" : "") + ") as interval, group_concat(event_id) as event_ids, Min(time), Max(time), " + descriptionColumn + ", " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN) // NON-NLS + " from events where time >= " + start + " and time < " + end + " and " + getSQLWhere(filter) // NON-NLS + " group by interval, " + (useSubTypes ? SUB_TYPE_COLUMN : BASE_TYPE_COLUMN) + " , " + descriptionColumn // NON-NLS + " order by Min(time)"; // NON-NLS //System.out.println(query); ResultSet rs = null; try (Statement stmt = con.createStatement(); // scoop up requested events in groups organized by interval, type, and desription ) { Stopwatch stopwatch = new Stopwatch(); stopwatch.start(); rs = stmt.executeQuery(query); stopwatch.stop(); //System.out.println(stopwatch.elapsedMillis() / 1000.0 + " seconds"); while (rs.next()) { EventType type = useSubTypes ? 
RootEventType.allTypes.get(rs.getInt(SUB_TYPE_COLUMN)) : BaseTypes.values()[rs.getInt(BASE_TYPE_COLUMN)]; AggregateEvent aggregateEvent = new AggregateEvent( new Interval(rs.getLong("Min(time)") * 1000, rs.getLong("Max(time)") * 1000, TimeLineController.getJodaTimeZone()), // NON-NLS type, Arrays.asList(rs.getString("event_ids").split(",")), // NON-NLS rs.getString(descriptionColumn), lod); //put events in map from type/descrition -> event SetMultimap<String, AggregateEvent> descrMap = typeMap.get(type); if (descrMap == null) { descrMap = HashMultimap.<String, AggregateEvent>create(); typeMap.put(type, descrMap); } descrMap.put(aggregateEvent.getDescription(), aggregateEvent); } } catch (SQLException ex) { Exceptions.printStackTrace(ex); } finally { try { rs.close(); } catch (SQLException ex) { Exceptions.printStackTrace(ex); } dbReadUnlock(); } //result list to return ArrayList<AggregateEvent> aggEvents = new ArrayList<>(); //save this for use when comparing gap size Period timeUnitLength = rangeInfo.getPeriodSize().getPeriod(); //For each (type, description) key, merge agg events for (SetMultimap<String, AggregateEvent> descrMap : typeMap.values()) { for (String descr : descrMap.keySet()) { //run through the sorted events, merging together adjacent events Iterator<AggregateEvent> iterator = descrMap.get(descr).stream() .sorted((AggregateEvent o1, AggregateEvent o2) -> Long .compare(o1.getSpan().getStartMillis(), o2.getSpan().getStartMillis())) .iterator(); AggregateEvent current = iterator.next(); while (iterator.hasNext()) { AggregateEvent next = iterator.next(); Interval gap = current.getSpan().gap(next.getSpan()); //if they overlap or gap is less one quarter timeUnitLength //TODO: 1/4 factor is arbitrary. review! -jm if (gap == null || gap.toDuration() .getMillis() <= timeUnitLength.toDurationFrom(gap.getStart()).getMillis() / 4) { //merge them current = AggregateEvent.merge(current, next); } else { //done merging into current, set next as new current aggEvents.add(current); current = next; } } aggEvents.add(current); } } //at this point we should have a list of aggregate events. //one per type/description spanning consecutive time units as determined in rangeInfo return aggEvents; }
From source file: org.squashtest.tm.service.internal.requirement.VerifiedRequirementsManagerServiceImpl.java
License: Open Source License
@SuppressWarnings("unchecked")
private Map<ExecutionStatus, Long> findResultsForSteppedCoverageWithExecution(
        List<RequirementVersionCoverage> stepedCoverage, List<Long> mainVersionTCWithItpiIds,
        Map<Long, Long> nbSimpleCoverageByTestCase) {

    List<Long> testStepsIds = new ArrayList<>();
    Map<ExecutionStatus, Long> result = new EnumMap<>(ExecutionStatus.class);

    //First we compute all testStep ids in a list, to allow multiple occurrences of the same step.
    //Which is not a good practice but is allowed by the app, so we must take this possibility into account for calculations.
    for (RequirementVersionCoverage cov : stepedCoverage) {
        Long tcId = cov.getVerifyingTestCase().getId();
        if (mainVersionTCWithItpiIds.contains(tcId)) {
            for (ActionTestStep step : cov.getVerifyingSteps()) {
                testStepsIds.add(step.getId());
            }
        }
    }

    //now retrieve a list of exec steps
    MultiMap executionsStatus = executionStepDao.findStepExecutionsStatus(mainVersionTCWithItpiIds, testStepsIds);

    for (Long testStepsId : testStepsIds) {
        List<ExecutionStep> executionSteps = (List<ExecutionStep>) executionsStatus.get(testStepsId);
        for (ExecutionStep executionStep : executionSteps) {
            //Here comes horrible code to detect if the ITPI was fast passed AFTER execution.
            //We have no attribute in the model to help us, and no time to develop a proper solution.
            //So we'll use the execution dates on the itpi and the exec. If the delta between the two dates is greater than 2 seconds,
            //we consider it a fast pass.
            Execution execution = executionStep.getExecution();
            IterationTestPlanItem itpi = execution.getTestPlan();
            Date itpiDateLastExecutedOn = itpi.getLastExecutedOn();
            Date execDateLastExecutedOn = execution.getLastExecutedOn();
            ExecutionStatus status = ExecutionStatus.READY;

            //if execution dates are null, the execution was only READY, so we don't compare dates to avoid npe
            if (itpiDateLastExecutedOn != null && execDateLastExecutedOn != null) {
                DateTime itpiLastExecutedOn = new DateTime(itpi.getLastExecutedOn().getTime());
                DateTime execLastExecutedOn = new DateTime(execution.getLastExecutedOn().getTime());
                Interval interval = new Interval(execLastExecutedOn, itpiLastExecutedOn);
                boolean fastPass = interval.toDuration().isLongerThan(new Duration(2000L));
                //If we have a fast pass, use it for the step status
                status = fastPass ? itpi.getExecutionStatus() : executionStep.getExecutionStatus();
            }

            Long memo = result.get(status);
            if (memo == null) {
                result.put(status, 1L);
            } else {
                result.put(status, memo + 1);
            }
        }
    }
    return result;
}
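Distilled from the Squashtest example above, a minimal, self-contained sketch of comparing an interval's length against a fixed threshold (class name, instants, and printed value are illustrative):

import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.Interval;

public class ThresholdSketch {
    public static void main(String[] args) {
        // Illustrative instants, 5 seconds apart.
        DateTime executionTime = new DateTime(2024, 1, 1, 12, 0, 0);
        DateTime itemUpdateTime = new DateTime(2024, 1, 1, 12, 0, 5);

        // true if more than 2 seconds elapsed between the two instants
        Interval interval = new Interval(executionTime, itemUpdateTime);
        boolean fastPass = interval.toDuration().isLongerThan(new Duration(2000L));
        System.out.println(fastPass); // true
    }
}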
From source file: test.stress.StressManager.java
License: Apache License
public void handleCmd(String cmd, String line, PrintWriter out) { if (cmd.equals("status")) { out.print("StressManager:\r\n"); DateTime startTime, nowTime;/* ww w. j av a 2 s .c om*/ startTime = new DateTime(startupTime); nowTime = new DateTime(System.currentTimeMillis()); Interval interval = new Interval(startTime, nowTime); Duration duration = interval.toDuration(); out.print("start time: " + new Date(startupTime).toString() + "\r\n"); out.print("uptime: " + duration + "\r\n"); out.print("minUsers: " + minUsers + "\r\n"); out.print("maxUsers: " + maxUsers + "\r\n"); if (maxMinutes != Integer.MAX_VALUE) out.print("maxMinutes: " + maxMinutes + "\r\n"); } if (cmd.equals("uptime")) { DateTime startTime, nowTime; startTime = new DateTime(startupTime); nowTime = new DateTime(System.currentTimeMillis()); Interval interval = new Interval(startTime, nowTime); Duration duration = interval.toDuration(); out.print("start time: " + startTime.toString() + "\r\n"); out.print("uptime: " + duration + "\r\n"); } if (cmd.equals("help")) { out.print("StressManager:\r\n"); out.print("shutdown\r\n"); out.print("uptime\r\n"); out.print("minUsers <users>\r\n"); out.print("maxUsers <users> \r\n"); out.print("add <test_file>\r\n"); out.print("xcc_logging <new_level>\r\n"); out.print("get_property <prop_name>\r\n"); out.print("set_property <prop_name> <value>\r\n"); out.print("get_properties\r\n"); } else if (cmd.equals("shutdown")) { out.print("StressManager shutting down\r\n"); minUsers = 0; System.out.println("handling shutdown request"); logMessage("handling shutdown request"); threadMgr.stopAllTests(); doShutdown = true; } else if (cmd.equals("minusers")) { StringTokenizer tokens = new StringTokenizer(line); // get past the actual command String s = tokens.nextToken(); s = tokens.nextToken(); int i = Integer.parseInt(s); if (i > 0) minUsers = i; if (telemetryServer != null) telemetryServer.sendTelemetry("StressManager.minusers", Integer.toString(minUsers)); } else if (cmd.equals("maxusers")) { StringTokenizer tokens = new StringTokenizer(line); // get past the actual command String s = tokens.nextToken(); s = tokens.nextToken(); int i = Integer.parseInt(s); if (i > 0) maxUsers = i; if (telemetryServer != null) telemetryServer.sendTelemetry("StressManager.maxusers", Integer.toString(maxUsers)); } else if (cmd.equals("xcc_logging")) { StringTokenizer tokens = new StringTokenizer(line); // get past the actual command String s = tokens.nextToken(); s = tokens.nextToken(); Logger logger = Logger.getLogger("com.marklogic.xcc"); System.out.println("xcc logger name: " + logger.getName()); System.out.println("xcc logger level: " + logger.getLevel()); System.out.println("xcc logger handles:"); Handler[] handlers = logger.getHandlers(); System.out.println("handle count: " + handlers.length); int ii; for (ii = 0; ii < handlers.length; ii++) { Handler handler = handlers[ii]; System.out.println("handler " + ii + ": " + handler.toString() + ", level " + handler.getLevel()); } // initialize it to what it was Level newLevel = logger.getLevel(); if (s.equalsIgnoreCase("OFF")) { newLevel = Level.OFF; } else if (s.equalsIgnoreCase("SEVERE")) { newLevel = Level.SEVERE; } else if (s.equalsIgnoreCase("WARNING")) { newLevel = Level.WARNING; } else if (s.equalsIgnoreCase("INFO")) { newLevel = Level.INFO; } else if (s.equalsIgnoreCase("CONFIG")) { newLevel = Level.CONFIG; } else if (s.equalsIgnoreCase("FINE")) { newLevel = Level.FINE; } else if (s.equalsIgnoreCase("FINER")) { newLevel = Level.FINER; } else if 
(s.equalsIgnoreCase("FINEST")) { newLevel = Level.FINEST; } else if (s.equalsIgnoreCase("ALL")) { newLevel = Level.ALL; } logger.setLevel(newLevel); // now walk through all the handlers and set them to the same level for (ii = 0; ii < handlers.length; ii++) { Handler handler = handlers[ii]; handler.setLevel(newLevel); System.out.println("handler " + ii + ": " + handler.toString() + ", level " + handler.getLevel()); // let the command client know we've handled this out.println("handler " + ii + ": " + handler.toString() + ", level " + handler.getLevel()); } if (telemetryServer != null) telemetryServer.sendTelemetry("StressManager.xcc.logging_level", s); } else if (cmd.equals("test")) { StringTokenizer tokens = new StringTokenizer(line); String s = tokens.nextToken(); // trim off the initial command String subcmd = null; if (tokens.hasMoreTokens()) subcmd = tokens.nextToken().toLowerCase(); if (subcmd != null && subcmd.equals("start")) { String t = null; if (tokens.hasMoreTokens()) t = tokens.nextToken(); out.print("starting test " + t + "\r\n"); if (t != null) { t = t.replaceAll("QA_HOME", System.getProperty("QA_HOME")); File f = new File(t); if (!f.exists()) { out.print("invalid filename: " + t + "\r\n"); } else { threadMgr.addCmdUser(connectionData, t, 0); } } } if (subcmd != null && subcmd.equals("add")) { String t = null; if (tokens.hasMoreTokens()) t = tokens.nextToken(); out.print("adding test " + t + "\r\n"); if (t != null) { t = t.replaceAll("QA_HOME", System.getProperty("QA_HOME")); File f = new File(t); if (!f.exists()) { out.print("invalid filename: " + t + "\r\n"); } else { // threadMgr.addCmdUser(connectionData, t, 0); addTestLocation(t); } } } } else if (cmd.equals("set_property")) { StringTokenizer tokens = new StringTokenizer(line); // get past the actual command String s = tokens.nextToken(); String prop = tokens.nextToken(); String value = null; if (tokens.hasMoreTokens()) value = tokens.nextToken(); StressTestProperties properties = StressTestProperties.getStressTestProperties(); if (value != null) properties.setProperty(prop, value); else out.print("no value provided\r\n"); } else if (cmd.equals("get_property")) { StringTokenizer tokens = new StringTokenizer(line); // get past the actual command String s = tokens.nextToken(); String prop = tokens.nextToken(); StressTestProperties properties = StressTestProperties.getStressTestProperties(); String value = properties.getProperty(prop); if (value == null) out.print("not set\r\n"); else out.print(value + "\r\n"); } else if (cmd.equals("get_properties")) { StringTokenizer tokens = new StringTokenizer(line); // get past the actual command String s = tokens.nextToken(); StressTestProperties properties = StressTestProperties.getStressTestProperties(); Enumeration propNames = properties.getPropertyNames(); while (propNames.hasMoreElements()) { String prop = (String) propNames.nextElement(); String value = properties.getProperty(prop); out.print(prop + ": " + value + "\r\n"); } } }
From source file: TVShowTimelineMaker.timeConstraints.ContainsConstraint.java
@Override public boolean consistentWithConstraint(T inFirstPlacement, S inSecondPlacement) { if (this.getContainsType().equals(ContainsType.CONTAINS)) { if (this.mContainsEvent instanceof OncePeriodEvent) { OncePeriodEvent.OncePeriodEventPlacement firstOncePeriodEventPlacement = (OncePeriodEvent.OncePeriodEventPlacement) inFirstPlacement; DateTime firstStartDate = firstOncePeriodEventPlacement.startDay.withHourOfDay(1); DateTime firstEndDate = firstOncePeriodEventPlacement.endDay.withHourOfDay(23); if (this.mContainedEvent instanceof OnceDayEvent) { OnceDayEvent.OnceDayEventPlacement secondOnceDayEventPlacement = (OnceDayEvent.OnceDayEventPlacement) inSecondPlacement; return firstStartDate.isBefore(secondOnceDayEventPlacement.day) && firstEndDate.isAfter(secondOnceDayEventPlacement.day); } else if (this.mContainedEvent instanceof YearlyDayEvent) { YearlyDayEvent.YearlyDayEventPlacement secondYearlyDayEventPlacement = (YearlyDayEvent.YearlyDayEventPlacement) inSecondPlacement; long lengh = new org.joda.time.Interval(firstStartDate, firstEndDate).toDuration() .getStandardDays(); if (lengh > 366) { return true; } else { DateTime lowDateTime = secondYearlyDayEventPlacement.day.toDateTime() .withYear(firstStartDate.getYear()); DateTime highDateTime = secondYearlyDayEventPlacement.day.toDateTime() .withYear(firstEndDate.getYear()); return (firstStartDate.isBefore(lowDateTime) && firstEndDate.isAfter(lowDateTime)) || (firstStartDate.isBefore(highDateTime) && firstEndDate.isAfter(highDateTime)); }//from ww w . ja v a 2s.c om } else if (this.mContainedEvent instanceof OncePeriodEvent) { OncePeriodEvent.OncePeriodEventPlacement secondOncePeriodEventPlacement = (OncePeriodEvent.OncePeriodEventPlacement) inSecondPlacement; return firstStartDate.isBefore(secondOncePeriodEventPlacement.startDay) && firstEndDate.isAfter(secondOncePeriodEventPlacement.endDay); } else if (this.mContainedEvent instanceof YearlyPeriodEvent) { YearlyPeriodEvent.YearlyPeriodEventPlacement secondSeasonPlacement = (YearlyPeriodEvent.YearlyPeriodEventPlacement) inSecondPlacement; long lengh = new org.joda.time.Interval(firstStartDate, firstEndDate).toDuration() .getStandardDays(); if (lengh > 366) { return true; } else { DateTime lowStartDateTime = secondSeasonPlacement.startDay.toDateTime() .withYear(firstStartDate.getYear()); DateTime lowEndDateTime = secondSeasonPlacement.endDay.toDateTime() .withYear(firstStartDate.getYear()); DateTime highStartDateTime = secondSeasonPlacement.startDay.toDateTime() .withYear(firstEndDate.getYear()); DateTime highEndDateTime = secondSeasonPlacement.endDay.toDateTime() .withYear(firstEndDate.getYear()); return (firstStartDate.isBefore(lowStartDateTime) && firstEndDate.isAfter(lowEndDateTime)) || (firstStartDate.isBefore(highStartDateTime) && firstEndDate.isAfter(highEndDateTime)); } } else { return true; } } else if (this.mContainsEvent instanceof YearlyPeriodEvent) { YearlyPeriodEvent.YearlyPeriodEventPlacement firstSeasonPlacement = (YearlyPeriodEvent.YearlyPeriodEventPlacement) inFirstPlacement; if (this.mContainedEvent instanceof OnceDayEvent) { OnceDayEvent.OnceDayEventPlacement secondOnceDayEventPlacement = (OnceDayEvent.OnceDayEventPlacement) inSecondPlacement; DayOfYear tempDay = DayOfYear.fromDateTime(secondOnceDayEventPlacement.day); return firstSeasonPlacement.startDay.isEqualOrBefore(tempDay) && firstSeasonPlacement.endDay.isEqualOrAfter(tempDay); } else if (this.mContainedEvent instanceof YearlyDayEvent) { YearlyDayEvent.YearlyDayEventPlacement secondYearlyDayEventPlacement = 
(YearlyDayEvent.YearlyDayEventPlacement) inSecondPlacement; return firstSeasonPlacement.startDay.isEqualOrBefore(secondYearlyDayEventPlacement.day) && firstSeasonPlacement.endDay.isEqualOrAfter(secondYearlyDayEventPlacement.day); } else if (this.mContainedEvent instanceof OncePeriodEvent) { OncePeriodEvent.OncePeriodEventPlacement secondOncePeriodEventPlacement = (OncePeriodEvent.OncePeriodEventPlacement) inSecondPlacement; long lengh = new org.joda.time.Interval(secondOncePeriodEventPlacement.startDay, secondOncePeriodEventPlacement.endDay).toDuration().getStandardDays(); if (lengh > 366) { return true; } else { DayOfYear tempStartDay = DayOfYear.fromDateTime(secondOncePeriodEventPlacement.startDay); DayOfYear tempEndDay = DayOfYear.fromDateTime(secondOncePeriodEventPlacement.endDay); return firstSeasonPlacement.startDay.isEqualOrBefore(tempStartDay) && firstSeasonPlacement.endDay.isEqualOrAfter(tempEndDay); } } else if (this.mContainedEvent instanceof YearlyPeriodEvent) { YearlyPeriodEvent.YearlyPeriodEventPlacement secondSeasonPlacement = (YearlyPeriodEvent.YearlyPeriodEventPlacement) inSecondPlacement; return firstSeasonPlacement.startDay.isEqualOrBefore(secondSeasonPlacement.startDay) && firstSeasonPlacement.endDay.isEqualOrAfter(secondSeasonPlacement.endDay); } else { return true; } } else { return true; } } else { //todo: fill in if (this.mContainsEvent instanceof OncePeriodEvent) { OncePeriodEvent.OncePeriodEventPlacement firstOncePeriodEventPlacement = (OncePeriodEvent.OncePeriodEventPlacement) inFirstPlacement; DateTime firstStartDate = firstOncePeriodEventPlacement.startDay.withHourOfDay(23); DateTime firstEndDate = firstOncePeriodEventPlacement.endDay.withHourOfDay(2); if (this.mContainedEvent instanceof YearlyEvent) { org.joda.time.Interval newInterval = new org.joda.time.Interval(firstStartDate, firstEndDate); if (newInterval.toDuration().toStandardDays().getDays() >= 366) { return false; } } if (this.mContainedEvent instanceof OnceDayEvent) { OnceDayEvent.OnceDayEventPlacement secondOnceDayEventPlacement = (OnceDayEvent.OnceDayEventPlacement) inSecondPlacement; return secondOnceDayEventPlacement.day.isBefore(firstStartDate) || secondOnceDayEventPlacement.day.isAfter(firstEndDate); } else if (this.mContainedEvent instanceof YearlyDayEvent) { YearlyDayEvent.YearlyDayEventPlacement secondYearlyDayEventPlacement = (YearlyDayEvent.YearlyDayEventPlacement) inSecondPlacement; for (DateTime curDateTime = firstStartDate.withHourOfDay(1); curDateTime .isBefore(firstEndDate); curDateTime = curDateTime.plusDays(1)) { DayOfYear curDayOfYear = DayOfYear.fromDateTime(curDateTime); if (curDayOfYear.equals(secondYearlyDayEventPlacement.day)) { return false; } } } else if (this.mContainedEvent instanceof OncePeriodEvent) { OncePeriodEvent.OncePeriodEventPlacement secondOncePeriodEventPlacement = (OncePeriodEvent.OncePeriodEventPlacement) inSecondPlacement; return secondOncePeriodEventPlacement.endDay.isBefore(firstStartDate) || secondOncePeriodEventPlacement.startDay.isAfter(firstEndDate); } else if (this.mContainedEvent instanceof YearlyPeriodEvent) { YearlyPeriodEvent.YearlyPeriodEventPlacement secondSeasonPlacement = (YearlyPeriodEvent.YearlyPeriodEventPlacement) inSecondPlacement; java.util.Collection<DayOfYear> firstSet = new java.util.HashSet<>(1); java.util.Collection<DayOfYear> secondSet = new java.util.HashSet<>(1); for (DateTime curDateTime = firstStartDate.withHourOfDay(1); curDateTime .isBefore(firstEndDate); curDateTime = curDateTime.plusDays(1)) { DayOfYear curDayOfYear = 
DayOfYear.fromDateTime(curDateTime); firstSet.add(curDayOfYear); } for (DayOfYear curDayOfYear = secondSeasonPlacement.startDay; !curDayOfYear .equals(secondSeasonPlacement.endDay); curDayOfYear.plusDays(1)) { secondSet.add(curDayOfYear); } secondSet.add(secondSeasonPlacement.endDay); firstSet.retainAll(secondSet); if (!firstSet.isEmpty()) { return false; } } else { return true; } } else if (this.mContainsEvent instanceof YearlyPeriodEvent) { YearlyPeriodEvent.YearlyPeriodEventPlacement firstSeasonPlacement = (YearlyPeriodEvent.YearlyPeriodEventPlacement) inFirstPlacement; java.util.Collection<DayOfYear> firstSet = new java.util.HashSet<>(1); for (DayOfYear curDayOfYear = firstSeasonPlacement.startDay; !curDayOfYear .equals(firstSeasonPlacement.endDay); curDayOfYear.plusDays(1)) { firstSet.add(curDayOfYear); } if (this.mContainedEvent instanceof OnceDayEvent) { OnceDayEvent.OnceDayEventPlacement secondOnceDayEventPlacement = (OnceDayEvent.OnceDayEventPlacement) inSecondPlacement; if (firstSet.contains(DayOfYear.fromDateTime(secondOnceDayEventPlacement.day))) { return false; } } else if (this.mContainedEvent instanceof YearlyDayEvent) { YearlyDayEvent.YearlyDayEventPlacement secondYearlyDayEventPlacement = (YearlyDayEvent.YearlyDayEventPlacement) inSecondPlacement; if (firstSet.contains(secondYearlyDayEventPlacement.day)) { return false; } } else if (this.mContainedEvent instanceof OncePeriodEvent) { OncePeriodEvent.OncePeriodEventPlacement secondOncePeriodEventPlacement = (OncePeriodEvent.OncePeriodEventPlacement) inSecondPlacement; java.util.Collection<DayOfYear> secondSet = new java.util.HashSet<>(1); DateTime endDateTime = secondOncePeriodEventPlacement.endDay.withHourOfDay(23); for (DateTime curDateTime = secondOncePeriodEventPlacement.startDay.withHourOfDay( 1); curDateTime.isBefore(endDateTime); curDateTime = curDateTime.plusDays(1)) { DayOfYear curDayOfYear = DayOfYear.fromDateTime(curDateTime); secondSet.add(curDayOfYear); } firstSet.retainAll(secondSet); if (!firstSet.isEmpty()) { return false; } } else if (this.mContainedEvent instanceof YearlyPeriodEvent) { YearlyPeriodEvent.YearlyPeriodEventPlacement secondSeasonPlacement = (YearlyPeriodEvent.YearlyPeriodEventPlacement) inSecondPlacement; java.util.Collection<DayOfYear> secondSet = new java.util.HashSet<>(1); for (DayOfYear curDayOfYear = secondSeasonPlacement.startDay; !curDayOfYear .equals(secondSeasonPlacement.endDay); curDayOfYear.plusDays(1)) { secondSet.add(curDayOfYear); } secondSet.add(secondSeasonPlacement.endDay); firstSet.retainAll(secondSet); if (!firstSet.isEmpty()) { return false; } } else { return true; } } else { return true; } } return true; }
From source file: TVShowTimelineMaker.timeConstraints.dayAcceptors.SameTimeAsDayAcceptor.java
public void build() {
    DateTime buildFromStart = resetStartTime;
    DateTime buildFromEnd = resetEndTime;
    for (DayEvent<?, ?> curEvent : this.Events) {
        if (curEvent.isValid()) {
            if (curEvent instanceof OnceEvent) {
                OnceEvent<?> curOnceEvent = (OnceEvent<?>) curEvent;
                Interval curEventInterval = new Interval(curOnceEvent.getEarliestPossibleStartTime(),
                        curOnceEvent.getLatestPossibleEndTime());
                if (curEventInterval.toDuration().isShorterThan(yearOfDays.toStandardDuration())) {
                    int startYear = curOnceEvent.getEarliestPossibleStartTime()
                            .get(org.joda.time.DateTimeFieldType.year());
                    DateTime newStart = curOnceEvent.getEarliestPossibleStartTime().minusYears(startYear);
                    DateTime newEnd = curOnceEvent.getLatestPossibleEndTime().minusYears(startYear);
                    if (newStart.isAfter(buildFromStart)) {
                        buildFromStart = newStart;
                    }
                    if (newEnd.isBefore(buildFromEnd)) {
                        buildFromEnd = newEnd;
                    }
                }
            } else if (curEvent instanceof YearlyDayEvent) {
                YearlyDayEvent curYearlyDayEvent = (YearlyDayEvent) curEvent;
            }
        }
    }
    this.overYearBound = buildFromEnd.get(org.joda.time.DateTimeFieldType.year()) > 0;
    this.startMonth = buildFromStart.get(org.joda.time.DateTimeFieldType.monthOfYear());
    this.startDay = buildFromStart.get(org.joda.time.DateTimeFieldType.dayOfMonth());
    this.endMonth = buildFromEnd.get(org.joda.time.DateTimeFieldType.monthOfYear());
    this.endDay = buildFromEnd.get(org.joda.time.DateTimeFieldType.dayOfMonth());
    this.lastmodifyed = System.currentTimeMillis();
}