List of usage examples for com.google.common.collect Range lowerEndpoint
public C lowerEndpoint() — returns the lower endpoint of this range. Throws IllegalStateException if the range has no lower bound, so check hasLowerBound() first when the range may be unbounded below.
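Before the project examples, here is a minimal, self-contained sketch of the call. It is not taken from any of the projects below; the range values are made up purely for illustration:

import com.google.common.collect.Range;

public class LowerEndpointExample {
    public static void main(String[] args) {
        Range<Integer> closed = Range.closed(3, 7);      // [3..7]
        System.out.println(closed.lowerEndpoint());      // 3
        System.out.println(closed.lowerBoundType());     // CLOSED

        Range<Integer> open = Range.openClosed(3, 7);    // (3..7]
        System.out.println(open.lowerEndpoint());        // still 3; the bound type is OPEN

        Range<Integer> atMost = Range.atMost(7);         // (-INF..7]
        if (atMost.hasLowerBound()) {
            System.out.println(atMost.lowerEndpoint());
        } else {
            // Calling lowerEndpoint() here would throw IllegalStateException.
            System.out.println("unbounded below");
        }
    }
}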
From source file:net.sf.mzmine.modules.peaklistmethods.peakpicking.deconvolution.ADAPpeakpicking.ADAPDetector.java
@Override
public Feature[] resolvePeaks(final Feature chromatogram, final ParameterSet parameters,
        RSessionWrapper rSession, double msmsRange, double rTRangeMSMS) throws RSessionWrapperException {

    int scanNumbers[] = chromatogram.getScanNumbers();
    final int scanCount = scanNumbers.length;
    double retentionTimes[] = new double[scanCount];
    double intensities[] = new double[scanCount];
    RawDataFile dataFile = chromatogram.getDataFile();
    for (int i = 0; i < scanCount; i++) {
        final int scanNum = scanNumbers[i];
        retentionTimes[i] = dataFile.getScan(scanNum).getRetentionTime();
        DataPoint dp = chromatogram.getDataPoint(scanNum);
        if (dp != null)
            intensities[i] = dp.getIntensity();
        else
            intensities[i] = 0.0;
    }

    // List<PeakInfo> ADAPPeaks = new ArrayList<PeakInfo>();
    List<PeakInfo> ADAPPeaks = null;

    Range<Double> peakDuration = parameters.getParameter(PEAK_DURATION).getValue();
    final MZmineProcessingStep<SNEstimatorChoice> signalNoiseEstimator = parameters.getParameter(SN_ESTIMATORS)
            .getValue();

    String SNCode = signalNoiseEstimator.getModule().getSNCode();

    double signalNoiseWindowMult = -1.0;
    boolean absWavCoeffs = false;
    Map<String, Object> informationSN = new HashMap<String, Object>();

    if (SNCode == "Wavelet Coefficient Estimator") {
        informationSN.put("code", "Wavelet Coefficient Estimator");
        signalNoiseWindowMult = signalNoiseEstimator.getParameterSet().getParameter(HALF_WAVELET_WINDOW)
                .getValue();
        absWavCoeffs = signalNoiseEstimator.getParameterSet().getParameter(ABS_WAV_COEFFS).getValue();
        informationSN.put("multiplier", signalNoiseWindowMult);
        informationSN.put("absolutewavecoeffs", absWavCoeffs);
    }

    if (SNCode == "Intensity Window Estimator") {
        informationSN.put("code", "Intensity Window Estimator");
    }

    // Get the average RT spacing.
    double rtSum = 0.0;
    for (int i = 0; i < retentionTimes.length - 1; i++) {
        rtSum += retentionTimes[i + 1] - retentionTimes[i];
    }
    double avgRTInterval = rtSum / ((double) (retentionTimes.length - 1));

    // Change the lower and upper bounds for the wavelet scales from retention times to number of scans.
    Range<Double> rtRangeForCWTScales = parameters.getParameter(RT_FOR_CWT_SCALES_DURATION).getValue();
    double rtLow = rtRangeForCWTScales.lowerEndpoint();
    double rtHigh = rtRangeForCWTScales.upperEndpoint();

    int numScansRTLow = (int) Math.round(rtLow / avgRTInterval);
    int numScansRTHigh = (int) Math.round(rtHigh / avgRTInterval);

    if (numScansRTLow < 1) {
        numScansRTLow = 1;
    }
    if (numScansRTHigh >= retentionTimes.length) {
        numScansRTHigh = retentionTimes.length;
    }

    ADAPPeaks = DeconvoluteSignal(retentionTimes, intensities, chromatogram.getMZ(),
            parameters.getParameter(SN_THRESHOLD).getValue(),
            parameters.getParameter(MIN_FEAT_HEIGHT).getValue(), peakDuration,
            parameters.getParameter(COEF_AREA_THRESHOLD).getValue(), numScansRTLow, numScansRTHigh,
            informationSN);

    final List<ResolvedPeak> resolvedPeaks;
    if (ADAPPeaks == null) {
        resolvedPeaks = new ArrayList<ResolvedPeak>(0);
    } else {
        LOG.finest("Processing peak matrix...");

        // Process peak matrix.
        resolvedPeaks = new ArrayList<ResolvedPeak>(ADAPPeaks.size());

        // The old way could detect the same peak more than once if the wavelet scales were too large:
        // if the left bounds were the same and there was a null point before the right bounds, it would
        // make the same peak twice. To avoid that, check whether the peak duration range is met before
        // going into the loop.
        // for (final double[] peakRow : peakMatrix) {
        for (int i = 0; i < ADAPPeaks.size(); i++) {

            PeakInfo curPeak = ADAPPeaks.get(i);

            SimplePeakInformation information = new SimplePeakInformation();
            information.addProperty("Signal-to-Noise", Double.toString(curPeak.signalToNoiseRatio));
            information.addProperty("Coefficient-over-area", Double.toString(curPeak.coeffOverArea));
            // information.addProperty("index",
            //         // Integer.toString(scans[(int) peakIndex[j] - 1])); // Subtract one because R indices start from 1
            //         Integer.toString((int) curPeak.peakIndex));
            // information.addProperty("sharpness", Double.toString(curPeak.sharpness));
            // information.addProperty("signalToNoiseRatio", Double.toString(curPeak.signalToNoiseRatio));
            // information.addProperty("isShared", Boolean.toString(curPeak.isShared));
            //         // Boolean.toString(1.0 == curPeak.isShared));
            // information.addProperty("offset", Integer.toString((int) curPeak.offset));

            ResolvedPeak peak = new ResolvedPeak(chromatogram, curPeak.leftApexIndex, curPeak.rightApexIndex,
                    msmsRange, rTRangeMSMS);
            peak.setPeakInformation(information);
            resolvedPeaks.add(peak);
            // resolvedPeaks.add(new ResolvedPeak(chromatogram, curPeak.leftApexIndex, curPeak.rightApexIndex));
        }
    }

    return resolvedPeaks.toArray(new ResolvedPeak[resolvedPeaks.size()]);
}
From source file:com.google.googlejavaformat.java.JavaOutput.java
/**
 * Expand a token range to start and end on acceptable boundaries for re-formatting.
 *
 * @param iRange the {@link Range} of tokens
 * @return the expanded token range
 */
private Range<Integer> expandToBreakableRegions(Range<Integer> iRange) {
    // The original line range.
    int loTok = iRange.lowerEndpoint();
    int hiTok = iRange.upperEndpoint() - 1;

    // Expand the token indices to formattable boundaries (e.g. edges of statements).
    if (!partialFormatRanges.contains(loTok) || !partialFormatRanges.contains(hiTok)) {
        return EMPTY_RANGE;
    }
    loTok = partialFormatRanges.rangeContaining(loTok).lowerEndpoint();
    hiTok = partialFormatRanges.rangeContaining(hiTok).upperEndpoint();
    return Range.closedOpen(loTok, hiTok + 1);
}
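The same expand-to-enclosing-region pattern can be reproduced with a plain Guava RangeSet. The sketch below is illustrative only; the region boundaries and token indices are made-up values, not google-java-format internals:

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;

public class ExpandRangeExample {
    public static void main(String[] args) {
        // Hypothetical "formattable regions" over token indices.
        RangeSet<Integer> regions = TreeRangeSet.create();
        regions.add(Range.closed(0, 9));
        regions.add(Range.closed(10, 24));

        Range<Integer> request = Range.closedOpen(7, 13);  // tokens 7..12
        int lo = request.lowerEndpoint();
        int hi = request.upperEndpoint() - 1;

        if (regions.contains(lo) && regions.contains(hi)) {
            // Snap both ends outward to the enclosing regions.
            lo = regions.rangeContaining(lo).lowerEndpoint();
            hi = regions.rangeContaining(hi).upperEndpoint();
            System.out.println(Range.closedOpen(lo, hi + 1)); // [0..25)
        }
    }
}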
From source file:com.zulily.omicron.crontab.ExpressionPart.java
ExpressionPart(final Range<Integer> allowedRange, final String[] stringValues) {
    this.allowedRange = allowedRange;

    final HashMap<String, Integer> stringValuesMap = Maps.newHashMap();

    if (stringValues != null && allowedRange != null) {
        for (int index = 0; index < stringValues.length; index++) {
            stringValuesMap.put(stringValues[index], index + allowedRange.lowerEndpoint()); // correct for 1-based month/d-o-m
        }
    }

    this.stringNameMap = ImmutableMap.copyOf(stringValuesMap);
}
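Stripped of the omicron classes, the offset-by-lowerEndpoint idea looks roughly like the following. The month names and the closed(1, 12) range are illustrative assumptions, not the library's actual tables:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Range;

public class NameToValueExample {
    public static void main(String[] args) {
        // Cron-style months are 1-based, so the allowed range starts at 1.
        Range<Integer> allowed = Range.closed(1, 12);
        String[] names = {"JAN", "FEB", "MAR", "APR", "MAY", "JUN",
                          "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"};

        ImmutableMap.Builder<String, Integer> builder = ImmutableMap.builder();
        for (int i = 0; i < names.length; i++) {
            // Offset each index by the range's lower endpoint so JAN -> 1, not 0.
            builder.put(names[i], i + allowed.lowerEndpoint());
        }
        System.out.println(builder.build().get("MAR")); // 3
    }
}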
From source file:org.apache.drill.exec.record.RecordIterator.java
public void forward(long delta) {
    if (!enableMarkAndReset) {
        throw new UnsupportedOperationException("mark and reset disabled for this RecordIterator");
    }
    assert delta >= 0;
    assert (delta + outerPosition) < totalRecordCount;
    final long nextOuterPosition = delta + outerPosition;
    final RecordBatchData rbdNew = batches.get(nextOuterPosition);
    final RecordBatchData rbdOld = batches.get(outerPosition);
    assert rbdNew != null;
    assert rbdOld != null;
    container.transferOut(rbdOld.getContainer());
    // Get vectors from new position.
    container.transferIn(rbdNew.getContainer());
    outerPosition = nextOuterPosition;
    final Range<Long> markedBatchRange = batches.getEntry(outerPosition).getKey();
    startBatchPosition = markedBatchRange.lowerEndpoint();
    innerPosition = (int) (outerPosition - startBatchPosition);
    innerRecordCount = (int) (markedBatchRange.upperEndpoint() - startBatchPosition);
}
From source file:org.apache.drill.exec.record.RecordIterator.java
public void reset() {
    if (!enableMarkAndReset) {
        throw new UnsupportedOperationException("mark and reset disabled for this RecordIterator");
    }
    if (markedOuterPosition >= 0) {
        // Move to rbd for markedOuterPosition.
        final RecordBatchData rbdNew = batches.get(markedOuterPosition);
        final RecordBatchData rbdOld = batches.get(startBatchPosition);
        assert rbdOld != null;
        assert rbdNew != null;
        if (rbdNew != rbdOld) {
            container.transferOut(rbdOld.getContainer());
            container.transferIn(rbdNew.getContainer());
        }
        innerPosition = markedInnerPosition;
        outerPosition = markedOuterPosition;
        final Range<Long> markedBatchRange = batches.getEntry(outerPosition).getKey();
        startBatchPosition = markedBatchRange.lowerEndpoint();
        innerRecordCount = (int) (markedBatchRange.upperEndpoint() - startBatchPosition);
        markedInnerPosition = -1;
        markedOuterPosition = -1;
    }
}
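Both RecordIterator methods resolve an absolute record position to (batch start, inner offset, batch size) via a Guava RangeMap entry. A stripped-down sketch of that lookup, with made-up batch sizes and a String stand-in for RecordBatchData (independent of Drill's vector containers), might look like:

import com.google.common.collect.Range;
import com.google.common.collect.RangeMap;
import com.google.common.collect.TreeRangeMap;
import java.util.Map;

public class BatchLookupExample {
    public static void main(String[] args) {
        // Map absolute record positions to a batch id.
        RangeMap<Long, String> batches = TreeRangeMap.create();
        batches.put(Range.closedOpen(0L, 100L), "batch-0");
        batches.put(Range.closedOpen(100L, 250L), "batch-1");

        long outerPosition = 173L;
        Map.Entry<Range<Long>, String> entry = batches.getEntry(outerPosition);

        long startBatchPosition = entry.getKey().lowerEndpoint();                            // 100
        int innerPosition = (int) (outerPosition - startBatchPosition);                      // 73
        int innerRecordCount = (int) (entry.getKey().upperEndpoint() - startBatchPosition);  // 150

        System.out.println(entry.getValue() + " @ " + innerPosition + "/" + innerRecordCount);
    }
}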
From source file:com.yahoo.gondola.container.client.ZookeeperShardManagerClient.java
@Override
public void migrateBuckets(Range<Integer> splitRange, String fromShardId, String toShardId, long timeoutMs)
        throws ShardManagerException, InterruptedException {
    // Enable special mode on destination shard.
    sendActionToShard(toShardId, MIGRATE_1, splitRange.lowerEndpoint(), splitRange.upperEndpoint(),
            fromShardId, toShardId, timeoutMs);
    waitCondition(fromShardId, ZookeeperStat::isMigrating1Operational, timeoutMs);
    // Set all nodes in migrating mode; traffic to the original shard will route to the new shard in this state.
    setBuckets(splitRange, fromShardId, toShardId, false);
    // Set all nodes to migration complete; all traffic will route to the new shard.
    setBuckets(splitRange, fromShardId, toShardId, true);
    // Done, mark operation as completed.
    sendActionToAll(Action.NOOP);
}
From source file:net.sf.mzmine.modules.projectmethods.projectsave.RawDataFileSaveHandler.java
/**
 * Create the part of the XML document related to the scans
 *
 * @param scan
 * @param element
 */
private void fillScanElement(Scan scan, TransformerHandler hd) throws SAXException, IOException {
    // <SCAN_ID>
    AttributesImpl atts = new AttributesImpl();
    hd.startElement("", "", RawDataElementName.SCAN_ID.getElementName(), atts);
    hd.characters(String.valueOf(scan.getScanNumber()).toCharArray(), 0,
            String.valueOf(scan.getScanNumber()).length());
    hd.endElement("", "", RawDataElementName.SCAN_ID.getElementName());

    // <MS_LEVEL>
    hd.startElement("", "", RawDataElementName.MS_LEVEL.getElementName(), atts);
    hd.characters(String.valueOf(scan.getMSLevel()).toCharArray(), 0,
            String.valueOf(scan.getMSLevel()).length());
    hd.endElement("", "", RawDataElementName.MS_LEVEL.getElementName());

    if (scan.getMSLevel() >= 2) {
        // <PRECURSOR_MZ>
        hd.startElement("", "", RawDataElementName.PRECURSOR_MZ.getElementName(), atts);
        hd.characters(String.valueOf(scan.getPrecursorMZ()).toCharArray(), 0,
                String.valueOf(scan.getPrecursorMZ()).length());
        hd.endElement("", "", RawDataElementName.PRECURSOR_MZ.getElementName());

        // <PRECURSOR_CHARGE>
        hd.startElement("", "", RawDataElementName.PRECURSOR_CHARGE.getElementName(), atts);
        hd.characters(String.valueOf(scan.getPrecursorCharge()).toCharArray(), 0,
                String.valueOf(scan.getPrecursorCharge()).length());
        hd.endElement("", "", RawDataElementName.PRECURSOR_CHARGE.getElementName());
    }

    // <RETENTION_TIME>
    hd.startElement("", "", RawDataElementName.RETENTION_TIME.getElementName(), atts);
    // In the project file, retention time is represented in seconds, for historical reasons
    double rt = scan.getRetentionTime() * 60d;
    hd.characters(String.valueOf(rt).toCharArray(), 0, String.valueOf(rt).length());
    hd.endElement("", "", RawDataElementName.RETENTION_TIME.getElementName());

    // <CENTROIDED>
    hd.startElement("", "", RawDataElementName.CENTROIDED.getElementName(), atts);
    hd.characters(String.valueOf(scan.getSpectrumType()).toCharArray(), 0,
            String.valueOf(scan.getSpectrumType()).length());
    hd.endElement("", "", RawDataElementName.CENTROIDED.getElementName());

    // <QUANTITY_DATAPOINTS>
    hd.startElement("", "", RawDataElementName.QUANTITY_DATAPOINTS.getElementName(), atts);
    hd.characters(String.valueOf((scan.getNumberOfDataPoints())).toCharArray(), 0,
            String.valueOf((scan.getNumberOfDataPoints())).length());
    hd.endElement("", "", RawDataElementName.QUANTITY_DATAPOINTS.getElementName());

    // <FRAGMENT_SCAN>
    if (scan.getFragmentScanNumbers() != null) {
        int[] fragmentScans = scan.getFragmentScanNumbers();
        atts.addAttribute("", "", RawDataElementName.QUANTITY.getElementName(), "CDATA",
                String.valueOf(fragmentScans.length));
        hd.startElement("", "", RawDataElementName.QUANTITY_FRAGMENT_SCAN.getElementName(), atts);
        atts.clear();
        for (int i : fragmentScans) {
            hd.startElement("", "", RawDataElementName.FRAGMENT_SCAN.getElementName(), atts);
            hd.characters(String.valueOf(i).toCharArray(), 0, String.valueOf(i).length());
            hd.endElement("", "", RawDataElementName.FRAGMENT_SCAN.getElementName());
        }
        hd.endElement("", "", RawDataElementName.QUANTITY_FRAGMENT_SCAN.getElementName());
    }

    // <MASS_LIST>
    MassList massLists[] = scan.getMassLists();
    for (MassList massList : massLists) {
        StorableMassList stMassList = (StorableMassList) massList;
        atts.addAttribute("", "", RawDataElementName.NAME.getElementName(), "CDATA", stMassList.getName());
        atts.addAttribute("", "", RawDataElementName.STORAGE_ID.getElementName(), "CDATA",
                String.valueOf(stMassList.getStorageID()));
        hd.startElement("", "", RawDataElementName.MASS_LIST.getElementName(), atts);
        atts.clear();
        hd.endElement("", "", RawDataElementName.MASS_LIST.getElementName());
    }

    // <POLARITY>
    hd.startElement("", "", RawDataElementName.POLARITY.getElementName(), atts);
    String pol = scan.getPolarity().toString();
    hd.characters(pol.toCharArray(), 0, pol.length());
    hd.endElement("", "", RawDataElementName.POLARITY.getElementName());

    // <SCAN_DESCRIPTION>
    hd.startElement("", "", RawDataElementName.SCAN_DESCRIPTION.getElementName(), atts);
    String scanDesc = scan.getScanDefinition();
    hd.characters(scanDesc.toCharArray(), 0, scanDesc.length());
    hd.endElement("", "", RawDataElementName.SCAN_DESCRIPTION.getElementName());

    // <SCAN_MZ_RANGE>
    hd.startElement("", "", RawDataElementName.SCAN_MZ_RANGE.getElementName(), atts);
    Range<Double> mzRange = scan.getScanningMZRange();
    String mzRangeStr = mzRange.lowerEndpoint() + "-" + mzRange.upperEndpoint();
    hd.characters(mzRangeStr.toCharArray(), 0, mzRangeStr.length());
    hd.endElement("", "", RawDataElementName.SCAN_MZ_RANGE.getElementName());
}
From source file:org.noroomattheinn.visibletesla.Prefs.java
private Range<Long> getLoadPeriod() {
    Range<Long> range = Range.closed(Long.MIN_VALUE, Long.MAX_VALUE);

    long now = System.currentTimeMillis();
    LoadPeriod period = nameToLoadPeriod.get(loadPeriod.get());
    if (period == null) {
        period = LoadPeriod.All;
        loadPeriod.set(nameToLoadPeriod.inverse().get(period));
    }
    switch (period) {
    case None:
        range = Range.closed(now + 1000, now + 1000L); // Empty Range
        break;
    case Last7:
        range = Range.closed(now - (7 * 24 * 60 * 60 * 1000L), now);
        break;
    case Last14:
        range = Range.closed(now - (14 * 24 * 60 * 60 * 1000L), now);
        break;
    case Last30:
        range = Range.closed(now - (30 * 24 * 60 * 60 * 1000L), now);
        break;
    case ThisWeek:
        Range<Date> thisWeek = getThisWeek();
        range = Range.closed(thisWeek.lowerEndpoint().getTime(), thisWeek.upperEndpoint().getTime());
        break;
    case ThisMonth:
        Range<Date> thisMonth = getThisMonth();
        range = Range.closed(thisMonth.lowerEndpoint().getTime(), thisMonth.upperEndpoint().getTime());
        break;
    case All:
    default:
        break;
    }
    return range;
}
From source file:de.tuberlin.uebb.jdae.llmsl.ExecutableDAE.java
public ExecutableDAE(final DataLayout layout, Causalisation causalisation,
        InitializationCausalisation iCausalisation, final ContinuousEvent[] continuousEvents,
        SimulationOptions options) {
    this.options = options;
    this.logger = Logger.getLogger(this.getClass().toString());
    this.layout = layout;
    this.states = causalisation.states;
    data = layout.alloc();

    this.blocks = new IBlock[causalisation.computations.size()];
    int i = 0;
    for (TIntObjectMap<Range<Integer>> block : causalisation.computations) {
        final Set<DerivedEquation> deriveds = Sets.newHashSet();
        final TIntObjectIterator<Range<Integer>> blockIter = block.iterator();

        while (blockIter.hasNext()) {
            blockIter.advance();
            final int eq = blockIter.key();
            final Range<Integer> eqRange = blockIter.value();
            deriveds.add(new DerivedEquation(causalisation.equations[eq], eqRange.lowerEndpoint(),
                    eqRange.upperEndpoint()));
        }

        final IBlock block_i;

        /* always prepare a numerical fallback, just in case */
        final IBlock numericalSolution = new Block(data, layout, causalisation.iteratees.get(i), deriveds,
                options);

        if (causalisation.iteratees.get(i).size() == 1) {
            /* causalisation */
            final GlobalVariable var = causalisation.iteratees.get(i).iterator().next();
            final GlobalEquation eq = deriveds.iterator().next().eqn;

            if (eq.canSpecializeFor(var)) {
                block_i = eq.specializeFor(var, numericalSolution, this);
            } else {
                block_i = numericalSolution;
            }
        } else {
            block_i = numericalSolution;
        }

        blocks[i++] = block_i;
    }

    i = 0;
    this.initials = new IBlock[iCausalisation.computations.size()];
    for (Set<DerivedEquation> block : iCausalisation.computations)
        initials[i] = new Block(data, layout, iCausalisation.iteratees.get(i++), block, options);

    this.execCtxt = new ExecutionContext(0, new GlobalVariable[0], data);

    // must be last, calls methods here
    this.eventHandler = new EventEvaluator(this, continuousEvents);
}
From source file:org.openmhealth.dsu.repository.MongoDataPointRepositoryImpl.java
void addCreationTimestampCriteria(Query query, Range<OffsetDateTime> timestampRange) {
    if (timestampRange.hasLowerBound() || timestampRange.hasUpperBound()) {
        Criteria timestampCriteria = where("header.creation_date_time");

        if (timestampRange.hasLowerBound()) {
            if (timestampRange.lowerBoundType() == CLOSED) {
                timestampCriteria = timestampCriteria.gte(timestampRange.lowerEndpoint());
            } else {
                timestampCriteria = timestampCriteria.gt(timestampRange.lowerEndpoint());
            }
        }

        if (timestampRange.hasUpperBound()) {
            if (timestampRange.upperBoundType() == CLOSED) {
                timestampCriteria = timestampCriteria.lte(timestampRange.upperEndpoint());
            } else {
                timestampCriteria = timestampCriteria.lt(timestampRange.upperEndpoint());
            }
        }

        query.addCriteria(timestampCriteria);
    }
}
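The bound-type dispatch above generalizes to any query builder. A minimal, framework-free sketch of the same idea (the field name and predicate strings are purely illustrative, not Spring Data or any other API):

import com.google.common.collect.BoundType;
import com.google.common.collect.Range;

public class RangeToPredicateExample {
    // Render a Guava Range as a comparison predicate on the given field.
    static String toPredicate(String field, Range<Integer> range) {
        StringBuilder sb = new StringBuilder();
        if (range.hasLowerBound()) {
            sb.append(field)
              .append(range.lowerBoundType() == BoundType.CLOSED ? " >= " : " > ")
              .append(range.lowerEndpoint());
        }
        if (range.hasUpperBound()) {
            if (sb.length() > 0) {
                sb.append(" AND ");
            }
            sb.append(field)
              .append(range.upperBoundType() == BoundType.CLOSED ? " <= " : " < ")
              .append(range.upperEndpoint());
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(toPredicate("ts", Range.openClosed(10, 20))); // ts > 10 AND ts <= 20
        System.out.println(toPredicate("ts", Range.atLeast(5)));         // ts >= 5
    }
}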