Example usage for com.google.common.collect Range span

List of usage examples for com.google.common.collect Range span

Introduction

In this page you can find the example usage for com.google.common.collect Range span.

Prototype

public Range<C> span(Range<C> other) 

Source Link

Document

Returns the minimal range that encloses both this range and {@code other}.

Usage

From source file:org.pshdl.model.extensions.RangeExtension.java

/**
 * Determines the numeric value range a variable reference can take.
 *
 * <p>Resolution order: constant value → explicit {@code @range} annotation on the
 * variable → {@code @range} annotation on the enclosing declaration → union of a
 * for-loop's iteration ranges → width of an explicit bit slice → full value range
 * of the variable's primitive type. If none applies, a problem meta is attached
 * to {@code obj} and an absent Optional is returned.
 *
 * @param obj     the variable reference whose range is requested
 * @param context evaluation context for constant folding (may influence results)
 * @return the determined range, or {@link Optional#absent()} if it cannot be derived
 */
protected Optional<Range<BigInteger>> _determineRange(final HDLVariableRef obj,
        final HDLEvaluationContext context) {
    // A constant reference collapses to a single-point range.
    final Optional<BigInteger> constant = ConstantEvaluate.valueOf(obj, context);
    if (constant.isPresent()) {
        final BigInteger value = constant.get();
        return Optional.<Range<BigInteger>>of(RangeTool.<BigInteger>createRange(value, value));
    }
    final Optional<HDLVariable> resolved = obj.resolveVar();
    if (!resolved.isPresent()) {
        obj.<IHDLObject>addMeta(ProblemDescription.SOURCE, obj);
        obj.<ProblemDescription>addMeta(ProblemDescription.DESCRIPTION,
                ProblemDescription.VARIABLE_NOT_RESOLVED);
        return Optional.<Range<BigInteger>>absent();
    }
    // An explicit @range annotation on the variable itself wins over everything else.
    final Optional<Range<BigInteger>> annotated = HDLBuiltInAnnotationProvider.HDLBuiltInAnnotations
            .checkRangeAnnotation(
                    resolved.get().getAnnotation(HDLBuiltInAnnotationProvider.HDLBuiltInAnnotations.range),
                    new LinkedHashSet<Problem>());
    if (annotated.isPresent()) {
        return annotated;
    }
    final IHDLObject container = resolved.get().getContainer();
    if (container != null) {
        if (container instanceof HDLVariableDeclaration) {
            // A @range annotation may also sit on the enclosing declaration.
            final HDLVariableDeclaration declaration = (HDLVariableDeclaration) container;
            final Optional<Range<BigInteger>> declared = HDLBuiltInAnnotationProvider.HDLBuiltInAnnotations
                    .checkRangeAnnotation(
                            declaration.getAnnotation(
                                    HDLBuiltInAnnotationProvider.HDLBuiltInAnnotations.range),
                            new LinkedHashSet<Problem>());
            if (declared.isPresent()) {
                return declared;
            }
        }
        if (container instanceof HDLForLoop) {
            // A loop variable spans the union of all of the loop's iteration ranges.
            final HDLForLoop loop = (HDLForLoop) container;
            final Optional<Range<BigInteger>> seed = RangeExtension.rangeOf(loop.getRange().get(0), context);
            if (!seed.isPresent()) {
                return Optional.<Range<BigInteger>>absent();
            }
            Range<BigInteger> union = seed.get();
            for (final HDLRange r : loop.getRange()) {
                final Optional<Range<BigInteger>> rRange = RangeExtension.rangeOf(r, context);
                if (rRange.isPresent()) {
                    union = union.span(rRange.get());
                }
                // Non-evaluable ranges are skipped, as in the original implementation.
            }
            return Optional.<Range<BigInteger>>of(union);
        }
    }
    // An explicit bit slice is bounded by the total width of the selected bits:
    // [0, 2^width - 1].
    if (obj.getBits().size() > 0) {
        BigInteger totalWidth = BigInteger.ZERO;
        for (final HDLRange bit : obj.getBits()) {
            final Optional<BigInteger> width = ConstantEvaluate.valueOf(bit.getWidth().copyDeepFrozen(bit),
                    context);
            if (!width.isPresent()) {
                totalWidth = null; // any non-constant width poisons the whole sum
            } else if (totalWidth != null) {
                totalWidth = totalWidth.add(width.get());
            }
        }
        if (totalWidth != null) {
            final BigInteger max = BigInteger.ONE.shiftLeft(totalWidth.intValue()).subtract(BigInteger.ONE);
            return Optional.<Range<BigInteger>>of(RangeTool.<BigInteger>createRange(BigInteger.ZERO, max));
        }
    }
    // Fall back to the full value range of the variable's primitive type.
    final Optional<? extends HDLType> type = TypeExtension.typeOf(resolved.get());
    if (type.isPresent() && (type.get() instanceof HDLPrimitive)) {
        return HDLPrimitives.getInstance().getValueRange((HDLPrimitive) type.get(), context);
    }
    obj.<IHDLObject>addMeta(ProblemDescription.SOURCE, obj);
    obj.<ProblemDescription>addMeta(ProblemDescription.DESCRIPTION,
            ProblemDescription.NON_PRIMITVE_TYPE_NOT_EVALUATED);
    return Optional.<Range<BigInteger>>absent();
}

From source file:net.sf.mzmine.modules.peaklistmethods.peakpicking.smoothing.SmoothingTask.java

/**
 * Smooths every peak of the original peak list with a Savitzky-Golay filter and
 * publishes the result as a new peak list.
 *
 * <p>For each peak the per-scan intensities are convolved with the normalized
 * filter weights, the smoothed profile is re-measured (maximum, intensity range,
 * trapezoidal area over retention time), and a new feature is added to the output
 * row. Peaks whose smoothed profile has no positive intensity are dropped.
 */
@Override
public void run() {

    setStatus(TaskStatus.PROCESSING);

    try {
        // Get filter weights.
        final double[] filterWeights = SavitzkyGolayFilter.getNormalizedWeights(filterWidth);

        // Create new peak list
        newPeakList = new SimplePeakList(origPeakList + " " + suffix, origPeakList.getRawDataFiles());
        int peakID = 1;

        // Process each row.
        for (final PeakListRow row : origPeakList.getRows()) {

            if (!isCanceled()) {

                // Create a new peak-list row.
                final PeakListRow newRow = new SimplePeakListRow(peakID++);

                // Process each peak.
                for (final Feature peak : row.getPeaks()) {

                    if (!isCanceled()) {

                        // Copy original peak intensities; scans with no data point
                        // contribute zero intensity.
                        final int[] scanNumbers = peak.getScanNumbers();
                        final int numScans = scanNumbers.length;
                        final double[] intensities = new double[numScans];
                        for (int i = 0; i < numScans; i++) {

                            final DataPoint dataPoint = peak.getDataPoint(scanNumbers[i]);
                            intensities[i] = dataPoint == null ? 0.0 : dataPoint.getIntensity();
                        }

                        // Smooth peak.
                        final double[] smoothed = convolve(intensities, filterWeights);

                        // Measure peak (max, ranges, area etc.)
                        final RawDataFile dataFile = peak.getDataFile();
                        final DataPoint[] newDataPoints = new DataPoint[numScans];
                        double maxIntensity = 0.0;
                        int maxScanNumber = -1;
                        DataPoint maxDataPoint = null;
                        Range<Double> intensityRange = null;
                        double area = 0.0;
                        for (int i = 0; i < numScans; i++) {

                            final int scanNumber = scanNumbers[i];
                            final DataPoint dataPoint = peak.getDataPoint(scanNumber);
                            final double intensity = smoothed[i];
                            if (dataPoint != null && intensity > 0.0) {

                                // Create a new data point.
                                final double mz = dataPoint.getMZ();
                                final double rt = dataFile.getScan(scanNumber).getRetentionTime();
                                final DataPoint newDataPoint = new SimpleDataPoint(mz, intensity);
                                newDataPoints[i] = newDataPoint;

                                // Track maximum intensity data point.
                                if (intensity > maxIntensity) {

                                    maxIntensity = intensity;
                                    maxScanNumber = scanNumber;
                                    maxDataPoint = newDataPoint;
                                }

                                // Update ranges.
                                if (intensityRange == null) {
                                    intensityRange = Range.singleton(intensity);
                                } else {
                                    intensityRange = intensityRange.span(Range.singleton(intensity));
                                }

                                // Accumulate peak area (trapezoid between this and
                                // the previous scan).
                                if (i != 0) {

                                    final DataPoint lastDP = newDataPoints[i - 1];
                                    final double lastIntensity = lastDP == null ? 0.0 : lastDP.getIntensity();
                                    final double lastRT = dataFile.getScan(scanNumbers[i - 1])
                                            .getRetentionTime();
                                    area += (rt - lastRT) * (intensity + lastIntensity) / 2.0;
                                }
                            }
                        }

                        // BUG FIX: the former unconditional `assert maxDataPoint != null`
                        // ran BEFORE this guard and threw an AssertionError (with -ea)
                        // for peaks whose smoothed profile has no positive intensity,
                        // even though the guard below correctly skips them. The
                        // assertion only holds once maxScanNumber >= 0 proves at least
                        // one data point was recorded.
                        if (!isCanceled() && maxScanNumber >= 0) {

                            assert maxDataPoint != null;

                            // Create a new peak.
                            newRow.addPeak(dataFile, new SimpleFeature(dataFile, maxDataPoint.getMZ(),
                                    peak.getRT(), maxIntensity, area, scanNumbers, newDataPoints,
                                    peak.getFeatureStatus(), maxScanNumber,
                                    peak.getMostIntenseFragmentScanNumber(), peak.getRawDataPointsRTRange(),
                                    peak.getRawDataPointsMZRange(), intensityRange));
                        }
                    }
                }
                newPeakList.addRow(newRow);
                progress++;
            }
        }

        // Finish up.
        if (!isCanceled()) {

            // Add new peak-list to the project.
            project.addPeakList(newPeakList);

            // Add quality parameters to peaks
            QualityParameters.calculateQualityParameters(newPeakList);

            // Remove the original peak-list if requested.
            if (removeOriginal) {
                project.removePeakList(origPeakList);
            }

            // Copy previously applied methods
            for (final PeakListAppliedMethod method : origPeakList.getAppliedMethods()) {

                newPeakList.addDescriptionOfAppliedTask(method);
            }

            // Add task description to peak-list.
            newPeakList.addDescriptionOfAppliedTask(
                    new SimplePeakListAppliedMethod("Peaks smoothed by Savitzky-Golay filter", parameters));

            LOG.finest("Finished peak smoothing: " + progress + " rows processed");

            setStatus(TaskStatus.FINISHED);
        }
    } catch (Throwable t) {

        LOG.log(Level.SEVERE, "Smoothing error", t);
        setErrorMessage(t.getMessage());
        setStatus(TaskStatus.ERROR);
    }
}

From source file:net.sf.mzmine.modules.peaklistmethods.identification.camera.CameraSearchTask.java

/**
 * Perform CAMERA search.
 *
 * <p>Builds an xcmsRaw/xcmsSet pair inside an R session from the peak list's data
 * points, runs CAMERA's groupFWHM / findIsotopes / groupCorr pipeline, and attaches
 * the resulting pseudo-spectrum and isotope annotations to the peaks. Errors are
 * reported through the task's error message/status rather than thrown.
 *
 * @param rawFile
 *            raw data file of peak list to process.
 */
private void cameraSearch(final RawDataFile rawFile) {

    LOG.finest("Detecting peaks.");

    errorMsg = null;
    try {

        String[] reqPackages = { "CAMERA" };
        String[] reqPackagesVersions = { CAMERA_VERSION };
        this.rSession = new RSessionWrapper("Camera search feature", reqPackages, reqPackagesVersions);
        this.rSession.open();

        // Create empty peaks matrix.
        this.rSession.eval(
                "columnHeadings <- c('mz','mzmin','mzmax','rt','rtmin','rtmax','into','intb','maxo','sn')");
        this.rSession.eval("peaks <- matrix(nrow=0, ncol=length(columnHeadings))");
        this.rSession.eval("colnames(peaks) <- columnHeadings");

        // Initialize.
        final Feature[] peaks = peakList.getPeaks(rawFile);
        progress = 0.0;

        // Initialize scan map.
        final Map<Scan, Set<DataPoint>> peakDataPointsByScan = new HashMap<Scan, Set<DataPoint>>(
                rawFile.getNumOfScans(MS_LEVEL));
        int dataPointCount = 0;
        for (final int scanNumber : rawFile.getScanNumbers(MS_LEVEL)) {

            // Create a set to hold data points (sorted by m/z).
            final Set<DataPoint> dataPoints = new TreeSet<DataPoint>(ASCENDING_MASS_SORTER);

            // Add a dummy data point (presumably so every scan contributes at
            // least one point to the R vectors built below — TODO confirm).
            dataPoints.add(new SimpleDataPoint(0.0, 0.0));
            dataPointCount++;

            // Map the set.
            peakDataPointsByScan.put(rawFile.getScan(scanNumber), dataPoints);
        }

        // Add peaks.
        // 80 percents for building peaks list.
        double progressInc = 0.8 / (double) peaks.length;
        for (final Feature peak : peaks) {

            // Get peak data.
            Range<Double> rtRange = null;
            Range<Double> intRange = null;
            final double mz = peak.getMZ();

            // Get the peak's data points per scan.
            for (final int scanNumber : peak.getScanNumbers()) {

                final Scan scan = rawFile.getScan(scanNumber);
                if (scan.getMSLevel() != MS_LEVEL) {

                    throw new IllegalStateException(
                            "CAMERA can only process peak lists from MS-level " + MS_LEVEL);
                }

                // Copy the data point.
                final DataPoint dataPoint = peak.getDataPoint(scanNumber);
                if (dataPoint != null) {

                    final double intensity = dataPoint.getIntensity();
                    peakDataPointsByScan.get(scan).add(new SimpleDataPoint(mz, intensity));
                    dataPointCount++;

                    // Update RT & intensity range (span grows the range to
                    // enclose each new singleton value).
                    final double rt = scan.getRetentionTime();
                    if (rtRange == null) {
                        rtRange = Range.singleton(rt);
                        intRange = Range.singleton(intensity);
                    } else {
                        rtRange = rtRange.span(Range.singleton(rt));
                        intRange = intRange.span(Range.singleton(intensity));
                    }

                }
            }

            // Set peak values; fall back to the peak's own measurements when no
            // data points were found.
            final double area = peak.getArea();
            final double maxo = intRange == null ? peak.getHeight() : intRange.upperEndpoint();
            final double rtMin = (rtRange == null ? peak.getRawDataPointsRTRange() : rtRange).lowerEndpoint();
            final double rtMax = (rtRange == null ? peak.getRawDataPointsRTRange() : rtRange).upperEndpoint();

            // Add peak row.
            this.rSession.eval("peaks <- rbind(peaks, c(" + mz + ", " // mz
                    + mz + ", " // mzmin: use the same as mz.
                    + mz + ", " // mzmax: use the same as mz.
                    + peak.getRT() + ", " // rt
                    + rtMin + ", " // rtmin
                    + rtMax + ", " // rtmax
                    + area + ", " // into: peak area.
                    + area + ", " // intb: doesn't affect result, use area.
                    + maxo + ", " // maxo
                    + SIGNAL_TO_NOISE + "))", false);

            progress += progressInc;
        }

        // 20 percents (5*4) for building pseudo-isotopes groups.
        progressInc = 0.05;

        // Create R vectors.
        final int scanCount = peakDataPointsByScan.size();
        final double[] scanTimes = new double[scanCount];
        final int[] scanIndices = new int[scanCount];
        final double[] masses = new double[dataPointCount];
        final double[] intensities = new double[dataPointCount];

        // Fill vectors. scanIndices holds 1-based offsets into the point
        // arrays, as expected by xcms.
        int scanIndex = 0;
        int pointIndex = 0;
        for (final int scanNumber : rawFile.getScanNumbers(MS_LEVEL)) {

            final Scan scan = rawFile.getScan(scanNumber);
            scanTimes[scanIndex] = scan.getRetentionTime();
            scanIndices[scanIndex] = pointIndex + 1;
            scanIndex++;

            for (final DataPoint dataPoint : peakDataPointsByScan.get(scan)) {

                masses[pointIndex] = dataPoint.getMZ();
                intensities[pointIndex] = dataPoint.getIntensity();
                pointIndex++;
            }
        }

        // Set vectors.
        this.rSession.assign("scantime", scanTimes);
        this.rSession.assign("scanindex", scanIndices);
        this.rSession.assign("mass", masses);
        this.rSession.assign("intensity", intensities);

        // Construct xcmsRaw object
        this.rSession.eval("xRaw <- new(\"xcmsRaw\")");
        this.rSession.eval("xRaw@tic <- intensity");
        this.rSession.eval("xRaw@scantime <- scantime * " + SECONDS_PER_MINUTE);
        this.rSession.eval("xRaw@scanindex <- scanindex");
        this.rSession.eval("xRaw@env$mz <- mass");
        this.rSession.eval("xRaw@env$intensity <- intensity");

        // Create the xcmsSet object.
        this.rSession.eval("xs <- new(\"xcmsSet\")");

        // Set peaks.
        this.rSession.eval("xs@peaks <- peaks");

        // Set file (dummy) file path.
        this.rSession.eval("xs@filepaths  <- ''");

        // Set sample name.
        this.rSession.assign("sampleName", peakList.getName());
        this.rSession.eval("sampnames(xs) <- sampleName");

        // Create an empty xsAnnotate.
        this.rSession.eval("an <- xsAnnotate(xs, sample=1)");

        // Group by RT.
        this.rSession.eval("an <- groupFWHM(an, sigma=" + fwhmSigma + ", perfwhm=" + fwhmPercentage + ')');
        progress += progressInc;

        // Identify isotopes.
        this.rSession.eval("an <- findIsotopes(an, maxcharge=" + isoMaxCharge + ", maxiso=" + isoMaxCount
                + ", ppm=" + isoMassTolerance.getPpmTolerance() + ", mzabs=" + isoMassTolerance.getMzTolerance()
                + ')');
        progress += progressInc;

        // Split groups by correlating peak shape (need to set xraw to raw
        // data).
        this.rSession.eval("an <- groupCorr(an, calcIso=TRUE, xraw=xRaw, cor_eic_th=" + corrThreshold
                + ", pval=" + corrPValue + ')');
        progress += progressInc;

        // Get the peak list.
        this.rSession.eval("peakList <- getPeaklist(an)");

        // Extract the pseudo-spectra and isotope annotations from the peak
        // list.
        rSession.eval("pcgroup <- as.integer(peakList$pcgroup)");
        rSession.eval("isotopes <- peakList$isotopes");
        final int[] spectra = (int[]) rSession.collect("pcgroup");
        final String[] isotopes = (String[]) rSession.collect("isotopes");

        // Add identities.
        if (spectra != null) {

            addPseudoSpectraIdentities(peaks, spectra, isotopes);
        }
        progress += progressInc;
        // Turn off R instance, once task ended gracefully.
        if (!this.userCanceled)
            this.rSession.close(false);

    } catch (RSessionWrapperException e) {
        if (!this.userCanceled) {
            errorMsg = "'R computing error' during CAMERA search. \n" + e.getMessage();
            e.printStackTrace();
        }
    } catch (Exception e) {
        if (!this.userCanceled) {
            errorMsg = "'Unknown error' during CAMERA search. \n" + e.getMessage();
            e.printStackTrace();
        }
    }

    // Turn off R instance, once task ended UNgracefully.
    // BUG FIX: guard against rSession being null — if the RSessionWrapper
    // constructor itself threw, the close() call here would raise a
    // NullPointerException (uncaught, since it is not an
    // RSessionWrapperException) and mask the original error message.
    try {
        if (!this.userCanceled && this.rSession != null)
            this.rSession.close(this.userCanceled);
    } catch (RSessionWrapperException e) {
        if (!this.userCanceled) {
            // Do not override potential previous error message.
            if (errorMsg == null) {
                errorMsg = e.getMessage();
            }
        } else {
            // User canceled: Silent.
        }
    }

    // Report error.
    if (errorMsg != null) {
        setErrorMessage(errorMsg);
        setStatus(TaskStatus.ERROR);
    }
}