Example usage for com.google.common.collect Range singleton

List of usage examples for com.google.common.collect Range singleton

Introduction

This page collects usage examples for com.google.common.collect.Range#singleton, drawn from open-source projects.

Prototype

public static <C extends Comparable<?>> Range<C> singleton(C value) 

Document

Returns a range that contains only the given value (see Range#contains(Comparable)).
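
Before the project examples, here is a minimal, self-contained sketch (illustrative code, not taken from any of the projects below): a singleton range is a closed range whose two endpoints are the same value, so it contains that value and nothing else.

import com.google.common.collect.BoundType;
import com.google.common.collect.Range;

public class RangeSingletonDemo {
    public static void main(String[] args) {
        // [5..5]: a degenerate closed range holding exactly one value.
        Range<Integer> point = Range.singleton(5);

        System.out.println(point.contains(5));   // true
        System.out.println(point.contains(6));   // false
        System.out.println(point.lowerEndpoint() == 5 && point.upperEndpoint() == 5); // true
        System.out.println(point.lowerBoundType() == BoundType.CLOSED);               // true

        // Equivalent to Range.closed(5, 5).
        System.out.println(point.equals(Range.closed(5, 5))); // true
    }
}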

Usage

From source file:io.druid.query.filter.InDimFilter.java

@Override
public RangeSet<String> getDimensionRangeSet(String dimension) {
    if (!Objects.equals(getDimension(), dimension) || getExtractionFn() != null) {
        return null;
    }
    RangeSet<String> retSet = TreeRangeSet.create();
    for (String value : values) {
        retSet.add(Range.singleton(Strings.nullToEmpty(value)));
    }
    return retSet;
}

From source file:net.sf.mzmine.modules.masslistmethods.chromatogrambuilder.Chromatogram.java

public void finishChromatogram() {

    int allScanNumbers[] = Ints.toArray(dataPointsMap.keySet());
    Arrays.sort(allScanNumbers);

    // Calculate median m/z
    double allMzValues[] = new double[allScanNumbers.length];
    for (int i = 0; i < allScanNumbers.length; i++) {
        allMzValues[i] = dataPointsMap.get(allScanNumbers[i]).getMZ();
    }
    mz = MathUtils.calcQuantile(allMzValues, 0.5f);

    // Update raw data point ranges, height, rt and representative scan
    height = Double.MIN_VALUE;
    for (int i = 0; i < allScanNumbers.length; i++) {

        DataPoint mzPeak = dataPointsMap.get(allScanNumbers[i]);

        // Replace the MzPeak instance with an instance of SimpleDataPoint,
        // to reduce the memory usage. After we finish this Chromatogram, we
        // don't need the additional data provided by the MzPeak

        dataPointsMap.put(allScanNumbers[i], mzPeak);

        if (i == 0) {
            rawDataPointsIntensityRange = Range.singleton(mzPeak.getIntensity());
            rawDataPointsMZRange = Range.singleton(mzPeak.getMZ());
        } else {
            rawDataPointsIntensityRange = rawDataPointsIntensityRange
                    .span(Range.singleton(mzPeak.getIntensity()));
            rawDataPointsMZRange = rawDataPointsMZRange.span(Range.singleton(mzPeak.getMZ()));
        }

        if (height < mzPeak.getIntensity()) {
            height = mzPeak.getIntensity();
            rt = dataFile.getScan(allScanNumbers[i]).getRetentionTime();
            representativeScan = allScanNumbers[i];
        }
    }

    // Update area
    area = 0;
    for (int i = 1; i < allScanNumbers.length; i++) {
        // For area calculation, we use retention time in seconds
        double previousRT = dataFile.getScan(allScanNumbers[i - 1]).getRetentionTime() * 60d;
        double currentRT = dataFile.getScan(allScanNumbers[i]).getRetentionTime() * 60d;
        double previousHeight = dataPointsMap.get(allScanNumbers[i - 1]).getIntensity();
        double currentHeight = dataPointsMap.get(allScanNumbers[i]).getIntensity();
        area += (currentRT - previousRT) * (currentHeight + previousHeight) / 2;
    }

    // Update fragment scan
    fragmentScan = ScanUtils.findBestFragmentScan(dataFile, dataFile.getDataRTRange(1), rawDataPointsMZRange);

    if (fragmentScan > 0) {
        Scan fragmentScanObject = dataFile.getScan(fragmentScan);
        int precursorCharge = fragmentScanObject.getPrecursorCharge();
        if (precursorCharge > 0)
            this.charge = precursorCharge;
    }

    // Victor Trevio
    // using allScanNumbers:
    // rawDataPointsRTRange = new Range(dataFile.getScan(allScanNumbers[0]).getRetentionTime(),
    //         dataFile.getScan(allScanNumbers[allScanNumbers.length - 1]).getRetentionTime());
    rawDataPointsRTRange = Range.closed(minTime, maxTime); // using the "cached" values

    // Discard the fields we don't need anymore
    buildingSegment = null;
    lastMzPeak = null;

}
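
The pattern above, seeding a range with Range.singleton for the first data point and then widening it with span for each later one, is a common way to track the minimum and maximum of a stream of values. A condensed sketch of the same idea (hypothetical values, not from the MZmine sources); when the values are already in a collection, Range.encloseAll builds the same bounding range in one call.

import com.google.common.collect.Range;

import java.util.Arrays;

public class BoundingRangeDemo {
    public static void main(String[] args) {
        double[] intensities = { 3.2, 1.5, 7.8 };

        // Grow a bounding range one value at a time, as in finishChromatogram().
        Range<Double> bounds = null;
        for (double v : intensities) {
            Range<Double> point = Range.singleton(v);
            bounds = (bounds == null) ? point : bounds.span(point);
        }
        System.out.println(bounds); // a closed range from 1.5 to 7.8

        // Equivalent result for values already held in a collection.
        System.out.println(bounds.equals(Range.encloseAll(Arrays.asList(3.2, 1.5, 7.8)))); // true
    }
}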

From source file:net.sf.mzmine.modules.peaklistmethods.peakpicking.smoothing.SmoothingTask.java

@Override
public void run() {

    setStatus(TaskStatus.PROCESSING);

    try {
        // Get filter weights.
        final double[] filterWeights = SavitzkyGolayFilter.getNormalizedWeights(filterWidth);

        // Create new peak list
        newPeakList = new SimplePeakList(origPeakList + " " + suffix, origPeakList.getRawDataFiles());
        int peakID = 1;

        // Process each row.
        for (final PeakListRow row : origPeakList.getRows()) {

            if (!isCanceled()) {

                // Create a new peak-list row.
                final PeakListRow newRow = new SimplePeakListRow(peakID++);

                // Process each peak.
                for (final Feature peak : row.getPeaks()) {

                    if (!isCanceled()) {

                        // Copy original peak intensities.
                        final int[] scanNumbers = peak.getScanNumbers();
                        final int numScans = scanNumbers.length;
                        final double[] intensities = new double[numScans];
                        for (int i = 0; i < numScans; i++) {

                            final DataPoint dataPoint = peak.getDataPoint(scanNumbers[i]);
                            intensities[i] = dataPoint == null ? 0.0 : dataPoint.getIntensity();
                        }

                        // Smooth peak.
                        final double[] smoothed = convolve(intensities, filterWeights);

                        // Measure peak (max, ranges, area etc.)
                        final RawDataFile dataFile = peak.getDataFile();
                        final DataPoint[] newDataPoints = new DataPoint[numScans];
                        double maxIntensity = 0.0;
                        int maxScanNumber = -1;
                        DataPoint maxDataPoint = null;
                        Range<Double> intensityRange = null;
                        double area = 0.0;
                        for (int i = 0; i < numScans; i++) {

                            final int scanNumber = scanNumbers[i];
                            final DataPoint dataPoint = peak.getDataPoint(scanNumber);
                            final double intensity = smoothed[i];
                            if (dataPoint != null && intensity > 0.0) {

                                // Create a new data point.
                                final double mz = dataPoint.getMZ();
                                final double rt = dataFile.getScan(scanNumber).getRetentionTime();
                                final DataPoint newDataPoint = new SimpleDataPoint(mz, intensity);
                                newDataPoints[i] = newDataPoint;

                                // Track maximum intensity data point.
                                if (intensity > maxIntensity) {

                                    maxIntensity = intensity;
                                    maxScanNumber = scanNumber;
                                    maxDataPoint = newDataPoint;
                                }

                                // Update ranges.
                                if (intensityRange == null) {
                                    intensityRange = Range.singleton(intensity);
                                } else {
                                    intensityRange = intensityRange.span(Range.singleton(intensity));
                                }

                                // Accumulate peak area.
                                if (i != 0) {

                                    final DataPoint lastDP = newDataPoints[i - 1];
                                    final double lastIntensity = lastDP == null ? 0.0 : lastDP.getIntensity();
                                    final double lastRT = dataFile.getScan(scanNumbers[i - 1])
                                            .getRetentionTime();
                                    area += (rt - lastRT) * (intensity + lastIntensity) / 2.0;
                                }
                            }
                        }

                        assert maxDataPoint != null;

                        if (!isCanceled() && maxScanNumber >= 0) {

                            // Create a new peak.
                            newRow.addPeak(dataFile, new SimpleFeature(dataFile, maxDataPoint.getMZ(),
                                    peak.getRT(), maxIntensity, area, scanNumbers, newDataPoints,
                                    peak.getFeatureStatus(), maxScanNumber,
                                    peak.getMostIntenseFragmentScanNumber(), peak.getRawDataPointsRTRange(),
                                    peak.getRawDataPointsMZRange(), intensityRange));
                        }
                    }
                }
                newPeakList.addRow(newRow);
                progress++;
            }
        }

        // Finish up.
        if (!isCanceled()) {

            // Add new peak-list to the project.
            project.addPeakList(newPeakList);

            // Add quality parameters to peaks
            QualityParameters.calculateQualityParameters(newPeakList);

            // Remove the original peak-list if requested.
            if (removeOriginal) {
                project.removePeakList(origPeakList);
            }

            // Copy previously applied methods
            for (final PeakListAppliedMethod method : origPeakList.getAppliedMethods()) {

                newPeakList.addDescriptionOfAppliedTask(method);
            }

            // Add task description to peak-list.
            newPeakList.addDescriptionOfAppliedTask(
                    new SimplePeakListAppliedMethod("Peaks smoothed by Savitzky-Golay filter", parameters));

            LOG.finest("Finished peak smoothing: " + progress + " rows processed");

            setStatus(TaskStatus.FINISHED);
        }
    } catch (Throwable t) {

        LOG.log(Level.SEVERE, "Smoothing error", t);
        setErrorMessage(t.getMessage());
        setStatus(TaskStatus.ERROR);
    }
}

From source file:net.sf.mzmine.project.impl.StorableScan.java

void updateValues() {
    DataPoint dataPoints[] = getDataPoints();

    // find m/z range and base peak
    if (dataPoints.length > 0) {

        basePeak = dataPoints[0];
        mzRange = Range.singleton(dataPoints[0].getMZ());
        double tic = 0;

        for (DataPoint dp : dataPoints) {

            if (dp.getIntensity() > basePeak.getIntensity())
                basePeak = dp;

            mzRange = mzRange.span(Range.singleton(dp.getMZ()));

            tic += dp.getIntensity();

        }

        totalIonCurrent = Double.valueOf(tic);

    } else {
        mzRange = Range.singleton(0.0);
        totalIonCurrent = Double.valueOf(0);
    }
}

From source file:org.apache.druid.query.filter.InDimFilter.java

@Override
public RangeSet<String> getDimensionRangeSet(String dimension) {
    if (!Objects.equals(getDimension(), dimension) || getExtractionFn() != null) {
        return null;
    }
    RangeSet<String> retSet = TreeRangeSet.create();
    for (String value : values) {
        String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
        if (valueEquivalent == null) {
            // Case when SQL compatible null handling is enabled
            // Range.singleton(null) is invalid, so use the fact that
            // only null values are less than empty string.
            retSet.add(Range.lessThan(""));
        } else {
            retSet.add(Range.singleton(valueEquivalent));
        }
    }
    return retSet;
}
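
The branch above exists because Range.singleton rejects null endpoints, so a null dimension value cannot be represented as a singleton range; the filter instead uses Range.lessThan(""), relying on the fact that in Druid's ordering only null sorts below the empty string. A tiny illustration of the constraint (hypothetical snippet, not from the Druid sources):

import com.google.common.collect.Range;

public class NullEndpointDemo {
    public static void main(String[] args) {
        // Guava range endpoints must be non-null; this line would throw
        // NullPointerException if uncommented.
        // Range<String> bad = Range.<String>singleton(null);

        // The workaround used above: everything strictly below the empty string.
        Range<String> belowEmpty = Range.lessThan("");
        System.out.println(belowEmpty.contains(""));    // false
        System.out.println(belowEmpty.contains("foo")); // false
    }
}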

From source file:com.cinchapi.concourse.server.concurrent.RangeLockService.java

/**
 * Return {@code true} if an attempt to used {@code token} for a
 * {@code type} lock is range blocked. Range blocking occurs when there is
 * another READ or WRITE happening such that allowing the proposed operation
 * to proceed could lead to inconsistent results (i.e. I want to write X but
 * there is a READ trying to find all values less than Y).
 *
 * @param type
 * @param token
 * @return {@code true} if range blocked
 */
protected final boolean isRangeBlocked(LockType type, RangeToken token) {
    Value value = token.getValues()[0];
    if (type == LockType.READ) {
        Preconditions.checkArgument(token.getOperator() != null);
        switch (token.getOperator()) {
        case EQUALS:
            return info.writes(token.getKey()).contains(value);
        case NOT_EQUALS:
            return info.writes(token.getKey()).size() > 1 || (info.writes(token.getKey()).size() == 1
                    && !info.writes(token.getKey()).contains(value));
        default:
            Iterator<Value> it = info.writes(token.getKey()).iterator();
            while (it.hasNext()) {
                Iterable<Range<Value>> ranges = RangeTokens.convertToRange(token);
                Value current = it.next();
                Range<Value> point = Range.singleton(current);
                for (Range<Value> range : ranges) {
                    RangeReadWriteLock lock = null;
                    if (range.isConnected(point) && !range.intersection(point).isEmpty()
                            && (lock = locks.get(RangeToken.forWriting(token.getKey(), current))) != null
                            && !lock.isWriteLockedByCurrentThread()) {
                        return true;
                    }
                }
            }
            return false;
        }
    } else {
        // If I want to WRITE X, I am blocked if there is a READ that
        // touches X (e.g. direct read for X or a range read that includes
        // X)
        return info.reads(token.getKey()).contains(value);

    }
}
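
In the default branch above, Range.singleton turns each written value into a degenerate point range, so "is this write inside the range being read?" becomes a range-intersection test. The core check in isolation (a sketch with made-up values):

import com.google.common.collect.Range;

public class PointIntersectionDemo {
    public static void main(String[] args) {
        Range<Integer> readRange = Range.lessThan(10); // e.g. a query for all values < 10
        Range<Integer> point = Range.singleton(7);     // a single written value

        // Connected with a non-empty intersection means the point lies inside the range;
        // the isEmpty() check rules out ranges that merely touch at an open endpoint.
        boolean blocked = readRange.isConnected(point) && !readRange.intersection(point).isEmpty();
        System.out.println(blocked);                // true
        System.out.println(readRange.contains(7));  // same answer for a plain membership test
    }
}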

From source file:edu.mit.streamjit.impl.compiler2.Storage.java

/**
 * Returns a set containing the indices live before the initialization
 * schedule; that is, the indices holding initial data. (Note that, as a
 * span, not every contained index will be occupied.) The returned range
 * will be
 * {@link Range#canonical(com.google.common.collect.DiscreteDomain) canonical}.
 * The range is not cached so as to be responsive to changes in initial data
 * index functions.
 * @return a range spanning the indices holding initial data under the
 * current index functions
 * @see #initialDataIndices()
 */
public Range<Integer> initialDataIndexSpan() {
    Range<Integer> range = null;
    for (Pair<ImmutableList<Object>, IndexFunction> p : initialData())
        for (int i = 0; i < p.first.size(); ++i)
            try {
                int x = p.second.applyAsInt(i);
                range = (range == null) ? Range.singleton(x) : range.span(Range.singleton(x));
            } catch (Throwable ex) {
                throw new AssertionError("index functions should not throw", ex);
            }
    range = (range != null ? range : Range.closedOpen(0, 0));
    return range.canonical(DiscreteDomain.integers());
}
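
Two details are worth spelling out: Range.closedOpen(0, 0) stands in as an explicit empty range when there is no initial data, and canonical(DiscreteDomain.integers()) normalizes the singleton-and-span result into closed-open form, so a single index x comes back as [x..x+1). A small illustration (assumed values, not from the StreamJIT sources):

import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.Range;

public class CanonicalDemo {
    public static void main(String[] args) {
        // [5..5] canonicalized over the integers becomes the closed-open range [5..6).
        Range<Integer> canonical = Range.singleton(5).canonical(DiscreteDomain.integers());
        System.out.println(canonical.equals(Range.closedOpen(5, 6))); // true

        // An explicitly empty range, used above when there is no initial data.
        System.out.println(Range.closedOpen(0, 0).isEmpty()); // true
    }
}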

From source file:net.sf.mzmine.modules.visualization.spectra.ExportSpectraTask.java

/**
 * Export the chromatogram - mzML format
 *
 * @throws MSDKException
 *             if there are i/o problems.
 */

public void exportmzML() throws MSDKException {

    // Initialize objects
    DataPointStore store = DataPointStoreFactory.getMemoryDataStore();
    RawDataFile inputFile = MSDKObjectBuilder.getRawDataFile("MZmine2 mzML export", exportFile, FileType.MZML,
            store);

    // Get data from MZmine2 style scan
    Integer scanNum = scan.getScanNumber();
    Integer msLevel = scan.getMSLevel();
    DataPoint[] dp = scan.getDataPoints();
    PolarityType polarity = scan.getPolarity();
    Double precursorMZ = scan.getPrecursorMZ();
    String scanDefinition = scan.getScanDefinition();
    Integer precursorCharge = scan.getPrecursorCharge();

    // GUI progress bar updating
    progressMax = dp.length;

    // Initialize MSDK style Scan
    MsFunction dummyFunction = MSDKObjectBuilder.getMsFunction(msLevel);
    MsScan MSDKscan = MSDKObjectBuilder.getMsScan(store, scanNum, dummyFunction);

    // Iterate & convert from MZmine2 style to MSDK style
    double mzValues[] = new double[dp.length];
    float intensityValues[] = new float[dp.length];
    for (int i = 0; i < dp.length; i++) {
        mzValues[i] = dp[i].getMZ();
        intensityValues[i] = (float) dp[i].getIntensity();
        // GUI progress bar updating
        progress += 1;
    }

    // Put the data in the scan
    MSDKscan.setDataPoints(mzValues, intensityValues, mzValues.length);

    // Parse if data is profile vs centroid
    MassSpectrumType t = scan.getSpectrumType();
    if (t == MassSpectrumType.CENTROIDED)
        MSDKscan.setSpectrumType(MsSpectrumType.CENTROIDED);
    else
        MSDKscan.setSpectrumType(MsSpectrumType.PROFILE);

    // Parse polarity of data from mzMine2 style to MSDK style
    if (polarity.equals(PolarityType.POSITIVE))
        MSDKscan.setPolarity(io.github.msdk.datamodel.rawdata.PolarityType.POSITIVE);
    else if (polarity.equals(PolarityType.NEGATIVE))
        MSDKscan.setPolarity(io.github.msdk.datamodel.rawdata.PolarityType.NEGATIVE);
    else
        MSDKscan.setPolarity(io.github.msdk.datamodel.rawdata.PolarityType.UNKNOWN);

    // Parse precursor from mzMine2 style to MSDK style
    // Compare against a double literal; equals(0f) can never match a Double.
    if (!precursorMZ.equals(0d)) {
        List<IsolationInfo> MSDKprecursor = MSDKscan.getIsolations();
        IsolationInfo MSDKisolationInfo = MSDKObjectBuilder.getIsolationInfo(Range.singleton(precursorMZ), null,
                precursorMZ, precursorCharge, null);
        MSDKprecursor.add(MSDKisolationInfo);
    }

    // Parse scanDefinition to MSDK style
    MSDKscan.setScanDefinition(scanDefinition);

    inputFile.addScan(MSDKscan);

    // Actually write to disk
    MzMLFileExportMethod method = new MzMLFileExportMethod(inputFile, exportFile);
    method.execute();
}

From source file:io.github.msdk.io.mzdata.MzDataSaxHandler.java

/**
 * {@inheritDoc}
 *
 * endElement()
 */
@SuppressWarnings("null")
public void endElement(String namespaceURI, String sName, String qName) throws SAXException {

    if (canceled)
        throw new SAXException("Parsing Cancelled");

    // <spectrumInstrument>
    if (qName.equalsIgnoreCase("spectrumInstrument")) {
        spectrumInstrumentFlag = false;
    }

    // <precursor>
    if (qName.equalsIgnoreCase("precursor")) {
        precursorFlag = false;
    }

    // <spectrum>
    if (qName.equalsIgnoreCase("spectrum")) {

        spectrumInstrumentFlag = false;

        // Auto-detect whether this scan is centroided
        MsSpectrumType spectrumType = SpectrumTypeDetectionAlgorithm.detectSpectrumType(mzBuffer,
                intensityBuffer, peaksCount);

        // Create a new scan
        MsFunction msFunction = MSDKObjectBuilder.getMsFunction(msLevel);

        MsScan newScan = MSDKObjectBuilder.getMsScan(dataStore, scanNumber, msFunction);

        newScan.setDataPoints(mzBuffer, intensityBuffer, peaksCount);
        newScan.setSpectrumType(spectrumType);
        newScan.setPolarity(polarity);

        if (retentionTime != null) {
            ChromatographyInfo chromInfo = MSDKObjectBuilder.getChromatographyInfo1D(SeparationType.UNKNOWN,
                    retentionTime);
            newScan.setChromatographyInfo(chromInfo);
        }

        if (precursorMz != null) {
            IsolationInfo isolation = MSDKObjectBuilder.getIsolationInfo(Range.singleton(precursorMz), null,
                    precursorMz, precursorCharge, null);
            newScan.getIsolations().add(isolation);
        }

        // Add the scan to the file
        newRawFile.addScan(newScan);
        parsedScans++;

    }

    // <mzArrayBinary>
    if (qName.equalsIgnoreCase("mzArrayBinary")) {

        mzArrayBinaryFlag = false;

        // Allocate space for the whole array
        if (mzBuffer.length < peaksCount)
            mzBuffer = new double[peaksCount * 2];

        byte[] peakBytes = Base64.decodeBase64(charBuffer.toString().getBytes());

        ByteBuffer currentMzBytes = ByteBuffer.wrap(peakBytes);

        if (endian.equals("big")) {
            currentMzBytes = currentMzBytes.order(ByteOrder.BIG_ENDIAN);
        } else {
            currentMzBytes = currentMzBytes.order(ByteOrder.LITTLE_ENDIAN);
        }

        for (int i = 0; i < peaksCount; i++) {
            if (precision == null || precision.equals("32"))
                mzBuffer[i] = (double) currentMzBytes.getFloat();
            else
                mzBuffer[i] = currentMzBytes.getDouble();
        }

    }

    // <intenArrayBinary>
    if (qName.equalsIgnoreCase("intenArrayBinary")) {

        intenArrayBinaryFlag = false;

        // Allocate space for the whole array
        if (intensityBuffer.length < peaksCount)
            intensityBuffer = new float[peaksCount * 2];

        byte[] peakBytes = Base64.decodeBase64(charBuffer.toString().getBytes());

        ByteBuffer currentIntensityBytes = ByteBuffer.wrap(peakBytes);

        if (endian.equals("big")) {
            currentIntensityBytes = currentIntensityBytes.order(ByteOrder.BIG_ENDIAN);
        } else {
            currentIntensityBytes = currentIntensityBytes.order(ByteOrder.LITTLE_ENDIAN);
        }

        for (int i = 0; i < peaksCount; i++) {
            if (precision == null || precision.equals("32"))
                intensityBuffer[i] = currentIntensityBytes.getFloat();
            else
                intensityBuffer[i] = (float) currentIntensityBytes.getDouble();
        }
    }
}

From source file:net.sf.mzmine.datamodel.impl.SimplePeakList.java

private void updateMaxIntensity() {
    maxDataPointIntensity = 0;
    mzRange = null;
    rtRange = null;
    for (PeakListRow peakListRow : peakListRows) {
        if (peakListRow.getDataPointMaxIntensity() > maxDataPointIntensity)
            maxDataPointIntensity = peakListRow.getDataPointMaxIntensity();

        if (mzRange == null) {
            mzRange = Range.singleton(peakListRow.getAverageMZ());
            rtRange = Range.singleton(peakListRow.getAverageRT());
        } else {
            mzRange = mzRange.span(Range.singleton(peakListRow.getAverageMZ()));
            rtRange = rtRange.span(Range.singleton(peakListRow.getAverageRT()));
        }
    }
}