Example usage for com.google.common.collect Range contains

Introduction

This page lists example usages of com.google.common.collect.Range#contains, taken from real source files.

Prototype

public boolean contains(C value) 

Document

Returns true if value is within the bounds of this range.
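
For orientation, a minimal, self-contained sketch of the call (the ranges and values below are purely illustrative):

import com.google.common.collect.Range;

public class RangeContainsDemo {
    public static void main(String[] args) {
        // A closed range includes both endpoints.
        Range<Integer> closed = Range.closed(1, 10);
        System.out.println(closed.contains(10));     // true

        // A closed-open range excludes its upper endpoint.
        Range<Double> halfOpen = Range.closedOpen(0.0, 1.0);
        System.out.println(halfOpen.contains(1.0));  // false
    }
}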

Usage

From source file: com.wealdtech.collect.TreeRangedMap.java

@Override
public void put(final Range<K> key, final V value) {
    validateRange(key);
    K resultantStart = key.lowerEndpoint();
    K resultantEnd = key.upperEndpoint();

    // Truncate or coalesce anything which overlaps the start of our new entry
    final Map.Entry<K, TwoTuple<Range<K>, V>> prior = getEntry(key.lowerEndpoint());
    if (prior != null) {
        if (prior.getValue().getT().equals(value)) {
            // Values are the same so we can coalesce.
            if (resultantEnd.compareTo(prior.getValue().getS().upperEndpoint()) < 0) {
                // Existing entry already covers this; we don't have to do anything more
                return;
            }
            underlying.remove(prior.getKey());
            // Set our start to the start of the prior entry
            resultantStart = prior.getKey();
        } else {
            // Values are different; truncate prior item
            underlying.put(prior.getKey(),
                    new TwoTuple<>(Range.closedOpen(prior.getKey(), resultantStart), prior.getValue().getT()));
            // If the prior entry stretches beyond the new entry we also need to put in our remaining item
            if (resultantEnd.compareTo(prior.getValue().getS().upperEndpoint()) < 0) {
                underlying.put(resultantEnd,
                        new TwoTuple<>(Range.closedOpen(resultantEnd, prior.getValue().getS().upperEndpoint()),
                                prior.getValue().getT()));
            }

        }
    }

    // Remove any items which are covered by our new entry, and truncate or coalesce anything which overlaps the end of it
    Map.Entry<K, TwoTuple<Range<K>, V>> potentialVictim = underlying.ceilingEntry(resultantStart);
    while (potentialVictim != null) {
        if (key.encloses(potentialVictim.getValue().getS())) {
            // Totally enclosed; remove it
            underlying.remove(potentialVictim.getKey());
            potentialVictim = underlying.ceilingEntry(resultantStart);
        } else if (key.contains(potentialVictim.getKey())) {
            // Partial overlap
            if (potentialVictim.getValue().getT().equals(value)) {
                // Values are the same so we can coalesce.  Remove the entry and update our bounds accordingly
                resultantEnd = potentialVictim.getValue().getS().upperEndpoint();
                underlying.remove(potentialVictim.getKey());
            } else {
                // Values are different; truncate victim item
                underlying.remove(potentialVictim.getKey());
                underlying.put(resultantEnd,
                        new TwoTuple<>(
                                Range.closedOpen(resultantEnd,
                                        potentialVictim.getValue().getS().upperEndpoint()),
                                potentialVictim.getValue().getT()));
            }
            potentialVictim = null;
        } else {
            // No relationship
            potentialVictim = null;
        }
    }

    // Write out our final result
    underlying.put(resultantStart, new TwoTuple<>(Range.closedOpen(resultantStart, resultantEnd), value));
}
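
The map above keys each stored [start, end) range by its lower endpoint, so key.contains(potentialVictim.getKey()) asks whether an existing entry starts inside the new range, i.e. overlaps it partially. A minimal sketch of that overlap test, with hypothetical values (assumes com.google.common.collect.Range is imported):

Range<Integer> newEntry = Range.closedOpen(5, 15);
Range<Integer> existing = Range.closedOpen(10, 20);

boolean fullyEnclosed = newEntry.encloses(existing);                  // false: existing extends past 15
boolean partialOverlap = newEntry.contains(existing.lowerEndpoint()); // true: 10 lies inside [5, 15)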

From source file: com.b2international.snowowl.snomed.core.ecl.SnomedEclRefinementEvaluator.java

/**
 * Evaluates partial results coming from a binary operator's left and right side within attribute-group-based refinements.
 * @param groupCardinality - the cardinality to check
 * @param groupOperator - the operator to use (AND or OR, aka {@link Sets#intersection(Set, Set)} or {@link Sets#union(Set, Set)})
 * @return a function that can be chained via {@link Promise#then(Function)} to evaluate partial results when they are available
 */
private Function<List<Object>, Collection<Property>> evalParts(final Range<Long> groupCardinality,
        BinaryOperator<Set<Integer>> groupOperator) {
    return input -> {
        final Collection<Property> left = (Collection<Property>) input.get(0);
        final Collection<Property> right = (Collection<Property>) input.get(1);

        final Collection<Property> matchingAttributes = newHashSet();

        // group left and right side by source ID
        final Multimap<String, Property> leftRelationshipsBySource = Multimaps.index(left,
                Property::getObjectId);
        final Multimap<String, Property> rightRelationshipsBySource = Multimaps.index(right,
                Property::getObjectId);

        // check that each ID has the required number of groups with left and right relationships
        for (String sourceConcept : Iterables.concat(leftRelationshipsBySource.keySet(),
                rightRelationshipsBySource.keySet())) {
            final Multimap<Integer, Property> validGroups = ArrayListMultimap.create();

            final Collection<Property> leftSourceRelationships = leftRelationshipsBySource.get(sourceConcept);
            final Collection<Property> rightSourceRelationships = rightRelationshipsBySource.get(sourceConcept);

            final Multimap<Integer, Property> leftRelationshipsByGroup = Multimaps
                    .index(leftSourceRelationships, Property::getGroup);
            final Multimap<Integer, Property> rightRelationshipsByGroup = Multimaps
                    .index(rightSourceRelationships, Property::getGroup);

            for (Integer group : groupOperator.apply(leftRelationshipsByGroup.keySet(),
                    rightRelationshipsByGroup.keySet())) {
                validGroups.get(group).addAll(leftRelationshipsByGroup.get(group));
                validGroups.get(group).addAll(rightRelationshipsByGroup.get(group));
            }

            if (groupCardinality.contains((long) validGroups.keySet().size())) {
                matchingAttributes.addAll(validGroups.values());
            }
        }
        return matchingAttributes;
    };
}
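
Here groupCardinality is a Range<Long>, and contains serves as the cardinality check on the number of matching groups per source concept. A minimal sketch of that idea, with an assumed cardinality of one to two groups (assumes com.google.common.collect.Range is imported):

Range<Long> groupCardinality = Range.closed(1L, 2L);   // assumed example cardinality [1..2]
long matchingGroupCount = 3L;
boolean withinCardinality = groupCardinality.contains(matchingGroupCount); // false: too many groups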

From source file: net.sf.mzmine.modules.peaklistmethods.peakpicking.deconvolution.minimumsearch.MinimumSearchPeakDetector.java

@Override
public Feature[] resolvePeaks(final Feature chromatogram, final int[] scanNumbers,
        final double[] retentionTimes, final double[] intensities, ParameterSet parameters,
        RSessionWrapper rSession) {

    final int scanCount = scanNumbers.length;
    final int lastScan = scanCount - 1;

    assert scanCount > 0;

    final Range<Double> peakDuration = parameters.getParameter(PEAK_DURATION).getValue();
    final double searchRTRange = parameters.getParameter(SEARCH_RT_RANGE).getValue();
    final double minRatio = parameters.getParameter(MIN_RATIO).getValue();
    final double minHeight = Math.max(parameters.getParameter(MIN_ABSOLUTE_HEIGHT).getValue(),
            parameters.getParameter(MIN_RELATIVE_HEIGHT).getValue() * chromatogram.getHeight());

    final List<ResolvedPeak> resolvedPeaks = new ArrayList<ResolvedPeak>(2);

    // First, remove all data points below chromatographic threshold.
    final double chromatographicThresholdLevel = MathUtils.calcQuantile(intensities,
            parameters.getParameter(CHROMATOGRAPHIC_THRESHOLD_LEVEL).getValue());
    for (int i = 0; i < intensities.length; i++) {

        if (intensities[i] < chromatographicThresholdLevel) {

            intensities[i] = 0.0;
        }
    }

    // Current region is a region between two minima, representing a
    // candidate for a resolved peak.
    startSearch: for (int currentRegionStart = 0; currentRegionStart < lastScan - 2; currentRegionStart++) {

        // Find at least two consecutive non-zero data points
        if (intensities[currentRegionStart] != 0.0 && intensities[currentRegionStart + 1] != 0.0) {

            double currentRegionHeight = intensities[currentRegionStart];

            endSearch: for (int currentRegionEnd = currentRegionStart
                    + 1; currentRegionEnd < scanCount; currentRegionEnd++) {

                // Update height of current region.
                currentRegionHeight = Math.max(currentRegionHeight, intensities[currentRegionEnd]);

                // If we reached the end, or if the next intensity is 0, we
                // have to stop here.
                if (currentRegionEnd == lastScan || intensities[currentRegionEnd + 1] == 0.0) {

                    // Find the intensity at the sides (lowest data points).
                    final double peakMinLeft = intensities[currentRegionStart];
                    final double peakMinRight = intensities[currentRegionEnd];

                    // Check the shape of the peak.
                    if (currentRegionHeight >= minHeight && currentRegionHeight >= peakMinLeft * minRatio
                            && currentRegionHeight >= peakMinRight * minRatio && peakDuration.contains(
                                    retentionTimes[currentRegionEnd] - retentionTimes[currentRegionStart])) {

                        resolvedPeaks.add(new ResolvedPeak(chromatogram, currentRegionStart, currentRegionEnd));
                    }

                    // Set the next region start to current region end - 1
                    // because it will be immediately
                    // increased +1 as we continue the for-cycle.
                    currentRegionStart = currentRegionEnd - 1;
                    continue startSearch;
                }

                // Minimum duration of peak must be at least searchRTRange.
                if (retentionTimes[currentRegionEnd] - retentionTimes[currentRegionStart] >= searchRTRange) {

                    // Set the RT range to check
                    final Range<Double> checkRange = Range.closed(
                            retentionTimes[currentRegionEnd] - searchRTRange,
                            retentionTimes[currentRegionEnd] + searchRTRange);

                    // Search if there is lower data point on the left from
                    // current peak i.
                    for (int i = currentRegionEnd - 1; i > 0 && checkRange.contains(retentionTimes[i]); i--) {

                        if (intensities[i] < intensities[currentRegionEnd]) {

                            continue endSearch;
                        }
                    }

                    // Search on the right from current peak i.
                    for (int i = currentRegionEnd + 1; i < scanCount
                            && checkRange.contains(retentionTimes[i]); i++) {

                        if (intensities[i] < intensities[currentRegionEnd]) {

                            continue endSearch;
                        }
                    }

                    // Find the intensity at the sides (lowest data points).
                    final double peakMinLeft = intensities[currentRegionStart];
                    final double peakMinRight = intensities[currentRegionEnd];

                    // If we have reached a minimum which is non-zero, but
                    // the peak shape would not fulfill the
                    // ratio condition, continue searching for next minimum.
                    if (currentRegionHeight >= peakMinRight * minRatio) {

                        // Check the shape of the peak.
                        if (currentRegionHeight >= minHeight && currentRegionHeight >= peakMinLeft * minRatio
                                && currentRegionHeight >= peakMinRight * minRatio
                                && peakDuration.contains(retentionTimes[currentRegionEnd]
                                        - retentionTimes[currentRegionStart])) {

                            resolvedPeaks
                                    .add(new ResolvedPeak(chromatogram, currentRegionStart, currentRegionEnd));
                        }

                        // Set the next region start to current region end-1
                        // because it will be immediately
                        // increased +1 as we continue the for-cycle.
                        currentRegionStart = currentRegionEnd - 1;
                        continue startSearch;
                    }
                }
            }
        }
    }

    return resolvedPeaks.toArray(new Feature[resolvedPeaks.size()]);
}
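
Two separate checks above rely on contains: peakDuration.contains(...) verifies that a candidate peak's width lies inside the allowed duration range, and checkRange.contains(retentionTimes[i]) restricts the local-minimum search to a retention-time window around the current point. A minimal sketch of the window check, with illustrative numbers (assumes com.google.common.collect.Range is imported):

double currentRT = 12.50;
double searchRTRange = 0.20;
Range<Double> checkRange = Range.closed(currentRT - searchRTRange, currentRT + searchRTRange);

boolean inWindow = checkRange.contains(12.61);    // true: within +/- 0.20 of 12.50
boolean outOfWindow = checkRange.contains(12.80); // false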

From source file: net.sf.mzmine.modules.peaklistmethods.filtering.peakcomparisonrowfilter.PeakComparisonRowFilterTask.java

/**
 * Filter the peak list rows by comparing peaks within a row.
 *
 * @param peakList
 *            peak list to filter.
 * @return a new peak list with rows of the original peak list that pass the
 *         filtering.
 */
private PeakList filterPeakListRows(final PeakList peakList) {

    // Create new peak list.
    final PeakList newPeakList = new SimplePeakList(
            peakList.getName() + ' '
                    + parameters.getParameter(PeakComparisonRowFilterParameters.SUFFIX).getValue(),
            peakList.getRawDataFiles());

    // Copy previous applied methods.
    for (final PeakListAppliedMethod method : peakList.getAppliedMethods()) {

        newPeakList.addDescriptionOfAppliedTask(method);
    }

    // Add task description to peakList.
    newPeakList.addDescriptionOfAppliedTask(new SimplePeakListAppliedMethod(getTaskDescription(), parameters));

    // Get parameters.
    final boolean evalutateFoldChange = parameters.getParameter(PeakComparisonRowFilterParameters.FOLD_CHANGE)
            .getValue();
    final boolean evalutatePPMdiff = parameters.getParameter(PeakComparisonRowFilterParameters.MZ_PPM_DIFF)
            .getValue();
    final boolean evalutateRTdiff = parameters.getParameter(PeakComparisonRowFilterParameters.RT_DIFF)
            .getValue();
    final int columnIndex1 = parameters.getParameter(PeakComparisonRowFilterParameters.COLUMN_INDEX_1)
            .getValue();
    final int columnIndex2 = parameters.getParameter(PeakComparisonRowFilterParameters.COLUMN_INDEX_2)
            .getValue();
    final Range<Double> foldChangeRange = parameters.getParameter(PeakComparisonRowFilterParameters.FOLD_CHANGE)
            .getEmbeddedParameter().getValue();
    final Range<Double> ppmDiffRange = parameters.getParameter(PeakComparisonRowFilterParameters.MZ_PPM_DIFF)
            .getEmbeddedParameter().getValue();
    final Range<Double> rtDiffRange = parameters.getParameter(PeakComparisonRowFilterParameters.RT_DIFF)
            .getEmbeddedParameter().getValue();

    // Setup variables
    final PeakListRow[] rows = peakList.getRows();
    RawDataFile rawDataFile1;
    RawDataFile rawDataFile2;
    Feature peak1;
    Feature peak2;
    totalRows = rows.length;
    final RawDataFile[] rawDataFiles = peakList.getRawDataFiles();

    boolean allCriteriaMatched = true;

    // Error handling. User tried to select a column from the peaklist that
    // doesn't exist.
    if (columnIndex1 >= rawDataFiles.length) {
        setErrorMessage("Column 1 set too large.");
        setStatus(TaskStatus.ERROR);
        return null;
    }
    if (columnIndex2 >= rawDataFiles.length) {
        setErrorMessage("Column 2 set too large.");
        setStatus(TaskStatus.ERROR);
        return null;
    }

    // Loop over the rows & filter
    for (processedRows = 0; !isCanceled() && processedRows < totalRows; processedRows++) {

        if (isCanceled())
            return null;

        allCriteriaMatched = true;

        double peak1Area = 1.0; // Default value in case of null peak
        double peak2Area = 1.0;
        double peak1MZ = -1.0;
        double peak2MZ = -1.0;
        double peak1RT = -1.0;
        double peak2RT = -1.0;
        double foldChange = 0.0;
        double ppmDiff = 0.0;
        double rtDiff = 0.0;
        final PeakListRow row = rows[processedRows];
        rawDataFile1 = rawDataFiles[columnIndex1];
        rawDataFile2 = rawDataFiles[columnIndex2];

        peak1 = row.getPeak(rawDataFile1);
        peak2 = row.getPeak(rawDataFile2);

        if (peak1 != null) {
            peak1Area = peak1.getArea();
            peak1MZ = peak1.getMZ();
            peak1RT = peak1.getRT();
        }

        if (peak2 != null) {
            peak2Area = peak2.getArea();
            peak2MZ = peak2.getMZ();
            peak2RT = peak2.getRT();
        }

        // Fold change criteria checking.
        if (evalutateFoldChange) {
            foldChange = Math.log(peak1Area / peak2Area) / Math.log(2);
            if (!foldChangeRange.contains(foldChange))
                allCriteriaMatched = false;
        }

        // PPM difference evaluation
        if (evalutatePPMdiff) {
            ppmDiff = (peak1MZ - peak2MZ) / peak1MZ * 1E6;
            if (!ppmDiffRange.contains(ppmDiff))
                allCriteriaMatched = false;
        }

        // RT difference evaluation
        if (evalutateRTdiff) {
            rtDiff = peak1RT - peak2RT;
            if (!rtDiffRange.contains(rtDiff))
                allCriteriaMatched = false;
        }

        // Good row?
        if (allCriteriaMatched)
            newPeakList.addRow(copyPeakRow(row));

    }

    return newPeakList;
}
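
Each optional criterion above reduces to a Range<Double>.contains test on a computed difference. A minimal sketch of the fold-change check in isolation, with illustrative peak areas and an assumed acceptance range of -2 to 2 (assumes com.google.common.collect.Range is imported):

double peak1Area = 3.0e5;
double peak2Area = 1.0e5;
Range<Double> foldChangeRange = Range.closed(-2.0, 2.0); // assumed acceptance range

double foldChange = Math.log(peak1Area / peak2Area) / Math.log(2); // log2 ratio, roughly 1.58
boolean passes = foldChangeRange.contains(foldChange);             // true: 1.58 is inside [-2.0, 2.0]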