Example usage for com.google.common.collect Multiset add

Introduction

This page collects usage examples of com.google.common.collect.Multiset#add(E, int) gathered from open-source projects.

Prototype

int add(@Nullable E element, int occurrences);

Document

Adds a number of occurrences of an element to this multiset.
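
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the element values are invented for illustration) of what add(element, occurrences) does: it inserts several occurrences in one call and returns the element's count before the operation.

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class MultisetAddDemo {
    public static void main(String[] args) {
        Multiset<String> tags = HashMultiset.create();

        int before = tags.add("guava", 3); // adds three occurrences; returns the previous count (0)
        tags.add("guava");                 // the single-argument add(E) adds one more occurrence

        System.out.println(before);              // 0
        System.out.println(tags.count("guava")); // 4
        System.out.println(tags.size());         // 4 (total occurrences, not distinct elements)
    }
}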

Usage

From source file: com.b2international.snowowl.snomed.reasoner.server.diff.SourceConceptNamespaceAndModuleAssigner.java

@Override
public void allocateRelationshipIdsAndModules(Multiset<String> conceptIds,
        final SnomedEditingContext editingContext) {
    Multiset<String> reservedIdsByNamespace = HashMultiset.create();
    for (Multiset.Entry<String> conceptIdWithCount : conceptIds.entrySet()) {
        String namespace = SnomedIdentifiers.getNamespace(conceptIdWithCount.getElement());
        reservedIdsByNamespace.add(namespace, conceptIdWithCount.getCount());
    }

    ISnomedIdentifierService identifierService = getServiceForClass(ISnomedIdentifierService.class);
    for (Multiset.Entry<String> namespaceWithCount : reservedIdsByNamespace.entrySet()) {
        Collection<String> reservedIds = identifierService.reserve(namespaceWithCount.getElement(),
                ComponentCategory.RELATIONSHIP, namespaceWithCount.getCount());
        this.reservedIds.addAll(reservedIds);
        namespaceToRelationshipIdMap.put(namespaceWithCount.getElement(), reservedIds.iterator());
    }

    for (String conceptId : conceptIds.elementSet()) {
        Concept concept = editingContext.lookup(conceptId, Concept.class);
        conceptIdToRelationshipModuleMap.put(conceptId, concept.getModule());
    }
}

From source file: org.sonar.plugins.core.issue.IssueCountersDecorator.java

private void saveIssuesPerRules(DecoratorContext context, RulePriority severity,
        Map<RulePriority, Multiset<Rule>> rulesPerSeverity) {
    Metric metric = SeverityUtils.severityToIssueMetric(severity);

    Collection<Measure> children = context.getChildrenMeasures(MeasuresFilters.rules(metric));
    for (Measure child : children) {
        RuleMeasure childRuleMeasure = (RuleMeasure) child;
        Rule rule = childRuleMeasure.getRule();
        if (rule != null && MeasureUtils.hasValue(childRuleMeasure)) {
            Multiset<Rule> rulesBag = initRules(rulesPerSeverity, severity);
            rulesBag.add(rule, childRuleMeasure.getIntValue());
        }
    }

    Multiset<Rule> rulesBag = rulesPerSeverity.get(severity);
    if (rulesBag != null) {
        for (Multiset.Entry<Rule> entry : rulesBag.entrySet()) {
            RuleMeasure measure = RuleMeasure.createForRule(metric, entry.getElement(),
                    (double) entry.getCount());
            measure.setSeverity(severity);
            context.saveMeasure(measure);
        }
    }
}

From source file: org.onebusaway.nyc.vehicle_tracking.impl.inference.CategoricalDist.java

@SuppressWarnings("unchecked")
public Multiset<T> sample(int samples) {

    Preconditions.checkArgument(samples > 0);
    Preconditions.checkState(!_entriesToLogProbs.isEmpty(), "No entries in the CDF");
    Preconditions.checkState(!Double.isInfinite(_logCumulativeProb), "No cumulative probability in CDF");

    final Multiset<T> sampled = HashMultiset.create(samples);
    if (_entriesToLogProbs.size() == 1) {
        sampled.add(Iterables.getOnlyElement(_entriesToLogProbs.keySet()), samples);
    } else {

        if (_emd == null) {
            initializeDistribution();
        }

        _emd.setNumTrials(samples);
        final Vector sampleRes = _emd.sample(threadLocalRng.get());

        int i = 0;
        for (final VectorEntry ventry : sampleRes) {
            if (ventry.getValue() > 0.0)
                sampled.add((T) _entries[_objIdx.get(i)], (int) ventry.getValue());
            i++;
        }
    }

    return sampled;
}

From source file: org.sonar.plugins.issuesdensity.batch.WeightedIssuesDecorator.java

@Override
public void decorate(Resource resource, DecoratorContext context) {
    double value = 0.0;
    Multiset<String> distribution = LinkedHashMultiset.create();

    for (String severity : Severity.ALL) {
        Measure measure = context.getMeasure(severityToIssueMetric(severity));
        if (measure != null && MeasureUtils.hasValue(measure)) {
            distribution.add(severity, measure.getIntValue());
            double add = weightsBySeverity.get(severity) * measure.getIntValue();
            value += add;
        }
    }

    String distributionFormatted = KeyValueFormat.format(distribution);
    // SONAR-4987 We should store an empty string for the distribution value
    Measure measure = new Measure(IssuesDensityMetrics.WEIGHTED_ISSUES, value,
            Strings.emptyToNull(distributionFormatted));
    context.saveMeasure(measure);
}

From source file: org.sonar.plugins.core.issue.WeightedIssuesDecorator.java

void decorate(DecoratorContext context) {
    double debt = 0.0;
    Multiset<RulePriority> distribution = TreeMultiset.create();

    for (RulePriority severity : RulePriority.values()) {
        Measure measure = context.getMeasure(SeverityUtils.severityToIssueMetric(severity));
        if (measure != null && MeasureUtils.hasValue(measure)) {
            distribution.add(severity, measure.getIntValue());
            double add = weightsBySeverity.get(severity) * measure.getIntValue();
            debt += add;
        }
    }

    Measure debtMeasure = new Measure(CoreMetrics.WEIGHTED_VIOLATIONS, debt,
            KeyValueFormat.format(distribution));
    context.saveMeasure(debtMeasure);
}

From source file: org.sonar.plugins.core.sensors.WeightedViolationsDecorator.java

void decorate(DecoratorContext context) {
    double debt = 0.0;
    Multiset<RulePriority> distribution = TreeMultiset.create();

    for (RulePriority severity : RulePriority.values()) {
        Measure measure = context.getMeasure(SeverityUtils.severityToViolationMetric(severity));
        if (measure != null && MeasureUtils.hasValue(measure)) {
            distribution.add(severity, measure.getIntValue());
            double add = weightsBySeverity.get(severity) * measure.getIntValue();
            debt += add;
        }
    }

    Measure debtMeasure = new Measure(CoreMetrics.WEIGHTED_VIOLATIONS, debt,
            KeyValueFormat.format(distribution));
    context.saveMeasure(debtMeasure);
}

From source file: pl.polzone.classifier.Classifier.java

public String predict(java.util.List<String> words) {
    final Multiset<String> scores = HashMultiset.create();
    for (String word : words) {
        word = stem(word);
        if (wordCount.getCount(word) > feedCount / 2)
            continue;
        if (occurences.containsKey(word))
            for (Object category : occurences.get(word).uniqueSet())
                scores.add((String) category,
                        occurences.get(word).getCount(category) + (feedCount - wordCount.getCount(word)));
    }

    if (scores.isEmpty())
        return null;

    Iterator<Entry<String>> sorted = Multisets.copyHighestCountFirst(scores).entrySet().iterator();
    String highest = sorted.next().getElement();
    if (sorted.hasNext()) {
        String runnerUp = sorted.next().getElement();
        if (scores.count(highest) > scores.count(runnerUp) * 2)
            feed(highest, words);
    }
    return highest;
}

From source file: com.continuuity.loom.layout.change.AddServicesChange.java

@Override
public ClusterLayout applyChange(ClusterLayout originalLayout) {
    Multiset<NodeLayout> newLayout = HashMultiset.create(originalLayout.getLayout());
    for (Multiset.Entry<NodeLayout> entry : countsPerNodeLayout.entrySet()) {
        NodeLayout originalNodeLayout = entry.getElement();
        NodeLayout expandedNodeLayout = NodeLayout.addServicesToNodeLayout(originalNodeLayout, services);
        // add the service count times
        newLayout.add(expandedNodeLayout, entry.getCount());
        // subtract count nodes from the original node layout since that many have now been expanded.
        newLayout.setCount(originalNodeLayout,
                originalLayout.getLayout().count(originalNodeLayout) - entry.getCount());
    }
    return new ClusterLayout(originalLayout.getConstraints(), newLayout);
}

From source file: com.continuuity.loom.layout.change.AddServiceChangeIterator.java

public AddServiceChangeIterator(ClusterLayout clusterLayout, String service) {
    this.service = service;
    // cluster services are needed in order to prune the constraints to only use ones that pertain to services
    // on the cluster
    Set<String> expandedClusterServices = Sets.newHashSet(service);
    for (NodeLayout nodeLayout : clusterLayout.getLayout().elementSet()) {
        expandedClusterServices.addAll(nodeLayout.getServiceNames());
    }
    // first figure out which node layouts can add this service
    this.expandableNodeLayouts = Lists.newArrayListWithCapacity(clusterLayout.getLayout().elementSet().size());
    Multiset<NodeLayout> expandedCounts = HashMultiset.create();
    for (NodeLayout originalNodeLayout : clusterLayout.getLayout().elementSet()) {
        NodeLayout expandedNodeLayout = NodeLayout.addServiceToNodeLayout(originalNodeLayout, service);
        if (expandedNodeLayout.satisfiesConstraints(clusterLayout.getConstraints(), expandedClusterServices)) {
            expandableNodeLayouts.add(originalNodeLayout);
            expandedCounts.add(originalNodeLayout, clusterLayout.getLayout().count(originalNodeLayout));
        }
    }
    // sort expandable node layouts by preference order
    Collections.sort(this.expandableNodeLayouts, new NodeLayoutComparator(null, null));
    // need to pass this to the slotted iterator so we don't try and add the service to a node layout more times
    // than there are nodes for the node layout.
    this.nodeLayoutMaxCounts = new int[expandableNodeLayouts.size()];
    for (int i = 0; i < nodeLayoutMaxCounts.length; i++) {
        nodeLayoutMaxCounts[i] = expandedCounts.count(expandableNodeLayouts.get(i));
    }
    // figure out the max number of nodes we can add the service to. Start off by saying we can add it to all nodes.
    this.nodesToAddTo = expandedCounts.size();
    // we always need to add the service to at least one node.
    this.minNodesToAddTo = 1;
    ServiceConstraint serviceConstraint = clusterLayout.getConstraints().getServiceConstraints().get(service);
    // if there is a max constraint on this service and its less than the number of nodes in the cluster, start
    // there instead. Similarly, if there is a min constraint on this service higher than 1, use that instead.
    if (serviceConstraint != null) {
        this.nodesToAddTo = Math.min(serviceConstraint.getMaxCount(), this.nodesToAddTo);
        this.minNodesToAddTo = Math.max(serviceConstraint.getMinCount(), this.minNodesToAddTo);
    }
    this.nodeLayoutCountIterator = (this.nodesToAddTo < 1) ? null
            : new SlottedCombinationIterator(expandableNodeLayouts.size(), nodesToAddTo, nodeLayoutMaxCounts);
}

From source file: com.davidsoergel.stats.DistributionXYSeries.java

public BinnedXYSeries binXQuantiles(int numQuantiles) {
    try {
        BinnedXYSeries result = new BinnedXYSeries(this);

        Multiset<Double> allXValues = HashMultiset.create();
        for (Map.Entry<Double, Multiset<Double>> entry : yValsPerX.entrySet()) {
            allXValues.add(entry.getKey(), entry.getValue().size());
        }

        EqualWeightHistogram1D theBaseHistogram = new EqualWeightHistogram1D(numQuantiles,
                DSArrayUtils.toPrimitiveDoubleArray(allXValues));

        int numBins = theBaseHistogram.getBins();
        for (int i = 0; i < numBins; i++) {
            //   double bottom = theBaseHistogram.bottomOfBin(i);
            //   double top = theBaseHistogram.topOfBin(i);
            double center = theBaseHistogram.centerOfBin(i);
            double halfBinWidth = theBaseHistogram.halfWidthOfBin(i);
            result.addBin(center, halfBinWidth);
            /*      for (Double x : keys.tailSet(bottom).headSet(top))
               {
               result.addPoints(center, yValsPerX.get(x));  // forget the original x value; collapse all the points to the bin center
               }*/
        }
        return result;
    } catch (DistributionException e) {
        logger.error("Error", e);
        throw new Error(e);
    } catch (StatsException e) {
        logger.error("Error", e);
        throw new Error(e);
    }
}