Example usage for com.google.common.collect Multiset count

Introduction

This page collects example usages of the com.google.common.collect Multiset.count method.

Prototype

int count(@Nullable Object element);

Document

Returns the number of occurrences of an element in this multiset (the count of the element).
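
A minimal standalone sketch (not taken from any of the projects below) illustrating the contract of count with a HashMultiset: absent elements have a count of zero, and because the parameter is @Nullable, querying for null is allowed and simply returns 0 when no null element was added.

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class MultisetCountDemo {
    public static void main(String[] args) {
        Multiset<String> words = HashMultiset.create();
        words.add("a");
        words.add("a");
        words.add("b");

        System.out.println(words.count("a")); // 2
        System.out.println(words.count("b")); // 1
        System.out.println(words.count("c")); // 0 -- absent elements count as zero
        System.out.println(words.count(null)); // 0 -- no null element was added
    }
}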

Usage

From source file:net.timendum.pdf.StatisticParser.java

private float findMax(Multiset<Float> multiset) {
    float actual = 0f;
    int max = -1;
    for (Float k : multiset) {
        int count = multiset.count(k);
        if (count > max) {
            max = count;
            actual = k;
        }
    }
    return actual;
}
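
The same selection could also be written over Multiset.entrySet(), which visits each distinct element once instead of once per occurrence. A sketch of that variant (the method name is invented for illustration; the Guava Multiset import is assumed):

private static float findMaxViaEntrySet(Multiset<Float> multiset) {
    float actual = 0f;
    int max = -1;
    // Each Multiset.Entry pairs a distinct element with its occurrence count.
    for (Multiset.Entry<Float> entry : multiset.entrySet()) {
        if (entry.getCount() > max) {
            max = entry.getCount();
            actual = entry.getElement();
        }
    }
    return actual;
}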

From source file:com.github.tomakehurst.wiremock.recording.SnapshotStubMappingPostProcessor.java

public List<StubMapping> process(Iterable<StubMapping> stubMappings) {
    final Multiset<RequestPattern> requestCounts = HashMultiset.create();
    final List<StubMapping> processedStubMappings = new ArrayList<>();

    for (StubMapping stubMapping : stubMappings) {
        requestCounts.add(stubMapping.getRequest());

        // Skip duplicate requests if shouldRecordRepeatsAsScenarios is not enabled
        if (requestCounts.count(stubMapping.getRequest()) > 1 && !shouldRecordRepeatsAsScenarios) {
            continue;
        }

        if (bodyExtractMatcher != null && bodyExtractMatcher.match(stubMapping.getResponse()).isExactMatch()) {
            bodyExtractor.extractInPlace(stubMapping);
        }

        processedStubMappings.add(stubMapping);
    }

    if (shouldRecordRepeatsAsScenarios) {
        new ScenarioProcessor().putRepeatedRequestsInScenarios(processedStubMappings);
    }

    // Run any stub mapping transformer extensions
    return Lists.transform(processedStubMappings, transformerRunner);
}

From source file:com.github.fhirschmann.clozegen.lib.generators.CollocationGapGenerator.java

@Override
public Optional<Gap> generate(final int count) {
    checkNotNull(model);
    Gap gap = new Gap();
    gap.addValidAnswers(triplet.getValue1());

    // Collect a list of possible candidates for this gap
    final Multiset<String> candidates = ConcurrentHashMultiset.create(MultisetUtils.mergeMultiSets(
            model.getTails().get(triplet.getValue2()), model.getHeads().get(triplet.getValue0())));

    // Remove the correct answer from the candidate set
    candidates.remove(triplet.getValue1(), candidates.count(triplet.getValue1()));

    // Remove candidates p* which appear in the context (A, p*, B)
    for (Entry<String> entry : candidates.entrySet()) {
        if (model.getMultiset().contains(
                MiscUtils.WS_JOINER.join(triplet.getValue0(), entry.getElement(), triplet.getValue2()))) {
            candidates.remove(entry.getElement(), entry.getCount());
        }
    }

    if (candidates.elementSet().size() > count - 2) {
        final Set<String> invalidAnswers = Sets
                .newHashSet(MultisetUtils.sortedElementList(candidates, count - 1));
        gap.addInvalidAnswers(invalidAnswers);
        return Optional.of(gap);
    } else {
        return Optional.absent();
    }
}

From source file:org.sosy_lab.cpachecker.util.predicates.pathformula.pointeraliasing.TypeHandlerWithPointerAliasing.java

/**
 * Speeds up member offset computation for declared composite types by caching the offsets.
 *
 * @param compositeType the composite type containing the member
 * @param memberName the name of the member whose offset is requested
 * @return the offset of the member within the composite type
 */
int getOffset(CCompositeType compositeType, final String memberName) {
    compositeType = (CCompositeType) CTypeUtils.simplifyType(compositeType);
    assert compositeType.getKind() != ComplexTypeKind.ENUM : "Enums are not composite: " + compositeType;
    Multiset<String> multiset = offsets.get(compositeType);
    if (multiset == null) {
        addCompositeTypeToCache(compositeType);
        multiset = offsets.get(compositeType);
        assert multiset != null : "Failed adding composite type to cache: " + compositeType;
    }
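    // In the cached multiset, each member name's count stores that member's offset within the composite type.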
    return multiset.count(memberName);
}

From source file:org.simmetrics.metrics.CosineSimilarity.java

@Override
public float compare(Multiset<T> a, Multiset<T> b) {

    if (a.isEmpty() && b.isEmpty()) {
        return 1.0f;
    }

    if (a.isEmpty() || b.isEmpty()) {
        return 0.0f;
    }

    float dotProduct = 0;
    float magnitudeA = 0;
    float magnitudeB = 0;

    for (T entry : union(a, b).elementSet()) {
        float aCount = a.count(entry);
        float bCount = b.count(entry);

        dotProduct += aCount * bCount;
        magnitudeA += aCount * aCount;
        magnitudeB += bCount * bCount;
    }

    // a·b / (||a|| * ||b||)
    return (float) (dotProduct / (sqrt(magnitudeA) * sqrt(magnitudeB)));
}
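
To make the arithmetic concrete, a small standalone sketch (not part of the simmetrics sources; it assumes Guava's HashMultiset and Multisets plus java.util.Arrays are imported) that evaluates the same formula directly with count for two tiny token bags:

Multiset<String> a = HashMultiset.create(Arrays.asList("x", "x", "y")); // counts: x=2, y=1
Multiset<String> b = HashMultiset.create(Arrays.asList("x", "y", "y")); // counts: x=1, y=2

double dot = 0, magA = 0, magB = 0;
for (String t : Multisets.union(a, b).elementSet()) {
    dot += a.count(t) * b.count(t);
    magA += a.count(t) * a.count(t);
    magB += b.count(t) * b.count(t);
}
// dot = 2*1 + 1*2 = 4, ||a|| = ||b|| = sqrt(5), so the result is 4 / 5 = 0.8
System.out.println(dot / (Math.sqrt(magA) * Math.sqrt(magB)));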

From source file:org.sonar.server.computation.issue.IssueCounter.java

private void addMeasuresByPeriod(Component component) {
    if (!periodsHolder.getPeriods().isEmpty()) {
        Double[] unresolvedVariations = new Double[PeriodsHolder.MAX_NUMBER_OF_PERIODS];
        for (Period period : periodsHolder.getPeriods()) {
            unresolvedVariations[period.getIndex()
                    - 1] = (double) currentCounters.counterForPeriod(period.getIndex()).unresolved;
        }
        measureRepository.add(component, metricRepository.getByKey(NEW_VIOLATIONS_KEY),
                Measure.newMeasureBuilder().setVariations(new MeasureVariations(unresolvedVariations))
                        .createNoValue());

        for (Map.Entry<String, String> entry : SEVERITY_TO_NEW_METRIC_KEY.entrySet()) {
            String severity = entry.getKey();
            String metricKey = entry.getValue();
            Double[] variations = new Double[PeriodsHolder.MAX_NUMBER_OF_PERIODS];
            for (Period period : periodsHolder.getPeriods()) {
                Multiset<String> bag = currentCounters.counterForPeriod(period.getIndex()).severityBag;
                variations[period.getIndex() - 1] = (double) bag.count(severity);
            }
            Metric metric = metricRepository.getByKey(metricKey);
            measureRepository.add(component, metric, Measure.newMeasureBuilder()
                    .setVariations(new MeasureVariations(variations)).createNoValue());
        }

        // waiting for Java 8 lambda in order to factor this loop with the previous one
        // (see call currentCounters.counterForPeriod(period.getIndex()).xxx with xxx as severityBag or typeBag)
        for (Map.Entry<RuleType, String> entry : TYPE_TO_NEW_METRIC_KEY.entrySet()) {
            RuleType type = entry.getKey();
            String metricKey = entry.getValue();
            Double[] variations = new Double[PeriodsHolder.MAX_NUMBER_OF_PERIODS];
            for (Period period : periodsHolder.getPeriods()) {
                Multiset<RuleType> bag = currentCounters.counterForPeriod(period.getIndex()).typeBag;
                variations[period.getIndex() - 1] = (double) bag.count(type);
            }
            Metric metric = metricRepository.getByKey(metricKey);
            measureRepository.add(component, metric, Measure.newMeasureBuilder()
                    .setVariations(new MeasureVariations(variations)).createNoValue());
        }
    }
}

From source file:com.mapr.storm.CounterBolt.java

/**
 * Records and then clears all pending counts if we have crossed a window boundary,
 * if enough data has accumulated, or if forced.
 *
 * @param force If true, the window and buffer checks are skipped and the data is pushed out regardless.
 */
private void recordCounts(boolean force) {
    long currentRecordWindowStart = (now() / reportingInterval) * reportingInterval;
    if (lastRecordOutput == 0) {
        lastRecordOutput = currentRecordWindowStart;
    }

    final int bufferedTuples = tupleLog.get().size();
    if (force || currentRecordWindowStart > lastRecordOutput || bufferedTuples > maxBufferedTuples) {
        if (force) {
            logger.info("Forced recording");
        } else if (bufferedTuples > maxBufferedTuples) {
            logger.info("Recording due to max tuples");
        } else {
            logger.info("Recording due to time");
        }

        // atomic get-and-set avoids the need for locks and still avoids races
        // grabbing the entire queue at once avoids contention as we count the queue elements
        Queue<Tuple> oldLog = tupleLog.getAndSet(new LinkedBlockingQueue<Tuple>());

        Multiset<String> counts = HashMultiset.create();
        for (Tuple tuple : oldLog) {
            counts.add(tuple.getString(0) + "\t" + tuple.getString(1));
        }

        // record all keys
        for (String keyValue : counts.elementSet()) {
            final int n = counts.count(keyValue);
            outputCollector.emit(oldLog, new Values(keyValue, n));
            count.addAndGet(n);
        }
        logger.info(String.format("Logged %d events", count.get()));

        for (Tuple tuple : oldLog) {
            outputCollector.ack(tuple);
        }
        lastRecordOutput = currentRecordWindowStart;
    }
}

From source file:Controller.BeanUCV.java

private String computeMostVotedCategory() {

    String mostCat = "";
    int mostVotes = 0;
    Multiset<String> setCountVotes = HashMultiset.create();
    setCountVotes.addAll(sharedBean.getOneMapIPToCategories(sessionCode).values());
    for (String cat : setCountVotes) {
        int countCategory = setCountVotes.count(cat);
        if (countCategory > mostVotes) {
            mostVotes = countCategory;
            mostCat = cat;
        }
    }
    return mostCat;
}

From source file:org.eclipse.emf.compare.internal.utils.DiffUtil.java

/**
 * This will be called to try and find the insertion index for an element that is located in-between two
 * elements of the LCS between {@code source} and {@code target}.
 *
 * @param source
 *            The List from which one element has to be added to the {@code target} list.
 * @param target
 *            The List into which one element from {@code source} has to be added.
 * @param equalityHelper
 *            The equality helper to use for this computation.
 * @param lcs
 *            The lcs between {@code source} and {@code target}.
 * @param currentIndex
 *            Current index (in {@code source}) of the element we are to insert into {@code target}.
 * @param <E>
 *            Type of the sequences' content.
 * @return The index in the target list at which that element should be inserted.
 */
private static <E> int findInsertionIndexWithinLCS(List<E> source, List<E> target,
        final IEqualityHelper equalityHelper, final List<E> lcs, int currentIndex) {
    int insertionIndex = -1;
    /*
     * If any element of the subsequence {<index of A>, <index of B>} from source had been in the same
     * subsequence in target, it would have been part of the LCS. We thus know none is.
     */
    // The insertion index will be just after A in target

    // First, find which element of the LCS is "A"
    int lcsIndexOfSubsequenceStart = -1;
    for (int i = 0; i < currentIndex; i++) {
        final E sourceElement = source.get(i);

        boolean isInLCS = false;
        for (int j = lcsIndexOfSubsequenceStart + 1; j < lcs.size() && !isInLCS; j++) {
            final E lcsElement = lcs.get(j);

            if (equalityHelper.matchingValues(sourceElement, lcsElement)) {
                isInLCS = true;
                lcsIndexOfSubsequenceStart++;
            }
        }
    }

    if (lcsIndexOfSubsequenceStart > -1) {
        // Do we have duplicates before A in the lcs?
        final Multiset<E> dupesLCS = HashMultiset.create(lcs.subList(0, lcsIndexOfSubsequenceStart + 1));
        final E subsequenceStart = lcs.get(lcsIndexOfSubsequenceStart);
        int duplicatesToGo = dupesLCS.count(subsequenceStart) - 1;

        // Then, find the index of "A" in target
        for (int i = 0; i < target.size() && insertionIndex == -1; i++) {
            final E targetElement = target.get(i);

            if (equalityHelper.matchingValues(targetElement, subsequenceStart)) {
                if (duplicatesToGo > 0) {
                    duplicatesToGo--;
                } else {
                    insertionIndex = i + 1;
                }
            }
        }
    }

    return insertionIndex;
}

From source file:uk.ac.ebi.intact.editor.services.admin.report.AssignmentReportService.java

@Transactional(value = "jamiTransactionManager", propagation = Propagation.REQUIRED, readOnly = true)
public List<AssignmentInfo> calculatePublicationReviewerAssignments(Date fromDate, Date toDate) {
    List<AssignmentInfo> assignmentInfos = new ArrayList<AssignmentInfo>();

    Query query = getIntactDao().getEntityManager()
            .createQuery("select distinct p from IntactPublication p join p.lifecycleEvents as e where "
                    + "e.cvEvent.shortName = :cvEvent and e.when >= :dateFrom and e.when <= :dateTo and e.note is null");
    query.setParameter("cvEvent", LifeCycleEventType.READY_FOR_CHECKING.shortLabel());
    query.setParameter("dateFrom", fromDate);
    query.setParameter("dateTo", new DateTime(toDate).plusDays(1).minusSeconds(1).toDate());

    List<IntactPublication> pubs = query.getResultList();

    Multiset<String> multiset = HashMultiset.create();

    for (IntactPublication pub : pubs) {
        for (LifeCycleEvent event : pub.getLifecycleEvents()) {
            multiset.add(pub.getCurrentReviewer().getLogin());
        }
    }

    int total = multiset.size();

    for (String reviewer : multiset.elementSet()) {
        int count = multiset.count(reviewer);
        int percentage = count * 100 / total;
        assignmentInfos.add(new AssignmentInfo(reviewer, count, percentage));
    }

    return assignmentInfos;
}