Example usage for com.google.common.collect HashMultiset create

List of usage examples for com.google.common.collect HashMultiset create

Introduction

In this page you can find the example usage for com.google.common.collect HashMultiset create.

Prototype

public static <E> HashMultiset<E> create() 

Source Link

Document

Creates a new, empty HashMultiset using the default initial capacity.

Usage

From source file:pl.polzone.classifier.Classifier.java

/**
 * Scores every known category against the supplied bag of words and returns
 * the best match, or null when no word produced a score.
 *
 * When the winner's score is more than double the runner-up's, the word list
 * is fed back into the model as a training example for that category.
 *
 * @param words the tokens to classify (stemmed internally)
 * @return the highest-scoring category, or null when nothing matched
 */
public String predict(java.util.List<String> words) {
    final Multiset<String> scores = HashMultiset.create();
    for (String token : words) {
        String word = stem(token);
        // Skip words seen in more than half of all feeds; they carry no signal.
        if (wordCount.getCount(word) > feedCount / 2) {
            continue;
        }
        if (occurences.containsKey(word)) {
            for (Object category : occurences.get(word).uniqueSet()) {
                scores.add((String) category,
                        occurences.get(word).getCount(category) + (feedCount - wordCount.getCount(word)));
            }
        }
    }

    if (scores.isEmpty()) {
        return null;
    }

    // Entries arrive ordered by descending count, so the first element wins.
    Iterator<Entry<String>> ranked = Multisets.copyHighestCountFirst(scores).entrySet().iterator();
    String winner = ranked.next().getElement();
    if (ranked.hasNext() && scores.count(winner) > scores.count(ranked.next().getElement()) * 2) {
        // Confident prediction: reinforce the model with this example.
        feed(winner, words);
    }
    return winner;
}

From source file:com.davidsoergel.stats.DistributionXYSeries.java

/**
 * Returns the multiset of y values recorded at the given x, lazily creating
 * and registering an empty one on first access.
 *
 * @param x the x coordinate to look up
 * @return the (possibly freshly created) multiset of y values for x
 */
private Multiset<Double> getYMultiset(double x) {
    Multiset<Double> existing = yValsPerX.get(x);
    if (existing != null) {
        return existing;
    }
    Multiset<Double> fresh = HashMultiset.create();
    yValsPerX.put(x, fresh);
    return fresh;
}

From source file:edu.uci.ics.sourcerer.tools.java.db.exported.ComponentVerifier.java

/**
 * Computes, for each library, the Jaccard similarity between its set of jars
 * and the jar sets of the maven (group, name) pairs it maps to, then reports
 * aggregate statistics about perfect, fragmented and combined libraries.
 */
public static void computeJaccard() {
    new DatabaseRunnable() {
        // Reusable query plus its two bound conditions for looking up
        // project ids by (group, name); initialized in action().
        SelectQuery groupQuery = null;
        ConstantCondition<String> groupEquals = null;
        ConstantCondition<String> nameEquals = null;

        // Aggregate jaccard statistics, partitioned by how each library maps
        // to maven artifacts (fragmented = split across several, combined =
        // merged supersets, or both).
        Averager<Double> jaccards = Averager.create();
        Averager<Double> fragmented = Averager.create();
        Averager<Double> combined = Averager.create();
        Averager<Double> fragmentedAndCombined = Averager.create();

        Averager<Integer> groupCount = Averager.create();
        // Counts how many libraries reference each "group.name" string;
        // a count > 1 later marks that maven artifact as fragmented.
        Multiset<String> fragmentedLibs = HashMultiset.create();
        Set<String> combinedLibs = new HashSet<>();
        Set<String> perfectLibs = new HashSet<>();

        // Per-library accumulators, filled while scanning the result set and
        // cleared by compute() once each library has been processed.
        Set<Integer> ids = new HashSet<>();
        Set<Pair<String, String>> groups = new HashSet<>();

        // Computes the jaccard stats for one library from the accumulated
        // ids/groups, writes per-library detail to the log writers, and
        // resets the accumulators. A null libraryID (first iteration) is a no-op.
        private void compute(Integer libraryID, LogFileWriter writer, LogFileWriter imperfectWriter) {
            if (libraryID != null) {
                writer.writeAndIndent(Integer.toString(libraryID));
                Averager<Double> avg = Averager.create();
                Set<Integer> other = new HashSet<>();
                // Stays true only while every group's project ids are a subset of this library's.
                boolean superset = true;
                for (Pair<String, String> group : groups) {
                    // Compute one jaccard for each
                    groupEquals.setValue(group.getFirst());
                    nameEquals.setValue(group.getSecond());
                    String lib = group.getFirst() + "." + group.getSecond();
                    fragmentedLibs.add(lib);
                    // NOTE(review): leftover debug trap for a specific artifact;
                    // consider removing or downgrading this log statement.
                    if (lib.equals("patterntesting.patterntesting-aspectj")) {
                        logger.info("wtf");
                    }
                    other.addAll(groupQuery.select().toCollection(ProjectsTable.PROJECT_ID));

                    superset &= ids.containsAll(other);
                    // Jaccard is size of intersection over size of union
                    // NOTE(review): "compuateJaccard" is a typo in the CollectionUtils
                    // API itself; it cannot be fixed from this call site.
                    double jaccard = CollectionUtils.compuateJaccard(ids, other);
                    writer.write(group.getFirst() + "." + group.getSecond() + " " + jaccard);
                    avg.addValue(jaccard);
                    other.clear();
                }
                writer.write("" + avg.getMean());
                writer.unindent();
                jaccards.addValue(avg.getMean());
                if (avg.getMean() < 1.0) {
                    // Imperfect match: classify as combined, fragmented+combined,
                    // or fragmented depending on group count and subset structure.
                    if (groups.size() > 1) {
                        for (Pair<String, String> group : groups) {
                            combinedLibs.add(group.getFirst() + "." + group.getSecond());
                        }
                        if (superset) {
                            combined.addValue(avg.getMean());
                        } else {
                            fragmentedAndCombined.addValue(avg.getMean());
                        }
                        groupCount.addValue(groups.size());
                    } else {
                        fragmented.addValue(avg.getMean());
                    }
                    imperfectWriter.write(libraryID + " " + avg.getMean());
                } else {
                    // Perfect match: remember each artifact so mismatches can be
                    // reported later against the fragmented/combined sets.
                    for (Pair<String, String> group : groups) {
                        perfectLibs.add(group.getFirst() + "." + group.getSecond());
                    }
                }
                ids.clear();
                groups.clear();
            }
        }

        @Override
        protected void action() {
            TaskProgressLogger task = TaskProgressLogger.get();

            // Set up the group query 
            groupQuery = exec.createSelectQuery(ProjectsTable.TABLE);
            groupQuery.addSelect(ProjectsTable.PROJECT_ID);
            groupEquals = ProjectsTable.GROUP.compareEquals();
            nameEquals = ProjectsTable.NAME.compareEquals();
            groupQuery.andWhere(groupEquals.and(nameEquals));

            // Get all the jars for each library
            QualifiedTable l2lv = ComponentRelationsTable.TABLE.qualify("a");
            QualifiedTable j2lv = ComponentRelationsTable.TABLE.qualify("b");
            try (SelectQuery query = exec.createSelectQuery(
                    ComponentRelationsTable.TARGET_ID.qualify(l2lv)
                            .compareEquals(ComponentRelationsTable.TARGET_ID.qualify(j2lv)),
                    ComponentRelationsTable.SOURCE_ID.qualify(j2lv).compareEquals(ProjectsTable.PROJECT_ID));
                    LogFileWriter writer = IOUtils.createLogFileWriter(JACCARD_LOG);
                    LogFileWriter imperfectWriter = IOUtils.createLogFileWriter(IMPERFECT_JACCARD_LOG)) {
                QualifiedColumn<Integer> libraryIDcol = ComponentRelationsTable.SOURCE_ID.qualify(l2lv);
                query.addSelect(libraryIDcol, ProjectsTable.PROJECT_ID, ProjectsTable.GROUP,
                        ProjectsTable.NAME);
                query.andWhere(
                        ComponentRelationsTable.TYPE.qualify(l2lv)
                                .compareEquals(ComponentRelation.LIBRARY_CONTAINS_LIBRARY_VERSION),
                        ComponentRelationsTable.TYPE.qualify(j2lv)
                                .compareEquals(ComponentRelation.JAR_MATCHES_LIBRARY_VERSION));
                // Ordering by library id lets the loop below detect library
                // boundaries with a simple lastLibraryID comparison.
                query.orderBy(libraryIDcol, true);

                task.start("Querying project listing");
                TypedQueryResult result = query.select();
                task.finish();

                Integer lastLibraryID = null;

                task.start("Processing libraries", "libraries processed", 500);
                while (result.next()) {
                    Integer libraryID = result.getResult(libraryIDcol);
                    Integer id = result.getResult(ProjectsTable.PROJECT_ID);
                    String group = result.getResult(ProjectsTable.GROUP);
                    String name = result.getResult(ProjectsTable.NAME);
                    // A change in library id means the previous library's rows are
                    // complete: flush its accumulated ids/groups.
                    if (!libraryID.equals(lastLibraryID)) {
                        compute(lastLibraryID, writer, imperfectWriter);
                        lastLibraryID = libraryID;
                        task.progress();
                    }
                    ids.add(id);
                    groups.add(new Pair<>(group, name));
                }
                // Flush the final library, which no boundary change triggers.
                compute(lastLibraryID, writer, imperfectWriter);
            } catch (IOException e) {
                logger.log(Level.SEVERE, "Exception writing log file", e);
            }
            task.finish();

            task.start("Reporting general statistics");
            task.report("AVG: " + jaccards.getMean() + " +-" + jaccards.getStandardDeviation());
            task.report("Count: " + jaccards.getCount());
            task.report("MIN: " + jaccards.getMin());
            task.report("MAX: " + jaccards.getMax());
            jaccards.writeDoubleValueMap(JACCARD_TABLE.getValue(), 1);
            task.finish();

            task.start("Reporting fragmented statistics");
            task.report("AVG: " + fragmented.getMean() + " +-" + fragmented.getStandardDeviation());
            task.report("Count: " + fragmented.getCount());
            task.report("MIN: " + fragmented.getMin());
            task.report("MAX: " + fragmented.getMax());
            fragmented.writeDoubleValueMap(FRAGMENTED_TABLE.getValue(), 1);
            task.finish();

            task.start("Reporting combined statistics");
            task.report("AVG: " + combined.getMean() + " +-" + combined.getStandardDeviation());
            task.report("Count: " + combined.getCount());
            task.report("MIN: " + combined.getMin());
            task.report("MAX: " + combined.getMax());
            combined.writeDoubleValueMap(COMBINED_TABLE.getValue(), 1);
            task.finish();

            task.start("Reporting fragmented and combined statistics");
            task.report("AVG: " + fragmentedAndCombined.getMean() + " +-"
                    + fragmentedAndCombined.getStandardDeviation());
            task.report("Count: " + fragmentedAndCombined.getCount());
            task.report("MIN: " + fragmentedAndCombined.getMin());
            task.report("MAX: " + fragmentedAndCombined.getMax());
            fragmentedAndCombined.writeDoubleValueMap(FRAGMENTED_AND_COMBINED_TABLE.getValue(), 1);
            task.finish();

            task.start("Reporting group size statistics");
            task.report("AVG: " + groupCount.getMean() + " +-" + groupCount.getStandardDeviation());
            task.report("Count: " + groupCount.getCount());
            task.report("MIN: " + groupCount.getMin());
            task.report("MAX: " + groupCount.getMax());
            task.finish();

            // Sanity check: a library counted as perfect should not also appear
            // fragmented (multiset count > 1) or combined.
            for (String lib : perfectLibs) {
                if (fragmentedLibs.count(lib) > 1) {
                    task.report(lib + " should be perfect but is fragmented");
                }
                if (combinedLibs.contains(lib)) {
                    task.report(lib + " should be perfect but is combined");
                }
            }
            // Second pass: classify from the maven artifact's point of view.
            int perfect = 0;
            int fragAndCombined = 0;
            Averager<Integer> frag = Averager.create();
            for (String lib : fragmentedLibs.elementSet()) {
                int count = fragmentedLibs.count(lib);
                if (count > 1) {
                    frag.addValue(count);
                    if (combinedLibs.contains(lib)) {
                        fragAndCombined++;
                    }
                } else if (!combinedLibs.contains(lib)) {
                    perfect++;
                }
            }
            task.start("Reporting stats from maven pov");

            task.start("Reporting perfect match statistics");
            task.report("Count: " + perfect);
            task.finish();

            task.start("Reporting fragmentation statistics");
            task.report("AVG: " + frag.getMean() + " +-" + frag.getStandardDeviation());
            task.report("Count: " + frag.getCount());
            task.report("MIN: " + frag.getMin());
            task.report("MAX: " + frag.getMax());
            task.finish();

            task.start("Reporting combination statistics");
            task.report("Count: " + combinedLibs.size());
            task.finish();

            task.start("Reporting fragmented & combined statistics");
            task.report("Count: " + fragAndCombined);
            task.finish();

            task.finish();
        }
    }.run();
}

From source file:com.jeffreybosboom.lyne.Solver.java

/**
 * Returns the paths through the given solved puzzle, one per color, or null
 * if the solution paths are unsatisfying.
 * @param puzzle a solved puzzle//from w ww.ja va2s.c o  m
 * @return the solution paths, one per color, or null
 */
/**
 * Traces one path per color through a solved puzzle.
 *
 * @param puzzle a solved puzzle (every edge reduced to a single possibility)
 * @return the solution paths, one per color, or null when the traced paths
 *         do not satisfy the puzzle's node-visit requirements
 */
private static Set<List<Node>> solutionPaths(Puzzle puzzle) {
    puzzle.getClass(); // implicit null check: throws NPE when puzzle == null
    checkArgument(puzzle.edges().allMatch(e -> puzzle.possibilities(e.first, e.second).size() == 1));
    ImmutableSet.Builder<List<Node>> builder = ImmutableSet.builder();
    Iterator<Pair<Node, Node>> terminals = puzzle.terminals().iterator();
    while (terminals.hasNext()) {
        Pair<Node, Node> terminal = terminals.next();
        List<Node> seed = new ArrayList<>();
        seed.add(terminal.first);
        List<Node> traced = findPath(puzzle, seed, terminal.second, new HashSet<>());
        if (traced == null) {
            return null;
        }
        builder.add(traced);
    }
    ImmutableSet<List<Node>> paths = builder.build();
    Multiset<Node> appearances = HashMultiset.create();
    for (List<Node> path : paths) {
        appearances.addAll(path);
    }
    //ensure each node appears enough times over all the paths
    //TODO: we check colored node appearances in findPath, so this could be
    //just octagons?
    if (!puzzle.nodes().allMatch(n -> appearances.count(n) == (n.desiredEdges() + 1) / 2)) {
        return null;
    }
    return paths;
}

From source file:org.datanucleus.store.types.guava.wrappers.Multiset.java

/**
 * Method to initialise the SCO from an existing value.
 * @param c The object to set from//from   ww w .ja  v  a2 s . c  o  m
 */
/**
 * Initialises this SCO wrapper from an existing multiset value.
 * The elements are copied into a fresh delegate so the wrapper never shares
 * storage with the supplied value.
 *
 * @param c the multiset to copy from (may be null, leaving the delegate empty)
 */
public void initialise(com.google.common.collect.Multiset c) {
    delegate = HashMultiset.create();
    if (c != null) {
        // Make copy of the elements rather than using same memory
        delegate.addAll(c);
    }
    if (NucleusLogger.PERSISTENCE.isDebugEnabled()) {
        String wrapperOptions = SCOUtils.getSCOWrapperOptionsMessage(true, false, true, false);
        NucleusLogger.PERSISTENCE.debug(Localiser.msg("023003", this.getClass().getName(),
                ownerOP.getObjectAsPrintable(), ownerMmd.getName(), String.valueOf(size()),
                wrapperOptions));
    }
}

From source file:additionalpipes.chunk.ChunkManager.java

/**
 * Recomputes the set of persistent chunks visible to the player and, when it
 * changed since the last send, pushes an update packet and caches the new set.
 *
 * @param player the player whose watched persistent chunks are refreshed
 */
public void sendPersistentChunks(EntityPlayerMP player) {
    Multiset<ChunkCoordIntPair> previous = players.get(player);
    // A null entry acts as a sentinel meaning "show chunks from every mod";
    // remove() both strips it and reports whether it was present.
    boolean showAll = previous.remove(null);
    Multiset<ChunkCoordIntPair> current = HashMultiset.create();

    WorldServer world = (WorldServer) player.worldObj;
    for (Map.Entry<ChunkCoordIntPair, Ticket> entry : ForgeChunkManager.getPersistentChunksFor(world).entries()) {
        boolean ownMod = APDefaultProps.ID.equals(entry.getValue().getModId());
        if ((showAll || ownMod) && world.getPlayerManager().isPlayerWatchingChunk(player,
                entry.getKey().chunkXPos, entry.getKey().chunkZPos)) {
            current.add(entry.getKey());
        }
    }

    if (!previous.equals(current)) {
        CoreProxy.proxy.sendToPlayer(player, new PacketChunkCoordList(APPacketIds.UPDATE_LASERS, current));
        if (showAll) {
            // Re-insert the sentinel so the preference survives for the next refresh.
            current.add(null);
        }
        players.put(player, current);
    }
}

From source file:com.codetroopers.maven.mergeprops.MergeProperty.java

/**
 * Fails the build when the merged locale bundles do not all contain the same
 * number of keys, reporting the keys that are missing from at least one bundle.
 *
 * @param propertiesMap bundle name to its loaded properties
 * @param merge the merge configuration (controls whether a mismatch is fatal)
 * @param localeSizeMap locale to its key count
 * @throws MojoFailureException when a count mismatch is detected
 */
private static void checkCountMismatch(final Map<String, Properties> propertiesMap, final Merge merge,
        final Map<String, Integer> localeSizeMap) throws MojoFailureException {
    // When every locale has the same key count, the sizes collapse to one distinct value.
    if (shouldFailIfNoMatchFromProperty() && merge.getFailOnCountMismatch()
            && new HashSet<Integer>(localeSizeMap.values()).size() != 1) {

        // Tally how many bundles contain each key.
        final HashMultiset<String> keyTally = HashMultiset.create();
        for (Map.Entry<String, Properties> entry : propertiesMap.entrySet()) {
            keyTally.addAll(Maps.fromProperties(entry.getValue()).keySet());
        }
        final int bundlesAmount = propertiesMap.keySet().size();
        // A key present in fewer (or more) bundles than expected is "lonely".
        final Set<String> lonelyKeys = new HashSet<String>();
        for (String key : keyTally.elementSet()) {
            if (keyTally.count(key) != bundlesAmount) {
                lonelyKeys.add(key);
            }
        }
        throw new MojoFailureException(lonelyKeys, "Invalid property count for file : " + merge.getTarget(),
                "Lonely keys are : \n" + Joiner.on("\n").join(lonelyKeys));
    }
}

From source file:org.sonar.plugins.core.issue.IssueCountersDecorator.java

/**
 * Aggregates the open issues of the resource into severity/rule counters and
 * saves the derived measures on the decorator context.
 *
 * @param resource the resource being decorated
 * @param context the decorator context the measures are saved to
 */
public void decorate(Resource resource, DecoratorContext context) {
    Issuable issuable = perspectives.as(Issuable.class, resource);
    if (issuable == null) {
        return;
    }
    Collection<Issue> issues = getOpenIssues(issuable.issues());
    boolean saveNewMetrics = shouldSaveNewMetrics(context);

    Multiset<RulePriority> severityCounts = HashMultiset.create();
    Map<RulePriority, Multiset<Rule>> rulesBySeverity = Maps.newHashMap();
    ListMultimap<RulePriority, Issue> issuesBySeverity = ArrayListMultimap.create();
    int unassignedCount = 0;
    int falsePositiveCount = 0;

    for (Issue issue : issues) {
        RulePriority severity = RulePriority.valueOf(issue.severity());
        severityCounts.add(severity);
        Multiset<Rule> rulesForSeverity = initRules(rulesBySeverity, severity);
        rulesForSeverity.add(rulefinder.findByKey(issue.ruleKey().repository(), issue.ruleKey().rule()));
        issuesBySeverity.put(severity, issue);

        if (issue.assignee() == null) {
            unassignedCount++;
        }
        if (Issue.RESOLUTION_FALSE_POSITIVE.equals(issue.resolution())) {
            falsePositiveCount++;
        }
    }

    for (RulePriority severity : RulePriority.values()) {
        saveIssuesForSeverity(context, severity, severityCounts);
        saveIssuesPerRules(context, severity, rulesBySeverity);
        saveNewIssuesForSeverity(context, severity, issuesBySeverity, saveNewMetrics);
        saveNewIssuesPerRule(context, severity, issues, saveNewMetrics);
    }

    saveTotalIssues(context, issues);
    saveNewIssues(context, issues, saveNewMetrics);

    saveMeasure(context, CoreMetrics.UNASSIGNED_ISSUES, unassignedCount);
    saveMeasure(context, CoreMetrics.FALSE_POSITIVE_ISSUES, falsePositiveCount);
}

From source file:com.hxj.websimplejava.tools.MavenUtils.java

/**
 * Records every dependency line that occurs more than once in {@code lineSet}
 * into {@code conflictSet}.
 *
 * NOTE(review): this only detects conflicts if {@code lineSet} can actually
 * hold duplicates (e.g. a List) — TODO confirm; if it is a true Set every
 * count is 1 and nothing is ever flagged.
 */
public void checkMavenConflict() {
    HashMultiset<String> lineCounts = HashMultiset.create();
    lineCounts.addAll(lineSet);
    // Iterate distinct elements only: the original loop walked every duplicate
    // occurrence, re-counting and re-adding the same line once per occurrence.
    for (String line : lineCounts.elementSet()) {
        if (lineCounts.count(line) > 1) {
            conflictSet.add(line);
        }
    }
}

From source file:org.sonar.plugins.core.issue.CountUnresolvedIssuesDecorator.java

/**
 * Aggregates all issues of the resource into severity/rule counters plus
 * open/reopened/confirmed status counts and saves the derived measures on
 * the decorator context.
 *
 * @param resource the resource being decorated
 * @param context the decorator context the measures are saved to
 */
public void decorate(Resource resource, DecoratorContext context) {
    Issuable issuable = perspectives.as(Issuable.class, resource);
    if (issuable == null) {
        return;
    }
    Collection<Issue> issues = issuable.issues();
    boolean saveNewMetrics = shouldSaveNewMetrics(context);

    Multiset<RulePriority> severityCounts = HashMultiset.create();
    Map<RulePriority, Multiset<Rule>> rulesBySeverity = Maps.newHashMap();
    ListMultimap<RulePriority, Issue> issuesBySeverity = ArrayListMultimap.create();
    int openCount = 0;
    int reopenedCount = 0;
    int confirmedCount = 0;

    for (Issue issue : issues) {
        RulePriority severity = RulePriority.valueOf(issue.severity());
        severityCounts.add(severity);
        Multiset<Rule> rulesForSeverity = initRules(rulesBySeverity, severity);
        rulesForSeverity.add(rulefinder.findByKey(issue.ruleKey().repository(), issue.ruleKey().rule()));
        issuesBySeverity.put(severity, issue);

        if (Issue.STATUS_OPEN.equals(issue.status())) {
            openCount++;
        }
        if (Issue.STATUS_REOPENED.equals(issue.status())) {
            reopenedCount++;
        }
        if (Issue.STATUS_CONFIRMED.equals(issue.status())) {
            confirmedCount++;
        }
    }

    for (RulePriority severity : RulePriority.values()) {
        saveIssuesForSeverity(context, severity, severityCounts);
        saveIssuesPerRules(context, severity, rulesBySeverity);
        saveNewIssuesForSeverity(context, severity, issuesBySeverity, saveNewMetrics);
        saveNewIssuesPerRule(context, severity, issues, saveNewMetrics);
    }

    saveTotalIssues(context, issues);
    saveNewIssues(context, issues, saveNewMetrics);

    saveMeasure(context, CoreMetrics.OPEN_ISSUES, openCount);
    saveMeasure(context, CoreMetrics.REOPENED_ISSUES, reopenedCount);
    saveMeasure(context, CoreMetrics.CONFIRMED_ISSUES, confirmedCount);
}