Example usage for com.google.common.collect HashMultiset create

Introduction

This page lists usage examples for com.google.common.collect.HashMultiset#create().

Prototype

public static <E> HashMultiset<E> create() 

Document

Creates a new, empty HashMultiset using the default initial capacity.
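
A minimal, self-contained sketch of the typical pattern. The class name, element type, and sample data below are illustrative only and do not come from the projects listed under Usage:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

import java.util.Arrays;
import java.util.List;

public class HashMultisetCreateExample {
    public static void main(String[] args) {
        // create() returns an empty multiset sized for the default expected number of distinct elements
        Multiset<String> words = HashMultiset.create();

        List<String> tokens = Arrays.asList("a", "b", "a", "c", "a", "b");
        words.addAll(tokens);

        // count(Object) reports how many times an element was added
        System.out.println(words.count("a")); // 3
        System.out.println(words.count("c")); // 1

        // elementSet() yields each distinct element exactly once
        for (String word : words.elementSet()) {
            System.out.println(word + " -> " + words.count(word));
        }
    }
}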

Usage

From source file:org.sonar.java.se.JavaCheckVerifier.java

private static void validateSecondaryLocations(List<AnalyzerMessage> actual, List<Integer> expected) {
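    // actualLines records, with multiplicity, the line numbers of the reported secondary locations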
    Multiset<Integer> actualLines = HashMultiset.create();
    actualLines.addAll(
            actual.stream().map(secondaryLocation -> secondaryLocation.getLine()).collect(Collectors.toList()));
    List<Integer> unexpected = new ArrayList<>();
    for (Integer actualLine : actualLines) {
        if (expected.contains(actualLine)) {
            expected.remove(actualLine);
        } else {
            unexpected.add(actualLine);
        }
    }
    if (!expected.isEmpty() || !unexpected.isEmpty()) {
        fail("Secondary locations: expected: " + expected + " unexpected:" + unexpected);
    }
}

From source file:it.units.malelab.ege.util.DUMapper.java

private static double[][][] getGomeaData(String baseDir, String fileNamePattern, int generations,
        int genotypeSize) throws IOException {
    double[][] usages = new double[generations][];
    Set<Character>[] domains = new Set[genotypeSize];
    Multiset<Character>[][] symbols = new Multiset[generations][];
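    // domains[i] gathers every character seen at genotype position i; symbols[g][i] counts character
    // occurrences at position i in generation g (later fed to Utils.multisetDiversity)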
    for (int i = 0; i < genotypeSize; i++) {
        domains[i] = new HashSet<>();
    }
    for (int g = 0; g < generations; g++) {
        symbols[g] = new Multiset[genotypeSize];
        for (int i = 0; i < genotypeSize; i++) {
            symbols[g][i] = HashMultiset.create();
        }
        usages[g] = new double[genotypeSize];
        BufferedReader reader = Files.newBufferedReader(
                FileSystems.getDefault().getPath(baseDir, String.format(fileNamePattern, g)));
        String line;
        int populationSize = 0;
        while ((line = reader.readLine()) != null) {
            populationSize = populationSize + 1;
            String[] pieces = line.split(" ");
            String genotype = pieces[0];
            for (int i = 0; i < genotypeSize; i++) {
                domains[i].add(genotype.charAt(i));
                symbols[g][i].add(genotype.charAt(i));
            }
            for (int i = 2; i < pieces.length; i++) {
                int intronIndex = Integer.parseInt(pieces[i]);
                usages[g][intronIndex] = usages[g][intronIndex] + 1;
            }
        }
        for (int i = 0; i < genotypeSize; i++) {
            usages[g][i] = (populationSize - usages[g][i]) / populationSize;
        }
        reader.close();
    }
    double[][] diversities = new double[generations][];
    for (int g = 0; g < generations; g++) {
        diversities[g] = new double[genotypeSize];
        for (int i = 0; i < genotypeSize; i++) {
            diversities[g][i] = Utils.multisetDiversity(symbols[g][i], domains[i]);
        }
    }
    return new double[][][] { diversities, usages };
}

From source file:org.onebusaway.nyc.vehicle_tracking.impl.inference.VehicleInferenceInstance.java

public VehicleLocationDetails getBadParticleDetails() {
    final VehicleLocationDetails details = new VehicleLocationDetails();

    setLastRecordForDetails(details);

    if (_badParticles != null)
        details.setParticleFilterFailure(true);

    final Multiset<Particle> particles = HashMultiset.create();
    if (_badParticles != null)
        particles.addAll(_badParticles);
    details.setParticles(particles);

    return details;
}

From source file:org.onebusaway.nyc.vehicle_tracking.impl.inference.MotionModelImpl.java

private Particle sampleTransitionParticle(Entry<Particle> parent, BlockStateObservation newParentBlockStateObs,
        Observation obs, final double vehicleHasNotMovedProb, Set<BlockStateObservation> transitions)
        throws ParticleFilterException {

    final long timestamp = obs.getTime();
    final VehicleState parentState = parent.getElement().getData();

    final CategoricalDist<Particle> transitionDist = new CategoricalDist<Particle>();

    Multiset<Particle> debugTransitions = null;
    if (ParticleFilter.getDebugEnabled())
        debugTransitions = HashMultiset.create();
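    // When debugging is enabled, every candidate transition particle is collected here and later
    // attached to the sampled particle via setTransitions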

    for (final BlockStateObservation proposalEdge : transitions) {

        final SensorModelResult transProb = new SensorModelResult("Transition");
        final double inMotionSample = ParticleFactoryImpl.getThreadLocalRng().get().nextDouble();
        final boolean vehicleNotMoved = inMotionSample < vehicleHasNotMovedProb;
        final MotionState motionState = updateMotionState(parentState, obs, vehicleNotMoved);

        final Map.Entry<BlockSampleType, BlockStateObservation> newEdge;
        JourneyState journeyState;
        /*
         * We have to allow for a driver to start a trip late, so we check to see
         * if during the transition it was snapped or not. If not, then we assume
         * it may still be not in service
         */
        newEdge = sampleEdgeFromProposal(newParentBlockStateObs, proposalEdge, obs,
                parentState.getJourneyState().getPhase(), vehicleNotMoved);
        journeyState = _journeyStateTransitionModel.getJourneyState(newEdge.getValue(), parentState, obs,
                vehicleNotMoved);

        final VehicleState newState = new VehicleState(motionState, newEdge.getValue(), journeyState, null,
                obs);
        final Context context = new Context(parentState, newState, obs);

        transProb.addResultAsAnd(edgeLikelihood.likelihood(context));
        transProb.addResultAsAnd(gpsLikelihood.likelihood(context));
        transProb.addResultAsAnd(dscLikelihood.likelihood(context));
        transProb.addResultAsAnd(runLikelihood.likelihood(context));
        transProb.addResultAsAnd(schedLikelihood.likelihood(context));
        transProb.addResultAsAnd(runTransitionLikelihood.likelihood(context));
        transProb.addResultAsAnd(nullStateLikelihood.likelihood(context));
        transProb.addResultAsAnd(nullLocationLikelihood.likelihood(context));
        transProb.addResultAsAnd(movedLikelihood.likelihood(context));

        /*
         * TODO: this is mainly for debug and can/should be removed.
         */
        transProb.addLogResultAsAnd(newEdge.getKey().name(), 0);

        final Particle newParticle = new Particle(timestamp, parent.getElement(), 0.0, newState);
        newParticle.setResult(transProb);

        if (ParticleFilter.getDebugEnabled()) {
            final double logWeight = parent.getElement().getLogWeight()
                    + newParticle.getResult().getLogProbability();
            newParticle.setLogWeight(logWeight);
            debugTransitions.add(newParticle);
        }

        transitionDist.logPut(transProb.getLogProbability(), newParticle);
    }

    final Particle newParticle;
    if (transitionDist.canSample()) {
        newParticle = transitionDist.sample();
        final double logWeight = parent.getElement().getLogWeight()
                + newParticle.getResult().getLogProbability();
        newParticle.setLogWeight(logWeight);

    } else {
        final SensorModelResult transProb = new SensorModelResult("Transition (null)");
        final double inMotionSample = ParticleFactoryImpl.getThreadLocalRng().get().nextDouble();
        final boolean vehicleNotMoved = inMotionSample < vehicleHasNotMovedProb;
        final MotionState motionState = updateMotionState(parentState, obs, vehicleNotMoved);
        final JourneyState journeyState = _journeyStateTransitionModel.getJourneyState(null, null, obs,
                vehicleNotMoved);
        final VehicleState nullState = new VehicleState(motionState, null, journeyState, null, obs);
        final Context context = new Context(parentState, nullState, obs);
        transProb.addResultAsAnd(edgeLikelihood.likelihood(context));
        transProb.addResultAsAnd(gpsLikelihood.likelihood(context));
        transProb.addResultAsAnd(dscLikelihood.likelihood(context));
        transProb.addResultAsAnd(runLikelihood.likelihood(context));
        transProb.addResultAsAnd(schedLikelihood.likelihood(context));
        transProb.addResultAsAnd(runTransitionLikelihood.likelihood(context));
        transProb.addResultAsAnd(nullStateLikelihood.likelihood(context));
        transProb.addResultAsAnd(nullLocationLikelihood.likelihood(context));
        transProb.addResultAsAnd(movedLikelihood.likelihood(context));
        newParticle = new Particle(timestamp, parent.getElement(), 0.0, nullState);
        newParticle.setResult(transProb);
        final double logWeight = parent.getElement().getLogWeight()
                + newParticle.getResult().getLogProbability();
        newParticle.setLogWeight(logWeight);

    }

    if (ParticleFilter.getDebugEnabled())
        newParticle.setTransitions(debugTransitions);

    return newParticle;
}

From source file:BibTex.IOmethods.java

public void writeConnectedCategories(Set<BibTexRef> refs) throws IOException {

    BufferedWriter bw = new BufferedWriter(new FileWriter(folder + "connected categories.csv"));
    StringBuilder sb = new StringBuilder();
    int maxCountCategory = 0;
    sb.append("Source,Target,Type,Weight").append("\n");

    //creation of convenient data structures for I/O
    Multiset<Edge> edges = HashMultiset.create();
    Multiset<String> multisetCategoryNames = HashMultiset.create();
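    // edges counts how many references connect each pair of categories; multisetCategoryNames counts
    // how many references carry each individual category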

    for (BibTexRef ref : refs) {
        Set<Category> categories = ref.getCategories();
        Set<String> categoriesNames = new HashSet();

        for (Category category : categories) {
            categoriesNames.add(category.getCategoryName());
            multisetCategoryNames.add(category.getCategoryName());
        }

        FindAllPairs findAllPairs = new FindAllPairs();
        List<Pair<String>> pairs = findAllPairs.getAllUndirectedPairsAsList(categoriesNames);

        for (Pair<String> pair : pairs) {
            Edge edge = new Edge();
            edge.setNode1(pair.getLeft());
            edge.setNode2(pair.getRight());
            edges.add(edge);

        }

    }

    //finding the max number for a category, for normalization purposes
    for (String string : multisetCategoryNames.elementSet()) {
        if (maxCountCategory < multisetCategoryNames.count(string)) {
            maxCountCategory = multisetCategoryNames.count(string);
        }
    }

    //writing one line per category-pair edge, with its normalized weight
    for (Edge edge : edges.elementSet()) {
        //we devalue the weight of an edge by how frequent the 2 nodes of the edge are.
        float weight = edges.count(edge) / (float) (multisetCategoryNames.count(edge.getNode1())
                * multisetCategoryNames.count(edge.getNode2()));
        //            float weight = edges.count(edge);
        //normalization to a 0 -> 10 scale to visualize the weight on Gephi
        weight = weight * 10 / (float) maxCountCategory * 100000;
        sb.append(edge.getNode1()).append(",").append(edge.getNode2()).append(",Undirected,").append(weight);
        sb.append("\n");
    }
    bw.write(sb.toString());
    bw.close();
}

From source file:com.google.devtools.kythe.analyzers.java.JavaEntrySets.java

private int hashSymbol(Symbol sym) {
    // This method is necessary because Symbol, and most other javac internals, do not overload the
    // Object#hashCode() method and the default implementation, System#identityHashCode(Object), is
    // practically useless because it can change across JVM instances.  This method instead only
    // uses stable hashing methods such as String#hashCode(), Multiset#hashCode(), and
    // Integer#hashCode().

    if (symbolHashes.containsKey(sym)) {
        return symbolHashes.get(sym);
    }

    Multiset<Integer> hashes = HashMultiset.create();
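    // Multiset#hashCode() does not depend on insertion order, so member iteration order cannot change the result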
    if (sym.members() != null) {
        for (Symbol member : sym.members().getSymbols()) {
            if (member.isPrivate()
                    || member instanceof MethodSymbol && ((MethodSymbol) member).isStaticOrInstanceInit()
                    || ((member.flags_field & (Flags.BRIDGE | Flags.SYNTHETIC)) != 0)) {
                // Ignore initializers, private members, and synthetic members.  It's possible these do
                // not appear in the symbol's scope outside of its .java source compilation (i.e. they do
                // not appear in dependent compilations for Bazel's java rules).
                continue;
            }
            // We can't recursively get the result of hashSymbol(member) since the extractor removes all
            // .class files not directly used by a compilation meaning that member may not be complete.
            hashes.add(member.getSimpleName().toString().hashCode());
            hashes.add(member.kind.ordinal());
        }
    }

    hashes.add(sym.getQualifiedName().toString().hashCode());
    hashes.add(sym.getKind().ordinal());
    for (Modifier mod : sym.getModifiers()) {
        hashes.add(mod.ordinal());
    }

    int h = hashes.hashCode();
    symbolHashes.put(sym, h);
    return h;
}

From source file:com.continuuity.weave.internal.appmaster.ApplicationMasterService.java

/**
 * Handles containers that have completed.
 */
private void handleCompleted(List<YarnContainerStatus> completedContainersStatuses) {
    Multiset<String> restartRunnables = HashMultiset.create();
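    // restartRunnables counts, per runnable name, how many containers need to be re-requested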
    for (YarnContainerStatus status : completedContainersStatuses) {
        LOG.info("Container {} completed with {}:{}.", status.getContainerId(), status.getState(),
                status.getDiagnostics());
        runningContainers.handleCompleted(status, restartRunnables);
    }

    for (Multiset.Entry<String> entry : restartRunnables.entrySet()) {
        LOG.info("Re-request container for {} with {} instances.", entry.getElement(), entry.getCount());
        for (int i = 0; i < entry.getCount(); i++) {
            runnableContainerRequests.add(createRunnableContainerRequest(entry.getElement()));
        }
    }

    // For all runnables that need to re-request containers, update the expected count timestamp
    // so that the EventHandler is triggered with the right expiration timestamp.
    expectedContainers.updateRequestTime(restartRunnables.elementSet());
}

From source file:org.icgc.dcc.submission.dictionary.DictionaryValidator.java

private void validateCodeLists(Set<DictionaryConstraintViolation> errors,
        Set<DictionaryConstraintViolation> warnings) {
    for (val codeListName : dictionary.getCodeListNames()) {
        val collection = codeListIndex.get(codeListName);
        int count = collection.size();
        if (count == 0) {
            warnings.add(new DictionaryConstraintViolation("Missing code list", codeListName));
            break;
        }
        if (count > 1) {
            errors.add(new DictionaryConstraintViolation("Duplicate code lists", collection));
        }

        val codeList = getFirst(collection, null);

        Multiset<String> codes = HashMultiset.create();
        Multiset<String> values = HashMultiset.create();
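        // counting codes and values turns duplicate detection into a simple count() > 1 check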
        for (val term : codeList.getTerms()) {
            codes.add(term.getCode());
            values.add(term.getValue());
        }

        for (val term : codeList.getTerms()) {
            val code = term.getCode();
            val value = term.getValue();

            if (codes.count(code) > 1) {
                errors.add(
                        new DictionaryConstraintViolation("Duplicate code list codes", term, code, codeList));
            }
            if (values.count(value) > 1) {
                errors.add(
                        new DictionaryConstraintViolation("Duplicate code list values", term, value, codeList));
            }
            if (codes.contains(value) && !code.equals(value)) {
                errors.add(new DictionaryConstraintViolation("Non-disjoint code list code and value", term,
                        value, codeList));
            }
        }
    }
}

From source file:buildcraft.transport.Pipe.java

private void resolveActions() {
    if (!hasGate())
        return;

    boolean oldBroadcastRedstone = broadcastRedstone;
    boolean[] oldBroadcastSignal = broadcastSignal;

    broadcastRedstone = false;
    broadcastSignal = new boolean[] { false, false, false, false };

    // Tell the gate to prepare for resolving actions. (Disable pulser)
    gate.startResolution();

    HashMap<Integer, Boolean> actions = new HashMap<Integer, Boolean>();
    Multiset<Integer> actionCount = HashMultiset.create();
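    // actionCount tracks how many gate slots activate each action id; the count is later passed to gate.resolveAction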

    // Computes the actions depending on the triggers
    for (int it = 0; it < 8; ++it) {
        ITrigger trigger = activatedTriggers[it];
        IAction action = activatedActions[it];
        ITriggerParameter parameter = triggerParameters[it];

        if (trigger != null && action != null) {
            actionCount.add(action.getId());
            if (!actions.containsKey(action.getId())) {
                actions.put(action.getId(), isNearbyTriggerActive(trigger, parameter));
            } else if (gate.getConditional() == GateConditional.AND) {
                actions.put(action.getId(),
                        actions.get(action.getId()) && isNearbyTriggerActive(trigger, parameter));
            } else {
                actions.put(action.getId(),
                        actions.get(action.getId()) || isNearbyTriggerActive(trigger, parameter));
            }
        }
    }

    // Activate the actions
    for (Integer i : actions.keySet())
        if (actions.get(i)) {

            // Custom gate actions take precedence over defaults.
            if (gate.resolveAction(ActionManager.actions[i], actionCount.count(i))) {
                continue;
            }

            if (ActionManager.actions[i] instanceof ActionRedstoneOutput) {
                broadcastRedstone = true;
            } else if (ActionManager.actions[i] instanceof ActionSignalOutput) {
                broadcastSignal[((ActionSignalOutput) ActionManager.actions[i]).color.ordinal()] = true;
            } else {
                for (int a = 0; a < container.tileBuffer.length; ++a)
                    if (container.tileBuffer[a].getTile() instanceof IActionReceptor) {
                        IActionReceptor recept = (IActionReceptor) container.tileBuffer[a].getTile();
                        recept.actionActivated(ActionManager.actions[i]);
                    }
            }
        }

    actionsActivated(actions);

    if (oldBroadcastRedstone != broadcastRedstone) {
        container.scheduleRenderUpdate();
        updateNeighbors(true);
    }

    for (int i = 0; i < oldBroadcastSignal.length; ++i)
        if (oldBroadcastSignal[i] != broadcastSignal[i]) {
            // worldObj.markBlockNeedsUpdate(xCoord, yCoord, zCoord);
            container.scheduleRenderUpdate();
            updateSignalState();
            break;
        }
}

From source file:BibTex.IOmethods.java

public void writeJournalsAndTheirCategories(Set<BibTexRef> refs, Integer minNumber) throws IOException {

    BufferedWriter bw = new BufferedWriter(new FileWriter(folder + "journals and their categories.csv"));
    //        BufferedWriter bwJournals = new BufferedWriter(new FileWriter(folder + "journals.csv"));
    StringBuilder sb = new StringBuilder();
    String sep = "|";

    //creation of convenient data structures for I/O
    Map<String, Multiset<String>> journalsAndTheirCategories = new HashMap();
    Multiset journals = HashMultiset.create();
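    // journals counts references per journal abbreviation (used for the minNumber filter);
    // each abbreviation maps to a multiset counting its categories' occurrences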

    JournalAbbreviationsMapping jmap = new JournalAbbreviationsMapping();
    jmap.loadMap();

    for (BibTexRef ref : refs) {
        Set<Category> categories = ref.getCategories();
        String title = ref.getJournal();
        if (title == null || title.isEmpty()) {
            continue;
        }
        title = title.toLowerCase();

        Set<String> abbrev = (Set<String>) jmap.getJournalsToAbbrev().get(title);
        if (abbrev == null || abbrev.isEmpty()) {
            abbrev = new HashSet();
            abbrev.add(title);
        }

        String abbreviation = abbrev.iterator().next();

        journals.add(abbreviation);
        if (!journalsAndTheirCategories.containsKey(abbreviation)) {
            Multiset<String> cats = HashMultiset.create();
            journalsAndTheirCategories.put(abbreviation, cats);
        }

        for (Category category : categories) {
            journalsAndTheirCategories.get(abbreviation).add(category.getCategoryName());
        }
    }

    for (String journal : journalsAndTheirCategories.keySet()) {
        if (journals.count(journal) < minNumber) {
            continue;
        }

        for (String category : journalsAndTheirCategories.get(journal).elementSet()) {
            sb.append(journal).append(sep).append(category).append(sep)
                    .append(journalsAndTheirCategories.get(journal).count(category)).append("\n");
        }
    }
    bw.write(sb.toString());
    bw.close();
    //        sb = new StringBuilder();
    //        for (String journal : journalsAndTheirCategories.keySet()) {
    //            Set<String> abbrev = (Set<String>) jmap.getJournalsToAbbrev().get(journal);
    //            if (abbrev == null || abbrev.isEmpty()) {
    //                abbrev = new HashSet();
    //                abbrev.add(journal);
    //            }
    //            sb.append(journal).append(sep).append(abbrev.iterator().next()).append("\n");
    //        }
    //        bwJournals.write(sb.toString());
    //        bwJournals.close();
}