Example usage for com.google.common.collect Multiset elementSet

List of usage examples for com.google.common.collect Multiset elementSet

Introduction

On this page you can find example usages of com.google.common.collect.Multiset.elementSet().

Prototype

Set<E> elementSet();

Document

Returns the set of distinct elements contained in this multiset.
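
As a minimal stand-alone sketch of that behavior (not taken from any of the projects listed below), elementSet() exposes each distinct element exactly once, while size() and count() still reflect the multiplicities:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class ElementSetDemo {
    public static void main(String[] args) {
        Multiset<String> words = HashMultiset.create();
        words.add("a");
        words.add("a");
        words.add("b");

        System.out.println(words.elementSet());        // distinct elements, e.g. [a, b]
        System.out.println(words.elementSet().size()); // 2
        System.out.println(words.size());              // 3 (total occurrences)
        System.out.println(words.count("a"));          // 2
    }
}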

Usage

From source file:org.opentripplanner.routing.spt.MultiStateShortestPathTree.java

public void dump() {
    Multiset<Integer> histogram = HashMultiset.create();
    int statesCount = 0;
    int maxSize = 0;
    for (Map.Entry<Vertex, List<State>> kv : stateSets.entrySet()) {
        List<State> states = kv.getValue();
        int size = states.size();
        histogram.add(size);
        statesCount += size;
        if (size > maxSize) {
            maxSize = size;
        }
    }
    LOG.info("SPT: vertices: " + stateSets.size() + " states: total: " + statesCount + " per vertex max: "
            + maxSize + " avg: " + (statesCount * 1.0 / stateSets.size()));
    List<Integer> nStates = new ArrayList<Integer>(histogram.elementSet());
    Collections.sort(nStates);
    for (Integer nState : nStates) {
        LOG.info(nState + " states: " + histogram.count(nState) + " vertices.");
    }
}
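
Because HashMultiset gives no ordering guarantee for elementSet(), the method above copies the distinct sizes into a list and sorts them before logging. As an alternative sketch (an assumption, not part of the original class), a TreeMultiset would keep the histogram keys sorted automatically:

    Multiset<Integer> histogram = TreeMultiset.create();
    // ... populate exactly as above ...
    for (Integer nState : histogram.elementSet()) { // already in ascending order
        LOG.info(nState + " states: " + histogram.count(nState) + " vertices.");
    }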

From source file:BibTex.IOmethods.java

public void writeNumberPapersPerYear(Set<BibTexRef> refs) throws IOException {

    BufferedWriter bw = new BufferedWriter(new FileWriter(folder + "papers per year.csv"));
    StringBuilder sb = new StringBuilder();

    String sep = "|";

    //creation of the data structures for I/O
    Multiset<String> years = TreeMultiset.create();

    for (BibTexRef ref : refs) {
        String year = ref.getYear();
        years.add(year);
    }

    for (String year : years.elementSet()) {
        sb.append(year);
        sb.append(sep);
    }
    sb.append("\n");

    for (String year : years.elementSet()) {
        sb.append(years.count(year));
        sb.append(sep);
    }
    sb.append("\n");

    bw.write(sb.toString());
    bw.close();
}
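
The choice of TreeMultiset matters here: its elementSet() is a SortedSet, so both loops visit the years in the same ascending order and the header row stays aligned with the count row. A minimal sketch of that ordering guarantee:

    Multiset<String> years = TreeMultiset.create();
    years.add("2012");
    years.add("2010");
    years.add("2012");
    // TreeMultiset keeps distinct elements sorted: prints [2010, 2012]
    System.out.println(years.elementSet());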

From source file:tr.com.serkanozal.proxyable.util.JvmUtil.java

private static int guessAlignment(int oopSize) {
    final int COUNT = 1000 * 1000;
    Object[] array = new Object[COUNT];
    long[] offsets = new long[COUNT];

    for (int c = 0; c < COUNT - 3; c += 3) {
        array[c + 0] = new MyObject1();
        array[c + 1] = new MyObject2();
        array[c + 2] = new MyObject3();
    }

    for (int c = 0; c < COUNT; c++) {
        offsets[c] = addressOfObject(array[c], oopSize);
    }

    Arrays.sort(offsets);

    Multiset<Integer> sizes = HashMultiset.create();
    for (int c = 1; c < COUNT; c++) {
        sizes.add((int) (offsets[c] - offsets[c - 1]));
    }

    int min = -1;
    for (int s : sizes.elementSet()) {
        if (s <= 0) {
            continue;
        }
        if (min == -1) {
            min = s;
        } else {
            min = gcd(min, s);
        }
    }

    return min;
}
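
Iterating elementSet() rather than the raw offsets means each distinct address delta is folded into the gcd only once, which is all the computation needs. The gcd helper itself is not shown on this page; a plausible sketch (an assumption about the original class, not its verbatim code) is the usual Euclidean algorithm:

    private static int gcd(int a, int b) {
        // Euclid's algorithm; callers only pass positive deltas
        while (b != 0) {
            int t = b;
            b = a % b;
            a = t;
        }
        return a;
    }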

From source file:bacter.model.ACGLikelihood.java

/**
 * Ensure pattern counts are up to date.
 */
private void updatePatterns() {
    List<Region> regionList = acg.getRegions(locus);

    // Remove stale pattern sets
    patterns.keySet().retainAll(regionList);
    patternLogLikelihoods.keySet().retainAll(regionList);
    rootPartials.keySet().retainAll(regionList);
    constantPatterns.keySet().retainAll(regionList);

    for (Region region : regionList) {

        if (patterns.containsKey(region))
            continue;

        // Add new pattern set
        Multiset<int[]> patSet = LinkedHashMultiset.create();
        for (int j = region.leftBoundary; j < region.rightBoundary; j++) {
            int[] pat = alignment.getPattern(alignment.getPatternIndex(j));
            patSet.add(pat);
        }
        patterns.put(region, patSet);

        // Allocate memory for corresponding log likelihoods and root partials
        patternLogLikelihoods.put(region, new double[patSet.elementSet().size()]);
        rootPartials.put(region, new double[patSet.elementSet().size() * nStates]);

        // Compute corresponding constant pattern list
        List<Integer> constantPatternList = new ArrayList<>();

        int patternIdx = 0;
        for (int[] pattern : patSet.elementSet()) {
            boolean isConstant = true;
            for (int i = 1; i < pattern.length; i++)
                if (pattern[i] != pattern[0]) {
                    isConstant = false;
                    break;
                }

            if (isConstant && !alignment.getDataType().isAmbiguousState(pattern[0]))
                constantPatternList.add(patternIdx * nStates + pattern[0]);

            patternIdx += 1;
        }

        constantPatterns.put(region, constantPatternList);
    }
}
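
The choice of LinkedHashMultiset is what keeps the patternIdx bookkeeping consistent: elementSet() iterates the distinct patterns in the order they were first added, so the indices computed in the loop line up with the arrays sized from patSet.elementSet().size(). (Note that int[] uses identity-based equality, so this grouping presumably relies on the alignment returning the same array instance for a repeated pattern index.) A minimal sketch of the iteration-order guarantee:

    Multiset<String> ms = LinkedHashMultiset.create();
    ms.add("b");
    ms.add("a");
    ms.add("b");
    // Insertion order of first occurrence is preserved: prints [b, a]
    System.out.println(ms.elementSet());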

From source file:com.google.idea.blaze.java.sync.source.SourceDirectoryCalculator.java

/** Adds the java source directories. */
private void calculateJavaSourceDirectories(BlazeContext context, WorkspaceRoot workspaceRoot,
        ArtifactLocationDecoder artifactLocationDecoder, WorkspacePath directoryRoot,
        Collection<SourceArtifact> javaArtifacts, Collection<JavaPackageReader> javaPackageReaders,
        Collection<BlazeSourceDirectory> result) {

    List<SourceRoot> sourceRootsPerFile = Lists.newArrayList();

    // Get java sources
    List<ListenableFuture<SourceRoot>> sourceRootFutures = Lists.newArrayList();
    for (final SourceArtifact sourceArtifact : javaArtifacts) {
        ListenableFuture<SourceRoot> future = executorService.submit(() -> sourceRootForJavaSource(context,
                artifactLocationDecoder, sourceArtifact, javaPackageReaders));
        sourceRootFutures.add(future);
    }
    try {
        for (SourceRoot sourceRoot : Futures.allAsList(sourceRootFutures).get()) {
            if (sourceRoot != null) {
                sourceRootsPerFile.add(sourceRoot);
            }
        }
    } catch (ExecutionException | InterruptedException e) {
        LOG.error(e);
        throw new IllegalStateException("Could not read sources");
    }

    // Sort source roots into their respective directories
    Multimap<WorkspacePath, SourceRoot> sourceDirectoryToSourceRoots = HashMultimap.create();
    for (SourceRoot sourceRoot : sourceRootsPerFile) {
        sourceDirectoryToSourceRoots.put(sourceRoot.workspacePath, sourceRoot);
    }

    // Create a mapping from directory to package prefix
    Map<WorkspacePath, SourceRoot> workspacePathToSourceRoot = Maps.newHashMap();
    for (WorkspacePath workspacePath : sourceDirectoryToSourceRoots.keySet()) {
        Collection<SourceRoot> sources = sourceDirectoryToSourceRoots.get(workspacePath);
        Multiset<String> packages = HashMultiset.create();

        for (SourceRoot source : sources) {
            packages.add(source.packagePrefix);
        }

        final String directoryPackagePrefix;
        // Common case -- all source files agree on a single package
        if (packages.elementSet().size() == 1) {
            directoryPackagePrefix = packages.elementSet().iterator().next();
        } else {
            String preferredPackagePrefix = PackagePrefixCalculator.packagePrefixOf(workspacePath);
            directoryPackagePrefix = pickMostFrequentlyOccurring(packages, preferredPackagePrefix);
        }

        SourceRoot candidateRoot = new SourceRoot(workspacePath, directoryPackagePrefix);
        workspacePathToSourceRoot.put(workspacePath, candidateRoot);
    }

    // Add content entry base if it doesn't exist
    if (!workspacePathToSourceRoot.containsKey(directoryRoot)) {
        SourceRoot candidateRoot = new SourceRoot(directoryRoot,
                PackagePrefixCalculator.packagePrefixOf(directoryRoot));
        workspacePathToSourceRoot.put(directoryRoot, candidateRoot);
    }

    // First, create a graph of the directory structure from root to each source file
    Map<WorkspacePath, SourceRootDirectoryNode> sourceRootDirectoryNodeMap = Maps.newHashMap();
    SourceRootDirectoryNode rootNode = new SourceRootDirectoryNode(directoryRoot, null);
    sourceRootDirectoryNodeMap.put(directoryRoot, rootNode);
    for (SourceRoot sourceRoot : workspacePathToSourceRoot.values()) {
        final String sourcePathRelativeToDirectoryRoot = sourcePathRelativeToDirectoryRoot(directoryRoot,
                sourceRoot.workspacePath);
        List<String> pathComponents = !Strings.isNullOrEmpty(sourcePathRelativeToDirectoryRoot)
                ? PATH_SPLITTER.splitToList(sourcePathRelativeToDirectoryRoot)
                : ImmutableList.of();
        SourceRootDirectoryNode previousNode = rootNode;
        for (int i = 0; i < pathComponents.size(); ++i) {
            final WorkspacePath workspacePath = getWorkspacePathFromPathComponents(directoryRoot,
                    pathComponents, i + 1);
            SourceRootDirectoryNode node = sourceRootDirectoryNodeMap.get(workspacePath);
            if (node == null) {
                node = new SourceRootDirectoryNode(workspacePath, pathComponents.get(i));
                sourceRootDirectoryNodeMap.put(workspacePath, node);
                previousNode.children.add(node);
            }
            previousNode = node;
        }
    }

    // Add package prefix votes at each directory node
    for (SourceRoot sourceRoot : workspacePathToSourceRoot.values()) {
        final String sourcePathRelativeToDirectoryRoot = sourcePathRelativeToDirectoryRoot(directoryRoot,
                sourceRoot.workspacePath);

        List<String> packageComponents = PACKAGE_SPLITTER.splitToList(sourceRoot.packagePrefix);
        List<String> pathComponents = !Strings.isNullOrEmpty(sourcePathRelativeToDirectoryRoot)
                ? PATH_SPLITTER.splitToList(sourcePathRelativeToDirectoryRoot)
                : ImmutableList.of();
        int packageIndex = packageComponents.size();
        int pathIndex = pathComponents.size();
        while (pathIndex >= 0 && packageIndex >= 0) {
            final WorkspacePath workspacePath = getWorkspacePathFromPathComponents(directoryRoot,
                    pathComponents, pathIndex);

            SourceRootDirectoryNode node = sourceRootDirectoryNodeMap.get(workspacePath);

            String packagePrefix = PACKAGE_JOINER.join(packageComponents.subList(0, packageIndex));

            // If this is the source root containing Java files, we *have* to pick its package prefix
            // Otherwise just add a vote
            if (sourceRoot.workspacePath.equals(workspacePath)) {
                node.forcedPackagePrefix = packagePrefix;
            } else {
                node.packagePrefixVotes.add(packagePrefix);
            }

            String pathComponent = pathIndex > 0 ? pathComponents.get(pathIndex - 1) : "";
            String packageComponent = packageIndex > 0 ? packageComponents.get(packageIndex - 1) : "";
            if (!pathComponent.equals(packageComponent)) {
                break;
            }

            --packageIndex;
            --pathIndex;
        }
    }

    Map<WorkspacePath, SourceRoot> sourceRoots = Maps.newHashMap();
    SourceRootDirectoryNode root = sourceRootDirectoryNodeMap.get(directoryRoot);
    visitDirectoryNode(sourceRoots, root, null);

    for (SourceRoot sourceRoot : sourceRoots.values()) {
        result.add(BlazeSourceDirectory.builder(workspaceRoot.fileForPath(sourceRoot.workspacePath))
                .setPackagePrefix(sourceRoot.packagePrefix).setGenerated(false).build());
    }
}
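
The pickMostFrequentlyOccurring helper is not shown on this page; a plausible sketch (an assumption, not the project's actual implementation) returns the package prefix with the highest vote count in the multiset, falling back to the preferred prefix on ties:

    private static String pickMostFrequentlyOccurring(Multiset<String> votes, String preferred) {
        String best = preferred;
        int bestCount = votes.count(preferred);
        for (Multiset.Entry<String> entry : votes.entrySet()) {
            if (entry.getCount() > bestCount) {
                best = entry.getElement();
                bestCount = entry.getCount();
            }
        }
        return best;
    }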

From source file:com.github.fhirschmann.clozegen.lib.generators.CollocationGapGenerator.java

@Override
public Optional<Gap> generate(final int count) {
    checkNotNull(model);
    Gap gap = new Gap();
    gap.addValidAnswers(triplet.getValue1());

    // Collect a list of possible candidates for this gap
    final Multiset<String> candidates = ConcurrentHashMultiset.create(MultisetUtils.mergeMultiSets(
            model.getTails().get(triplet.getValue2()), model.getHeads().get(triplet.getValue0())));

    // Remove the correct answer from the candidate set
    candidates.remove(triplet.getValue1(), candidates.count(triplet.getValue1()));

    // Remove candidates p* which appear in the context (A, p*, B)
    for (Entry<String> entry : candidates.entrySet()) {
        if (model.getMultiset().contains(
                MiscUtils.WS_JOINER.join(triplet.getValue0(), entry.getElement(), triplet.getValue2()))) {
            candidates.remove(entry.getElement(), entry.getCount());
        }
    }

    if (candidates.elementSet().size() > count - 2) {
        final Set<String> invalidAnswers = Sets
                .newHashSet(MultisetUtils.sortedElementList(candidates, count - 1));
        gap.addInvalidAnswers(invalidAnswers);
        return Optional.of(gap);
    } else {
        return Optional.absent();
    }
}
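
MultisetUtils.sortedElementList and mergeMultiSets come from the same project and are not shown here. A hedged sketch of what a sortedElementList helper could look like with plain Guava (an assumption, not the library's actual code) ranks the distinct candidates by descending frequency via Multisets.copyHighestCountFirst:

    static List<String> sortedElementList(Multiset<String> multiset, int limit) {
        List<String> result = new ArrayList<>();
        // copyHighestCountFirst orders the distinct elements by descending count
        for (String element : Multisets.copyHighestCountFirst(multiset).elementSet()) {
            if (result.size() >= limit) {
                break;
            }
            result.add(element);
        }
        return result;
    }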

From source file:bacter.model.ACGLikelihoodBeagle.java

/**
 * Ensure pattern counts are up to date.
 */
private void updatePatterns() {
    List<Region> regionList = acg.getRegions(locus);

    // Remove stale pattern sets
    patterns.keySet().retainAll(regionList);
    constantPatterns.keySet().retainAll(regionList);

    for (Region region : regionList) {

        if (patterns.containsKey(region))
            continue;

        // Add new pattern set
        Multiset<int[]> patSet = LinkedHashMultiset.create();
        for (int j = region.leftBoundary; j < region.rightBoundary; j++) {
            int[] pat = alignment.getPattern(alignment.getPatternIndex(j));
            patSet.add(pat);
        }
        patterns.put(region, patSet);

        // Compute corresponding constant pattern list
        List<Integer> constantPatternList = new ArrayList<>();

        int patternIdx = 0;
        for (int[] pattern : patSet.elementSet()) {
            boolean isConstant = true;
            for (int i = 1; i < pattern.length; i++)
                if (pattern[i] != pattern[0]) {
                    isConstant = false;
                    break;
                }

            if (isConstant) {
                if (alignment.getDataType().isAmbiguousCode(pattern[0])) {
                    if (useAmbiguitiesInput.get()) {
                        for (int state : alignment.getDataType().getStatesForCode(pattern[0]))
                            constantPatternList.add(patternIdx * nStates + state);
                    }
                } else {
                    constantPatternList.add(patternIdx * nStates + pattern[0]);
                }
            }

            patternIdx += 1;
        }

        constantPatterns.put(region, constantPatternList);
    }
}

From source file:com.github.rinde.rinsim.pdptw.common.RouteFollowingVehicle.java

/**
 * Change the route this vehicle is following. The route must adhere to the
 * following requirements:
 * <ul>
 * <li>Parcels that have not yet been picked up may occur at most twice in
 * the route.</li>
 * <li>Parcels that have been picked up may occur at most once in the
 * route.</li>
 * <li>Parcels that are delivered may not occur in the route.</li>
 * </ul>
 * These requirements are <b>not</b> checked defensively! It is the caller's
 * responsibility to make sure this is the case. Note that the underlying
 * models normally <i>should</i> throw exceptions whenever a vehicle attempts
 * to revisit an already delivered parcel.
 * <p>
 * In some cases the models do not allow this vehicle to change its route
 * immediately. If this is the case the route is changed the next time this
 * vehicle enters its {@link #waitState}. If
 * {@link #isDelayedRouteChangingAllowed()} is set to <code>false</code>, any
 * attempt to do this will result in a runtime exception; in this case the
 * caller must ensure that the route can be changed immediately or call this
 * method at a later time. The situations in which the route is changed
 * immediately are:
 * <ul>
 * <li>If the vehicle is waiting.</li>
 * <li>If diversion is allowed and the vehicle is not currently servicing.
 * </li>
 * <li>If the current route is empty.</li>
 * <li>If the first destination in the new route equals the first destination
 * of the current route.</li>
 * </ul>
 * @param r The route to set. The elements are copied from the
 *          {@link Iterable} using its iteration order.
 */
public void setRoute(Iterable<? extends Parcel> r) {
    final Iterable<Parcel> adjustedRoute = routeAdjuster.adjust(r, this);
    LOGGER.trace("{} setRoute {}", this, adjustedRoute);

    // note: the following checks can not detect if a parcel has been set to
    // multiple vehicles at the same time
    final Multiset<Parcel> routeSet = LinkedHashMultiset.create(adjustedRoute);
    for (final Parcel dp : routeSet.elementSet()) {
        final ParcelState state = getPDPModel().getParcelState(dp);
        checkArgument(!state.isDelivered(),
                "A parcel that is already delivered can not be part of a route. " + "Parcel %s in route %s.",
                dp, adjustedRoute);
        if (state.isTransitionState()) {
            if (state == ParcelState.PICKING_UP) {
                checkArgument(getPDPModel().getVehicleState(this) == VehicleState.PICKING_UP,
                        "When a parcel in the route is in PICKING UP state the vehicle "
                                + "must also be in that state, route: %s.",
                        adjustedRoute, getPDPModel().getVehicleState(this));
            } else {
                checkArgument(getPDPModel().getVehicleState(this) == VehicleState.DELIVERING,
                        "When a parcel in the route is in DELIVERING state the vehicle"
                                + " must also be in that state.");
            }
            checkArgument(getPDPModel().getVehicleActionInfo(this).getParcel() == dp,
                    "A parcel in the route that is being serviced should be serviced by"
                            + " this truck. This truck is servicing %s.",
                    getPDPModel().getVehicleActionInfo(this).getParcel());
        }

        final int frequency = routeSet.count(dp);
        if (state.isPickedUp()) {
            checkArgument(getPDPModel().getContents(this).contains(dp),
                    "A parcel that is in cargo state must be in cargo of this " + "vehicle.");
            checkArgument(frequency <= 1, "A parcel that is in cargo may not occur more than once in a route,"
                    + " found %s instance(s) of %s (with state %s).", frequency, dp, state);
        } else {
            checkArgument(frequency <= 2,
                    "A parcel that is available may not occur more than twice in a "
                            + "route, found %s instance(s) of %s (with state %s). Route: %s.",
                    frequency, dp, state, adjustedRoute);
        }
    }

    final boolean firstEqualsFirst = firstEqualsFirstInRoute(adjustedRoute);
    final boolean divertable = isDiversionAllowed && !stateMachine.stateIs(serviceState);

    if (stateMachine.stateIs(waitState) || route.isEmpty() || divertable || firstEqualsFirst) {
        route = newLinkedList(adjustedRoute);
        newRoute = Optional.absent();
    } else {
        checkArgument(allowDelayedRouteChanges,
                "Diversion is not allowed in current state and delayed route changes "
                        + "are also not allowed, rejected route: %s.",
                adjustedRoute);
        newRoute = Optional.of(newLinkedList(adjustedRoute));
    }
}
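
The Multiset is what turns the informal requirements from the Javadoc into cheap checks: LinkedHashMultiset.create(adjustedRoute) counts how often each parcel occurs, and count() is compared against the allowed maximum (two for parcels that still have to be picked up, one for parcels already in cargo). A minimal stand-alone sketch of that counting idiom:

    Multiset<String> routeSet = LinkedHashMultiset.create(ImmutableList.of("p1", "p2", "p1"));
    for (String parcel : routeSet.elementSet()) {
        int frequency = routeSet.count(parcel); // p1 -> 2, p2 -> 1
        checkArgument(frequency <= 2, "parcel %s occurs %s times", parcel, frequency);
    }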

From source file:Beans.PlayersBean.java

@PostConstruct
public void init() {

    try {
        Set<CodeAward> codeAwards = CodesImporter.importCodes();
        MappingCodeAwards.initializeMapping();

        Datastore ds = singleton.getDatastore();
        Query q = ds.createQuery(Player.class);
        players = q.asList();

        if (players == null) {
            players = new ArrayList();
        }

        for (Player player : players) {

            //            Query<Player> qUNiquePlayer = ds.createQuery(Player.class).filter("twitter", player.getTwitter());
            //            Player foundPlayer = (Player) qUNiquePlayer.get();
            StringBuilder sb = new StringBuilder();

            Multiset<String> categoryCodeAwards = HashMultiset.create();

            for (String code : player.getCodes()) {
                for (CodeAward codeAward : codeAwards) {
                    if (code.equals(codeAward.getCode())) {
                        categoryCodeAwards.add(codeAward.getCategory());
                        try {
                            player.setPoints(player.getPoints() + codeAward.getPoints());
                        } catch (Exception e) {
                            System.out.println("Exception when adding points: ");
                            System.out.println(e);
                        }
                        break;
                    }
                }
            }

            for (String categoryCodeAward : categoryCodeAwards.elementSet()) {
                if (MappingCodeAwards.getMapCategoryToFontIcon().get(categoryCodeAward) != null) {
                    sb.append("<i style=\"font-size:0.8em\" class=\"fa ")
                            .append(MappingCodeAwards.getMapCategoryToFontIcon().get(categoryCodeAward))
                            .append("\"></i> x ").append(categoryCodeAwards.count(categoryCodeAward));
                    sb.append(", ");
                }
            }
            if (sb.lastIndexOf(", ") > 0) {
                sb.delete(sb.lastIndexOf(","), sb.length() - 1);
            }
            player.setHtmlListOfCodeAwards(sb.toString());

        }
    } catch (IOException ex) {
        Logger.getLogger(PlayersBean.class.getName()).log(Level.SEVERE, null, ex);
    }

    Collections.sort(players);
    Collections.reverse(players);

    //find rank
    Player previous = null;
    int counterPlayers = 0;

    for (Player player : players) {
        counterPlayers++;
        if (previous != null) {
            if (player.getPoints() == previous.getPoints()) {
                player.setRank(previous.getRank());
            } else {
                player.setRank(counterPlayers);
            }
        } else {
            player.setRank(counterPlayers);
        }
        previous = player;

    }
}

From source file:it.units.malelab.ege.benchmark.mapper.MappingPropertiesFitness.java

@Override
public MultiObjectiveFitness<Double> compute(Node<String> mapperRawPhenotype) {
    Map<Property, double[]> propertyValues = new LinkedHashMap<>();
    for (Property property : properties) {
        propertyValues.put(property, new double[problems.size()]);
    }
    int i = 0;
    for (Problem<String, NumericFitness> problem : problems.keySet()) {
        List<Node<String>> phenotypes = new ArrayList<>();
        Multiset<Node<String>> groups = LinkedHashMultiset.create();
        //build mapper
        RecursiveMapper<String> mapper = new RecursiveMapper<>(mapperRawPhenotype, maxMappingDepth,
                EXPRESSIVENESS_DEPTH, problem.getGrammar());
        //map
        for (BitsGenotype genotype : genotypes) {
            Node<String> phenotype = Node.EMPTY_TREE;
            try {
                phenotype = mapper.map(genotype, Collections.EMPTY_MAP);
            } catch (MappingException ex) {
                //ignore
            }
            phenotypes.add(phenotype);
            groups.add(phenotype);
        }
        //compute properties
        if (propertyValues.keySet().contains(Property.REDUNDANCY)) {
            propertyValues.get(Property.REDUNDANCY)[i] = 1d
                    - (double) groups.elementSet().size() / (double) genotypes.size();
        }
        if (propertyValues.keySet().contains(Property.NON_UNIFORMITY)) {
            double[] groupSizes = new double[groups.elementSet().size()];
            int c = 0;
            for (Node<String> phenotype : groups.elementSet()) {
                groupSizes[c] = (double) groups.count(phenotype);
                c = c + 1;
            }
            propertyValues.get(Property.NON_UNIFORMITY)[i] = Math.sqrt(StatUtils.variance(groupSizes))
                    / StatUtils.mean(groupSizes);
        }
        if (propertyValues.keySet().contains(Property.NON_LOCALITY)) {
            double[] phenotypeDistances = computeDistances(phenotypes, problems.get(problem));
            double locality = 1d
                    - (1d + (new PearsonsCorrelation().correlation(genotypeDistances, phenotypeDistances)))
                            / 2d;
            propertyValues.get(Property.NON_LOCALITY)[i] = Double.isNaN(locality) ? 1d : locality;
        }
        i = i + 1;
    }
    Double[] meanValues = new Double[properties.length];
    for (int j = 0; j < properties.length; j++) {
        meanValues[j] = StatUtils.mean(propertyValues.get(properties[j]));
    }
    MultiObjectiveFitness<Double> mof = new MultiObjectiveFitness<Double>(meanValues);
    return mof;
}