Example usage for com.google.common.collect Multiset entrySet

Introduction

On this page you can find usage examples for com.google.common.collect Multiset entrySet, collected from the source files listed below.

Prototype

Set<Entry<E>> entrySet();

Document

Returns a view of the contents of this multiset, grouped into Multiset.Entry instances, each providing an element of the multiset and the count of that element.
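
A minimal, self-contained sketch of the typical iteration pattern (not taken from the projects below; the class name and sample values are illustrative):

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class MultisetEntrySetSketch {
    public static void main(String[] args) {
        // Count occurrences of each word; the multiset tracks duplicates.
        Multiset<String> words = HashMultiset.create();
        words.add("apple");
        words.add("banana");
        words.add("apple");

        // entrySet() yields one Multiset.Entry per distinct element,
        // pairing the element with its total count.
        for (Multiset.Entry<String> entry : words.entrySet()) {
            System.out.println(entry.getElement() + " x " + entry.getCount());
        }
        // Prints (in unspecified order): apple x 2, banana x 1
    }
}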

Usage

From source file:com.continuuity.weave.internal.appmaster.ApplicationMasterService.java

/**
 * Handling containers that are completed.
 */
private void handleCompleted(List<YarnContainerStatus> completedContainersStatuses) {
    Multiset<String> restartRunnables = HashMultiset.create();
    for (YarnContainerStatus status : completedContainersStatuses) {
        LOG.info("Container {} completed with {}:{}.", status.getContainerId(), status.getState(),
                status.getDiagnostics());
        runningContainers.handleCompleted(status, restartRunnables);
    }

    for (Multiset.Entry<String> entry : restartRunnables.entrySet()) {
        LOG.info("Re-request container for {} with {} instances.", entry.getElement(), entry.getCount());
        for (int i = 0; i < entry.getCount(); i++) {
            runnableContainerRequests.add(createRunnableContainerRequest(entry.getElement()));
        }
    }

    // For all runnables that need to re-request containers, update the expected count timestamp
    // so that the EventHandler is triggered with the right expiration timestamp.
    expectedContainers.updateRequestTime(restartRunnables.elementSet());
}

From source file:edu.berkeley.compbio.ml.cluster.bayesian.TacoaClustering.java

/**
 * Hack the prior probabilities using the number of clusters per training label.  TacoaDistanceMeasure takes the prior
 * to be per label, not per cluster.   So, the "distance" between a sample and a cluster depends on how many clusters
 * share the same training label.
 */
protected synchronized void preparePriors() //throws DistributionException
{
    //normalizeClusterLabelProbabilities();
    try {
        final Multiset<String> populatedTrainingLabels = HashMultiset.create();
        //int clustersWithTrainingLabel = 0;
        final Collection<? extends CentroidCluster<T>> immutableClusters = getClusters();
        for (final CentroidCluster<T> theCluster : immutableClusters) {
            try {
                // note this also ensures that every cluster has a training label; otherwise it throws NoSuchElementException
                final String label = theCluster.getImmutableWeightedLabels()
                        .getDominantKeyInSet(potentialTrainingBins);
                // could use theCluster.getDerivedLabelProbabilities() there except they're not normalized yet, and there's no need

                populatedTrainingLabels.add(label);
                //clustersWithTrainingLabel++;
            } catch (NoSuchElementException e) {
                logger.warn("Cluster has no training label: " + theCluster);
            }
        }

        logger.info(String.valueOf(populatedTrainingLabels.size()) + " of " + getNumClusters()
                + " clusters have a training label; " + populatedTrainingLabels.entrySet().size()
                + " labels were trained");

        final ImmutableMap.Builder<Cluster<T>, Double> builder = ImmutableMap.builder();

        final Multinomial<String> labelPriors = new Multinomial<String>(populatedTrainingLabels);
        for (final CentroidCluster<T> theCluster : immutableClusters) {
            final String label = theCluster.getImmutableWeightedLabels()
                    .getDominantKeyInSet(potentialTrainingBins); // PERF redundant
            builder.put(theCluster, labelPriors.get(label));
        }

        clusterPriors = builder.build();
    } catch (DistributionException e) {
        logger.error("Error", e);
        throw new ClusterRuntimeException(e);
    }
}

From source file:fr.obeo.releng.targetplatform.validation.TargetPlatformValidator.java

private void reportDuplicatedEnvironmentOptions(final TargetPlatform targetPlatform,
        final Multiset<String> valuesInFile, final String msg) {
    if (((valuesInFile.size() > 1) || IterableExtensions.<Multiset.Entry<String>>exists(valuesInFile.entrySet(),
            new Function1<Multiset.Entry<String>, Boolean>() {
                @Override
                public Boolean apply(final Multiset.Entry<String> it) {
                    int _count = it.getCount();
                    return Boolean.valueOf((_count > 1));
                }
            }))) {
        final Consumer<String> _function = new Consumer<String>() {
            @Override
            public void accept(final String e) {
                final Consumer<Environment> _function = new Consumer<Environment>() {
                    @Override
                    public void accept(final Environment env) {
                        for (int i = 0; (i < env.getEnv().size()); i++) {
                            {
                                final String it = env.getEnv().get(i);
                                boolean _equals = e.equals(it);
                                if (_equals) {
                                    TargetPlatformValidator.this.error(msg, env,
                                            TargetPlatformPackage.Literals.ENVIRONMENT__ENV, i,
                                            TargetPlatformValidator.CHECK__NO_DUPLICATE_ENVIRONMENT_OPTIONS);
                                }
                            }
                        }
                    }
                };
                Iterables.<Environment>filter(targetPlatform.getContents(), Environment.class)
                        .forEach(_function);
            }
        };
        valuesInFile.elementSet().forEach(_function);
    }
}

From source file:org.apache.twill.internal.appmaster.ApplicationMasterService.java

/**
 * Handling containers that are completed.
 */
private void handleCompleted(List<YarnContainerStatus> completedContainersStatuses) {
    Multiset<String> restartRunnables = HashMultiset.create();
    for (YarnContainerStatus status : completedContainersStatuses) {
        LOG.info("Container {} completed with {}:{}.", status.getContainerId(), status.getState(),
                status.getDiagnostics());
        runningContainers.handleCompleted(status, restartRunnables);
    }

    for (Multiset.Entry<String> entry : restartRunnables.entrySet()) {
        LOG.info("Re-request container for {} with {} instances.", entry.getElement(), entry.getCount());
        runnableContainerRequests.add(createRunnableContainerRequest(entry.getElement(), entry.getCount()));
    }

    // For all runnables that need to re-request containers, update the expected count timestamp
    // so that the EventHandler is triggered with the right expiration timestamp.
    expectedContainers.updateRequestTime(restartRunnables.elementSet());
}

From source file:org.sonar.server.es.request.ProxyBulkRequestBuilder.java

@Override
public String toString() {
    StringBuilder message = new StringBuilder();
    message.append("Bulk[");
    Multiset<BulkRequestKey> groupedRequests = LinkedHashMultiset.create();
    for (int i = 0; i < request.requests().size(); i++) {
        ActionRequest<?> item = request.requests().get(i);
        String requestType;
        String index;
        String docType;
        if (item instanceof IndexRequest) {
            IndexRequest request = (IndexRequest) item;
            requestType = "index";
            index = request.index();
            docType = request.type();
        } else if (item instanceof UpdateRequest) {
            UpdateRequest request = (UpdateRequest) item;
            requestType = "update";
            index = request.index();
            docType = request.type();
        } else if (item instanceof DeleteRequest) {
            DeleteRequest request = (DeleteRequest) item;
            requestType = "delete";
            index = request.index();
            docType = request.type();
        } else {
            // Cannot happen, not allowed by BulkRequest's contract
            throw new IllegalStateException("Unsupported bulk request type: " + item.getClass());
        }
        groupedRequests.add(new BulkRequestKey(requestType, index, docType));
    }

    Set<Multiset.Entry<BulkRequestKey>> entrySet = groupedRequests.entrySet();
    int size = entrySet.size();
    int current = 0;
    for (Multiset.Entry<BulkRequestKey> requestEntry : entrySet) {
        message.append(requestEntry.getCount()).append(" ").append(requestEntry.getElement().toString());
        current++;
        if (current < size) {
            message.append(", ");
        }
    }

    message.append("]");
    return message.toString();
}

From source file:annis.ql.parser.SemanticValidator.java

public void checkAlternative(QueryData data, List<QueryNode> alternative, int alternativeIndex) {
    // check if there is at least one search expression
    if (alternative.isEmpty()) {
        throw new AnnisQLSemanticsException("Missing search expression.");
    }

    // no binary linguistic relations are allowed if there is only one node
    if (alternative.size() == 1) {
        QueryNode n = alternative.get(0);
        for (Join j : n.getOutgoingJoins()) {
            if (j.getTarget() != null) {
                throw new AnnisQLSemanticsException(
                        "No binary linguistic relations allowed if there is only one node in query.");
            }
        }
    }

    // get all nodes connected to the first one
    Multimap<Long, QueryNode> connected = calculateConnected(alternative);
    Set<Long> transitiveHull = new HashSet<>();
    transitiveHull.add(alternative.get(0).getId());
    createTransitiveHull(alternative.get(0), connected, transitiveHull);

    Multiset<String> variableNames = TreeMultiset.create();

    Set<Long> unconnectedNodes = new HashSet<>();
    for (QueryNode n : alternative) {
        unconnectedNodes.add(n.getId());
        variableNames.add(n.getVariable());
    }
    unconnectedNodes.removeAll(transitiveHull);

    // check if each node is contained in the connected nodes
    if (!unconnectedNodes.isEmpty()) {
        List<String> variables = new LinkedList<>();
        for (QueryNode n : alternative) {
            if (unconnectedNodes.contains(n.getId())) {
                variables.add(n.getVariable());
            }
        }

        if (alternative.size() == 1) {
            throw new AnnisQLSemanticsException("Variable(s) [" + Joiner.on(",").join(variables)
                    + "] not bound (use linguistic operators).");
        } else {
            throw new AnnisQLSemanticsException("Variable(s) [" + Joiner.on(",").join(variables)
                    + "] not bound in alternative " + alternativeIndex + "(use linguistic operators). "
                    + "Normalized query is: \n" + data.toAQL());
        }
    }

    // check if any variable name was given more than once
    List<String> invalidNames = new LinkedList<>();
    for (Multiset.Entry<String> e : variableNames.entrySet()) {
        if (e.getCount() > 1) {
            invalidNames.add(e.getElement());
        }
    }
    if (!invalidNames.isEmpty()) {
        throw new AnnisQLSemanticsException(
                "The following variable names are " + "used for more than one node: "
                        + Joiner.on(", ").join(invalidNames) + "\nNormalized Query is: \n" + data.toAQL());
    }
}

From source file:org.corpus_tools.annis.ql.parser.SemanticValidator.java

public void checkAlternative(QueryData data, List<QueryNode> alternative, int alternativeIndex,
        boolean queryWasNormalized) {
    // check if there is at least one search expression
    if (alternative.isEmpty()) {
        throw new AnnisQLSemanticsException("Missing search expression.");
    }

    // no binary linguistic relations are allowed if there is only one node
    if (alternative.size() == 1) {
        QueryNode n = alternative.get(0);
        for (Join j : n.getOutgoingJoins()) {
            if (j.getTarget() != null) {
                throw new AnnisQLSemanticsException(j.getParseLocation(),
                        "No binary linguistic relations allowed if there is only one node in query.");
            }
        }
    }

    // get all nodes connected to the first one
    Multimap<Long, QueryNode> connected = calculateConnected(alternative);
    Set<Long> transitiveHull = new HashSet<>();
    transitiveHull.add(alternative.get(0).getId());
    createTransitiveHull(alternative.get(0), connected, transitiveHull);

    Multiset<String> variableNames = TreeMultiset.create();

    Set<Long> unconnectedNodes = new HashSet<>();
    for (QueryNode n : alternative) {
        unconnectedNodes.add(n.getId());
        variableNames.add(n.getVariable());
    }
    unconnectedNodes.removeAll(transitiveHull);

    // check if each node is contained in the connected nodes
    if (!unconnectedNodes.isEmpty()) {
        List<AqlParseError> errors = new LinkedList<>();

        for (QueryNode n : alternative) {
            if (unconnectedNodes.contains(n.getId())) {
                errors.add(new AqlParseError(n,
                        "variable \"" + n.getVariable() + "\" not bound (use linguistic operators)"));

            }
        }

        if (!errors.isEmpty()) {
            if (queryWasNormalized) {
                // add the normalized query as "error" so the user is able to see it
                errors.add(new AqlParseError("Normalized query is: \n" + data.toAQL()));
            }

            throw new AnnisQLSemanticsException("Not all variables bound", errors);
        }
    }

    // check if any variable name was given more than once
    List<String> invalidNames = new LinkedList<>();
    for (Multiset.Entry<String> e : variableNames.entrySet()) {
        if (e.getCount() > 1) {
            invalidNames.add(e.getElement());
        }
    }
    if (!invalidNames.isEmpty()) {
        throw new AnnisQLSemanticsException(
                "The following variable names are " + "used for more than one node: "
                        + Joiner.on(", ").join(invalidNames) + "\nNormalized Query is: \n" + data.toAQL());
    }

    // check that no non-reflexive operator is used with the same node as both operands
    for (QueryNode source : alternative) {
        for (Join join : source.getOutgoingJoins()) {
            if (join instanceof Inclusion || join instanceof SameSpan || join instanceof Overlap
                    || join instanceof RightOverlap || join instanceof LeftOverlap
                    || join instanceof RightAlignment || join instanceof LeftAlignment) {
                if (source.equals(join.getTarget())) {
                    throw new AnnisQLSemanticsException(join,
                            "Not-reflexive operator used with the same node as argument.");
                }
            }
        }
    }
}

From source file:bots.mctsbot.ai.bots.bot.gametree.search.expander.SamplingExpander.java

public List<Pair<ActionWrapper, WeightedNode>> getWeightedChildren(boolean uniformTokens) {
    List<ProbabilityAction> probActions = new ArrayList<ProbabilityAction>(getProbabilityActions());
    double[] cumulProb = new double[probActions.size()];

    for (int i = 0; i < probActions.size(); i++) {
        cumulProb[i] = (i > 0 ? cumulProb[i - 1] : 0) + probActions.get(i).getProbability();
    }
    if (logger.isTraceEnabled()) {
        for (int i = 0; i < probActions.size(); i++) {
            logger.trace("cumulProb[" + i + "]=" + cumulProb[i] + " for action " + probActions.get(i));

        }
    }

    // ordering for sexy debugging output
    Multiset<ProbabilityAction> samples = TreeMultiset.create(new Comparator<ProbabilityAction>() {
        @Override
        public int compare(ProbabilityAction o1, ProbabilityAction o2) {
            if (o2.getProbability() < o1.getProbability()) {
                return -1;
            }
            if (o2.getProbability() > o1.getProbability()) {
                return 1;
            }
            if (o1.getAction() instanceof RaiseAction && o2.getAction() instanceof RaiseAction) {
                return ((RaiseAction) o2.getAction()).amount - ((RaiseAction) o1.getAction()).amount;
            }
            if (o1.getAction() instanceof BetAction && o2.getAction() instanceof BetAction) {
                return ((BetAction) o2.getAction()).amount - ((BetAction) o1.getAction()).amount;
            }
            // if probabilities are equal for different classes,
            // objects are NOT equal per se
            // go alphabetically?
            return o1.toString().compareTo(o2.toString());
        }
    });
    // Multiset<ProbabilityAction> samples = HashMultiset.create();
    int nbSamples = Math.min(Max_Granularity, tokens);
    for (int i = 0; i < nbSamples; i++) {
        ProbabilityAction sampledAction = sampleAction(probActions, cumulProb);
        samples.add(sampledAction);
    }

    Set<Entry<ProbabilityAction>> entrySet = samples.entrySet();
    ImmutableList.Builder<Pair<ActionWrapper, WeightedNode>> childrenBuilder = ImmutableList.builder();
    for (Entry<ProbabilityAction> entry : entrySet) {
        int tokensShare = uniformTokens ? tokens / entrySet.size() : tokens * entry.getCount() / nbSamples;
        childrenBuilder.add(new Pair<ActionWrapper, WeightedNode>(entry.getElement(), new WeightedNode(
                node.getChildAfter(entry.getElement(), tokensShare), entry.getCount() / (double) nbSamples)));
    }
    return childrenBuilder.build();
}

From source file:com.b2international.snowowl.snomed.datastore.request.SnomedConceptCreateRequest.java

private void convertDescriptions(TransactionContext context, final String conceptId) {
    final Set<String> requiredDescriptionTypes = newHashSet(Concepts.FULLY_SPECIFIED_NAME,
            Concepts.REFSET_DESCRIPTION_ACCEPTABILITY_PREFERRED);
    final Multiset<String> preferredLanguageRefSetIds = HashMultiset.create();
    final Set<String> synonymAndDescendantIds = context.service(Synonyms.class).get();

    for (final SnomedDescriptionCreateRequest descriptionRequest : descriptions) {

        descriptionRequest.setConceptId(conceptId);

        if (null == descriptionRequest.getModuleId()) {
            descriptionRequest.setModuleId(getModuleId());
        }

        descriptionRequest.execute(context);

        final String typeId = descriptionRequest.getTypeId();

        if (synonymAndDescendantIds.contains(typeId)) {
            for (final Entry<String, Acceptability> acceptability : descriptionRequest.getAcceptability()
                    .entrySet()) {
                if (Acceptability.PREFERRED.equals(acceptability.getValue())) {
                    preferredLanguageRefSetIds.add(acceptability.getKey());
                    requiredDescriptionTypes.remove(Concepts.REFSET_DESCRIPTION_ACCEPTABILITY_PREFERRED);
                }
            }
        }

        requiredDescriptionTypes.remove(typeId);
    }

    if (!requiredDescriptionTypes.isEmpty()) {
        throw new BadRequestException(
                "At least one fully specified name and one preferred term must be supplied with the concept.");
    }

    for (final com.google.common.collect.Multiset.Entry<String> languageRefSetIdOccurence : preferredLanguageRefSetIds
            .entrySet()) {
        if (languageRefSetIdOccurence.getCount() > 1) {
            throw new BadRequestException(
                    "More than one preferred term has been added for language reference set %s.",
                    languageRefSetIdOccurence.getElement());
        }
    }
}

From source file:tufts.vue.ds.DataTree.java

private static void buildValueChildren(Field field, DataNode fieldNode) {
    final Multiset<String> valueCounts = field.getValueSet();
    final Set<Multiset.Entry<String>> entrySet = valueCounts.entrySet();

    final Iterable<Multiset.Entry<String>> valueEntries;

    if (field.isQuantile() || (SORT_BY_COUNT && field.isPossibleKeyField())) {
        // cases where we don't need to sort: (1) quantiles, which are pre-sorted;
        // (2) possible key fields when sorting by count (frequency), since all
        // their value counts == 1
        valueEntries = entrySet;
    } else {
        final ArrayList<Multiset.Entry<String>> sortedValues = new ArrayList<Multiset.Entry<String>>(entrySet);

        Collections.sort(sortedValues, new Comparator<Multiset.Entry<String>>() {
            public int compare(final Multiset.Entry<String> e1, final Multiset.Entry<String> e2) {
                // always put any empty value item last, otherwise sort on frequency
                if (e1.getElement() == Field.EMPTY_VALUE)
                    return 1;
                else if (e2.getElement() == Field.EMPTY_VALUE)
                    return -1;
                else if (SORT_BY_COUNT)
                    return e2.getCount() - e1.getCount();
                else // SORT_BY_VALUE
                    return tufts.Strings.compareNaturalIgnoreCaseAscii(e1.getElement(), e2.getElement());
            }
        });

        valueEntries = sortedValues;
    }

    //-----------------------------------------------------------------------------
    // Add the enumerated values
    //-----------------------------------------------------------------------------

    for (Multiset.Entry<String> e : valueEntries) {

        final String value = e.getElement();
        final String display;

        int nValues = e.getCount();
        if (field.isQuantile() && value != Field.EMPTY_VALUE) {
            // non-empty Quantile values always have an extra count,
            // which was the "init" count to enforce quantile-order
            // on the values list.
            nValues--;
        }

        if (field.isPossibleKeyField()) {

            display = field.valueDisplay(value);

        } else {

            final String countTxt = String.format("%3d", nValues).replaceAll(" ", "&nbsp;");

            final String color;

            if (nValues <= 0)
                display = String.format(HTML("<font color=#AAAAAA><code>%s</code> %s"), countTxt,
                        valueText(value));
            else
                display = String.format(HTML("<code><font color=#888888>%s</font></code> %s"), countTxt,
                        valueText(value));
        }

        final ValueNode valueNode = new ValueNode(field, value, display, nValues);

        fieldNode.add(valueNode);

    }

    for (String comment : field.getDataComments()) {
        fieldNode.add(new DataNode(HTML("<font color=#AAAAAA>" + comment)));
    }

}