List of usage examples for com.google.common.collect Multiset setCount
int setCount(E element, int count);
From source file:edu.cornell.cs.nlp.spf.test.ccg.lambda.SingleSentencePartialCreditTestingStatistics.java
private static PartialCreditTriplet partialCompare(LogicalExpression gold, LogicalExpression label) { final Multiset<Pair<? extends LogicalExpression, ? extends LogicalExpression>> goldPairs = GetPredConstPairs .of(gold);/*from www .jav a 2s.co m*/ final Multiset<Pair<? extends LogicalExpression, ? extends LogicalExpression>> labelPairs; if (label == null) { labelPairs = HashMultiset.create(); } else { labelPairs = GetPredConstPairs.of(label); } // The "intersection" of the gold and label pair sets = the number of // matches final Multiset<Pair<? extends LogicalExpression, ? extends LogicalExpression>> intersection = HashMultiset .create(); for (final Entry<Pair<? extends LogicalExpression, ? extends LogicalExpression>> entry : goldPairs .entrySet()) { intersection.setCount(entry.getElement(), Math.min(entry.getCount(), labelPairs.count(entry.getElement()))); } return new PartialCreditTriplet(goldPairs.size(), labelPairs.size(), intersection.size()); }
From source file:org.javafunk.funk.Multisets.java
/**
 * Computes the multiset union of all the given iterables: for every element, the
 * resulting count is the maximum count that element has in any single iterable.
 *
 * @param iterables the iterables to union; must contain at least one iterable
 *                  (the first one seeds the result)
 * @return a multiset holding each element with its maximum multiplicity
 */
public static <T> Multiset<T> union(Iterable<? extends Iterable<? extends T>> iterables) {
    // Seed the union with the contents of the first iterable.
    Multiset<T> result = multisetFrom(first(iterables).get());
    // For each remaining iterable, raise every element's count to the larger
    // of its current count and its count in that iterable.
    for (Iterable<? extends T> iterable : rest(iterables)) {
        Multiset<T> candidate = multisetFrom(iterable);
        for (T element : candidate.elementSet()) {
            int candidateCount = candidate.count(element);
            if (result.count(element) < candidateCount) {
                result.setCount(element, candidateCount);
            }
        }
    }
    return result;
}
From source file:org.sosy_lab.cpachecker.util.predicates.pathformula.pointeraliasing.TypeHandlerWithPointerAliasing.java
/**
 * Adds the declared composite type to the cache, saving its size as well as the
 * byte offset of every member of the composite. Member composite types are added
 * recursively. Idempotent: a type already present in {@code offsets} is skipped.
 *
 * @param compositeType the struct/union type to cache (enums are not composite)
 */
void addCompositeTypeToCache(CCompositeType compositeType) {
    compositeType = (CCompositeType) CTypeUtils.simplifyType(compositeType);
    if (offsets.containsKey(compositeType)) {
        // Support for empty structs though it's a GCC extension
        assert sizes.contains(compositeType) || Integer.valueOf(0).equals(
                compositeType.accept(sizeofVisitor)) : "Illegal state of PointerTargetSet: no size for type:"
                        + compositeType;
        return; // The type has already been added
    }

    final Integer size = compositeType.accept(sizeofVisitor);
    assert size != null : "Can't evaluate size of a composite type: " + compositeType;

    assert compositeType.getKind() != ComplexTypeKind.ENUM : "Enums are not composite: " + compositeType;

    // NOTE: the Multiset is used here as a String -> int map (member name -> offset),
    // via setCount/count, not as a bag of occurrences.
    final Multiset<String> members = HashMultiset.create();
    int offset = 0;
    for (final CCompositeTypeMemberDeclaration memberDeclaration : compositeType.getMembers()) {
        members.setCount(memberDeclaration.getName(), offset);
        final CType memberType = CTypeUtils.simplifyType(memberDeclaration.getType());
        final CCompositeType memberCompositeType;
        if (memberType instanceof CCompositeType) {
            memberCompositeType = (CCompositeType) memberType;
            if (memberCompositeType.getKind() == ComplexTypeKind.STRUCT
                    || memberCompositeType.getKind() == ComplexTypeKind.UNION) {
                if (!offsets.containsKey(memberCompositeType)) {
                    // A size without an offsets entry would mean the caches diverged.
                    assert !sizes
                            .contains(memberCompositeType) : "Illegal state of PointerTargetSet: size for type:"
                                    + memberCompositeType;
                    // Recursively cache the nested composite before using its size below.
                    addCompositeTypeToCache(memberCompositeType);
                }
            }
        } else {
            memberCompositeType = null;
        }
        // Only struct members advance the offset; union members all start at offset 0.
        if (compositeType.getKind() == ComplexTypeKind.STRUCT) {
            if (memberCompositeType != null) {
                // sizes is also a Multiset-as-map: count() returns the cached size.
                offset += sizes.count(memberCompositeType);
            } else {
                offset += memberDeclaration.getType().accept(sizeofVisitor);
            }
        }
    }
    // For structs, the accumulated offset past the last member must equal sizeof.
    // NOTE(review): this assumes no padding is modeled by sizeofVisitor — confirm.
    assert compositeType.getKind() != ComplexTypeKind.STRUCT
            || offset == size : "Incorrect sizeof or offset of the last member: " + compositeType;
    sizes.setCount(compositeType, size);
    offsets.put(compositeType, members);
}
From source file:eu.lp0.cursus.scoring.scores.impl.GenericRaceLapsData.java
/**
 * Computes the finishing order of pilots for a race after applying lap penalties.
 * First the raw lap data is extracted, then CANCEL_LAPS penalties are applied by
 * re-extracting laps with per-pilot removal budgets, and finally ADJUST_LAPS
 * penalties shift pilots between lap-count buckets while preserving the original
 * relative order. The {@code laps} map is updated in place.
 *
 * @param race the race being scored
 * @param laps mutable map of pilot to lap count, updated by this method
 * @return pilots in race finishing order
 */
@Override
protected List<Pilot> calculateRaceLapsInOrder(Race race, Map<Pilot, Integer> laps) {
    // raceOrder maps lap count -> pilots who completed that many laps, in finishing order.
    ListMultimap<Integer, Pilot> raceOrder = ArrayListMultimap.create(EXPECTED_MAXIMUM_LAPS,
            scores.getPilots().size());
    extractRaceLaps(race, laps, raceOrder, null);

    // Get penalties for each pilot
    ListMultimap<Pilot, Penalty> cancelLaps = ArrayListMultimap.create(EXPECTED_MAXIMUM_PENALTIES,
            scores.getPilots().size());
    ListMultimap<Pilot, Penalty> adjustLaps = ArrayListMultimap.create(EXPECTED_MAXIMUM_PENALTIES,
            scores.getPilots().size());
    for (RaceAttendee attendee : Maps.filterKeys(race.getAttendees(), Predicates.in(scores.getPilots()))
            .values()) {
        // Real penalties (sorted) followed by simulated penalties; zero-valued ones are ignored.
        for (Penalty penalty : Iterables.concat(Ordering.natural().immutableSortedCopy(attendee.getPenalties()),
                scores.getSimulatedRacePenalties(attendee.getPilot(), race))) {
            if (penalty.getValue() != 0) {
                switch (penalty.getType()) {
                case CANCEL_LAPS:
                    cancelLaps.put(attendee.getPilot(), penalty);
                    break;
                case ADJUST_LAPS:
                    adjustLaps.put(attendee.getPilot(), penalty);
                    break;
                default:
                    break;
                }
            }
        }
    }

    // Apply lap cancellation penalties
    if (!cancelLaps.isEmpty()) {
        // pilotLaps acts as a per-pilot budget of laps that still count.
        final Multiset<Pilot> pilotLaps = HashMultiset.create(laps.size());
        for (Map.Entry<Pilot, Integer> pilotLapCount : laps.entrySet()) {
            pilotLaps.setCount(pilotLapCount.getKey(), pilotLapCount.getValue());
        }
        for (Map.Entry<Pilot, Penalty> entry : cancelLaps.entries()) {
            int value = entry.getValue().getValue();
            // Positive value cancels laps; negative value grants laps back.
            if (value > 0) {
                pilotLaps.remove(entry.getKey(), value);
            } else {
                pilotLaps.add(entry.getKey(), Math.abs(value));
            }
        }
        // Re-extract laps, counting a lap only while the pilot's budget lasts
        // (remove() returns true while an occurrence was present).
        extractRaceLaps(race, laps, raceOrder, new Predicate<Pilot>() {
            @Override
            public boolean apply(@Nullable Pilot pilot) {
                return pilotLaps.remove(pilot);
            }
        });
    }

    // Save pilot order
    List<Pilot> origPilotOrder = getPilotOrder(raceOrder);
    SortedSet<Pilot> noLaps = new TreeSet<Pilot>(new PilotRaceNumberComparator());
    Set<Integer> changed = new HashSet<Integer>();

    // It is intentional that pilots can end up having 0 laps but be considered
    // to have completed the race
    for (Map.Entry<Pilot, Penalty> entry : adjustLaps.entries()) {
        Pilot pilot = entry.getKey();
        int lapCount = laps.get(pilot);
        // Move the pilot out of their current lap bucket before adjusting.
        raceOrder.remove(lapCount, pilot);
        changed.add(lapCount);
        lapCount += entry.getValue().getValue();
        if (lapCount <= 0) {
            // Clamp at zero; these pilots sort last by race number (see below).
            lapCount = 0;
            noLaps.add(pilot);
        }
        laps.put(pilot, lapCount);
        raceOrder.put(lapCount, pilot);
        changed.add(lapCount);
    }

    // Apply original pilot order
    if (!changed.isEmpty()) {
        // Re-sort every touched bucket by the pre-adjustment order, with
        // zero-lap pilots appended at the end of the explicit ordering.
        origPilotOrder.addAll(noLaps);
        for (Integer lapCount : changed) {
            raceOrder.replaceValues(lapCount,
                    Ordering.explicit(origPilotOrder).immutableSortedCopy(raceOrder.get(lapCount)));
        }
        return getPilotOrder(raceOrder);
    } else {
        return origPilotOrder;
    }
}
From source file:com.continuuity.loom.layout.change.AddServicesChange.java
/**
 * Applies this change to the cluster by adding the configured services to nodes,
 * consuming at most {@code countsPerNodeLayout} nodes per layout. The cluster's
 * service set is extended with the added services.
 *
 * @param cluster      the cluster being changed
 * @param clusterNodes the candidate nodes of the cluster
 * @param serviceMap   lookup from service name to {@link Service}
 * @return the nodes that were actually modified
 */
@Override
public Set<Node> applyChange(Cluster cluster, Set<Node> clusterNodes, Map<String, Service> serviceMap) {
    Set<Node> changedNodes = Sets.newHashSet();
    // Working budget: how many more nodes of each layout still need the services.
    Multiset<NodeLayout> remaining = HashMultiset.create(countsPerNodeLayout);
    for (Node node : clusterNodes) {
        NodeLayout layout = NodeLayout.fromNode(node);
        if (!remaining.contains(layout)) {
            continue;
        }
        for (String service : services) {
            node.addService(serviceMap.get(service));
        }
        // One node of this layout has been handled; shrink its budget.
        remaining.setCount(layout, remaining.count(layout) - 1);
        changedNodes.add(node);
    }
    cluster.setServices(Sets.union(cluster.getServices(), services));
    return changedNodes;
}
From source file:com.clarkparsia.sbol.order.PartialOrder.java
/**
 * Returns the elements in an ascending topological order (predecessors before
 * successors), computed by iteratively removing nodes whose remaining
 * predecessor count has reached zero.
 *
 * @throws IllegalStateException if there are cycles between the elements
 */
@Override
public Iterator<T> iterator() throws IllegalStateException {
    // Multiset used as a node -> remaining-predecessor-count map via setCount/count.
    Multiset<T> degrees = HashMultiset.create();
    Queue<T> nodesPending = new ArrayDeque<T>();
    List<T> nodesSorted = Lists.newArrayList();
    // Initialize in-degrees; nodes with no predecessors are ready immediately.
    for (Entry<T, Set<T>> entry : precededBy.entrySet()) {
        T node = entry.getKey();
        Set<T> precededByList = entry.getValue();
        int degree = precededByList.size();
        degrees.setCount(node, degree);
        if (degree == 0) {
            nodesPending.add(node);
        }
    }
    while (!nodesPending.isEmpty()) {
        T node = nodesPending.remove();
        int deg = degrees.count(node);
        // A pending node must have degree 0; a nonzero degree means it was
        // re-queued while predecessors remained, i.e. a cycle.
        if (deg != 0)
            throw new IllegalStateException("Cycle detected " + node + " " + deg + " " + nodesSorted.size());
        nodesSorted.add(node);
        // Decrement the remaining-predecessor count of every successor of `node`.
        // NOTE: O(V*E) overall — each emitted node rescans the whole precededBy map.
        for (Entry<T, Set<T>> entry : precededBy.entrySet()) {
            T n = entry.getKey();
            Set<T> precededByList = entry.getValue();
            if (precededByList.contains(node)) {
                int degree = degrees.count(n);
                if (degree == 1) {
                    // Last outstanding predecessor satisfied: node becomes ready.
                    nodesPending.add(n);
                    degrees.setCount(n, 0);
                } else {
                    // remove() drops a single occurrence, i.e. decrements the count by one.
                    degrees.remove(n);
                }
            }
        }
    }
    // If some nodes were never emitted, they sit on a cycle.
    if (nodesSorted.size() != precededBy.size()) {
        throw new IllegalStateException("Failed to sort elements");
    }
    return nodesSorted.iterator();
}
From source file:edu.cmu.lti.oaqa.baseqa.answer.yesno.scorers.ConceptOverlapYesNoScorer.java
/**
 * Scores yes/no features based on the overlap between question concepts and all
 * concepts, aggregated per concept type. For every concept type it emits
 * avg/max/min question-count, question-ratio and passage-ratio features plus
 * micro-averaged ratios, and finally aggregates each feature family globally.
 *
 * @param jcas the CAS holding question and snippet concept annotations
 * @return immutable map from feature name to value
 * @throws AnalysisEngineProcessException on analysis failure
 */
@Override
public Map<String, Double> score(JCas jcas) throws AnalysisEngineProcessException {
    // create ctype2concepts maps and concept counts in question and snippets
    SetMultimap<String, Concept> ctype2concepts = HashMultimap.create();
    // Multiset used as a Concept -> int map: the number of distinct views
    // (question/snippets) in which the concept is mentioned.
    Multiset<Concept> concept2count = HashMultiset.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .forEach(ctype -> ctype2concepts.put(ctype, concept));
        long count = TypeUtil.getConceptMentions(concept).stream()
                .map(cmention -> cmention.getView().getViewName()).distinct().count();
        concept2count.setCount(concept, (int) count);
    }
    // Concepts mentioned in the question itself.
    Set<Concept> qconcepts = TypeUtil.getConceptMentions(jcas).stream().map(ConceptMention::getConcept)
            .collect(toSet());
    // prepare cross-ctype counts
    ImmutableMap.Builder<String, Double> features = ImmutableMap.builder();
    // keyword2values collects per-ctype values so they can be aggregated globally below.
    ListMultimap<String, Double> keyword2values = ArrayListMultimap.create();
    for (String ctype : ctype2concepts.keySet()) {
        Set<Concept> concepts = ctype2concepts.get(ctype);
        // local counts
        int[] totalCounts = concepts.stream().mapToInt(concept2count::count).toArray();
        // 1.0 when the concept appears in the question, else 0.0.
        double[] questionCounts = concepts.stream()
                .mapToDouble(concept -> qconcepts.contains(concept) ? 1 : 0).toArray();
        // NOTE(review): assumes totalCounts[i] > 0 for every concept — a concept with
        // no mentions would divide by zero here; confirm upstream guarantees this.
        double[] questionRatios = IntStream.range(0, concepts.size())
                .mapToDouble(i -> questionCounts[i] / totalCounts[i]).toArray();
        double[] passageRatios = DoubleStream.of(questionRatios).map(r -> 1.0 - r).toArray();
        // create feature counts aggregated for each ctype
        addAvgMaxMinFeatures(questionCounts, features, keyword2values, "question-count", ctype);
        addAvgMaxMinFeatures(questionRatios, features, keyword2values, "question-ratio", ctype);
        addAvgMaxMinFeatures(passageRatios, features, keyword2values, "passage-ratio", ctype);
        double questionRatioAvgMicro = DoubleStream.of(questionCounts).sum() / IntStream.of(totalCounts).sum();
        features.put("question-ratio-avg-micro@" + ctype, questionRatioAvgMicro);
        keyword2values.put("question-ratio-avg-micro", questionRatioAvgMicro);
        double passageRatioAvgMicro = 1.0 - questionRatioAvgMicro;
        // NOTE(review): key says "macro" although the value is the micro complement —
        // possibly a naming slip, but the string is load-bearing for trained models,
        // so it is left unchanged.
        features.put("passage-ratio-avg-macro@" + ctype, passageRatioAvgMicro);
        keyword2values.put("passage-ratio-avg-macro", passageRatioAvgMicro);
    }
    // global features
    keyword2values.asMap().entrySet().stream()
            .map(e -> YesNoScorer.aggregateFeatures(e.getValue(), e.getKey()))
            .forEach(features::putAll);
    return features.build();
}
From source file:edu.uw.cs.lil.tiny.test.ccg.lambda.SingleSentencePartialCreditTestingStatistics.java
private PartialCreditTriplet partialCompare(LogicalExpression gold, LogicalExpression label) { final Multiset<Pair<? extends LogicalExpression, ? extends LogicalExpression>> goldPairs = GetPredConstPairs .of(gold);//from w w w . j av a 2s .c om final Multiset<Pair<? extends LogicalExpression, ? extends LogicalExpression>> labelPairs; if (label == null) { labelPairs = HashMultiset.create(); } else { labelPairs = GetPredConstPairs.of(label); } // The "intersection" of the gold and label pair sets = the number of // matches final Multiset<Pair<? extends LogicalExpression, ? extends LogicalExpression>> intersection = HashMultiset .create(); for (final Entry<Pair<? extends LogicalExpression, ? extends LogicalExpression>> entry : goldPairs .entrySet()) { intersection.setCount(entry.getElement(), Math.min(entry.getCount(), labelPairs.count(entry.getElement()))); } return new PartialCreditTriplet(goldPairs.size(), labelPairs.size(), intersection.size()); }
From source file:com.continuuity.loom.layout.change.AddServicesChange.java
@Override public ClusterLayout applyChange(ClusterLayout originalLayout) { Multiset<NodeLayout> newLayout = HashMultiset.create(originalLayout.getLayout()); for (Multiset.Entry<NodeLayout> entry : countsPerNodeLayout.entrySet()) { NodeLayout originalNodeLayout = entry.getElement(); NodeLayout expandedNodeLayout = NodeLayout.addServicesToNodeLayout(originalNodeLayout, services); // add the service count times newLayout.add(expandedNodeLayout, entry.getCount()); // subtract count nodes from the original node layout since that many have now been expanded. newLayout.setCount(originalNodeLayout, originalLayout.getLayout().count(originalNodeLayout) - entry.getCount()); }/*from w w w .ja va 2 s.com*/ return new ClusterLayout(originalLayout.getConstraints(), newLayout); }
From source file:edu.stanford.nlp.util.JBLEU.java
public void stats(List<String> hyp, List<List<String>> refs, int[] result) { assert result.length == 9; assert refs.size() > 0; // 1) choose reference length int selectedRef = pickReference(hyp, refs, verbosity); int selectedRefLen = refs.get(selectedRef).size(); // TODO: Integer-ify everything inside Ngram? Or is there too much // overhead there? // 2) determine the bag of n-grams we can score against // build a simple tries Multiset<Ngram> clippedRefNgrams = HashMultiset.create(); for (List<String> ref : refs) { Multiset<Ngram> refNgrams = HashMultiset.create(); for (int order = 1; order <= N; order++) { for (int i = 0; i <= ref.size() - order; i++) { List<String> toks = ref.subList(i, i + order); Ngram ngram = new Ngram(toks); refNgrams.add(ngram);//from w w w .j av a 2s . c o m } } // clip n-grams by taking the maximum number of counts for any given reference for (Ngram ngram : refNgrams) { int clippedCount = Math.max(refNgrams.count(ngram), clippedRefNgrams.count(ngram)); clippedRefNgrams.setCount(ngram, clippedCount); } } // 3) now match n-grams int[] attempts = new int[N]; int[] matches = new int[N]; for (int order = 1; order <= N; order++) { for (int i = 0; i <= hyp.size() - order; i++) { List<String> toks = hyp.subList(i, i + order); Ngram ngram = new Ngram(toks); boolean found = clippedRefNgrams.remove(ngram); ++attempts[order - 1]; if (found) { ++matches[order - 1]; } } } // 4) assign sufficient stats System.arraycopy(attempts, 0, result, 0, N); System.arraycopy(matches, 0, result, N, N); result[N * 2] = selectedRefLen; }