List of usage examples for com.google.common.collect Multiset count
int count(@Nullable Object element);
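Before the project examples, a minimal self-contained sketch of the basic contract of count (not taken from any of the projects below; the class name MultisetCountExample is made up for illustration): count returns the number of occurrences of the given element, and 0 for an element the multiset does not contain.

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class MultisetCountExample {
    public static void main(String[] args) {
        // Count word occurrences without maintaining a Map<String, Integer> by hand.
        Multiset<String> words = HashMultiset.create();
        words.add("guava");
        words.add("guava");
        words.add("multiset");

        System.out.println(words.count("guava"));    // 2
        System.out.println(words.count("multiset")); // 1
        System.out.println(words.count("absent"));   // 0 -- never throws for missing elements
    }
}

Because count returns 0 rather than throwing for absent elements, the examples below can query it freely while iterating over elementSet() or while expanding symbols incrementally.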
From source file: it.units.malelab.ege.ge.mapper.BitsSGEMapper.java
@Override
public Node<T> map(BitsGenotype genotype, Map<String, Object> report) throws MappingException {
    int[] bitUsages = new int[genotype.size()];
    //transform genotypes in ints
    if (genotype.size() < overallSize) {
        throw new MappingException(String.format("Short genotype (%d<%d)", genotype.size(), overallSize));
    }
    Map<Pair<T, Integer>, List<Range<Integer>>> codonRanges = new LinkedHashMap<>();
    List<Range<Integer>> nonTerminalRanges = Utils.slices(Range.closedOpen(0, genotype.size()), nonTerminalSizes);
    for (int i = 0; i < nonTerminals.size(); i++) {
        //int codonSize = (int) Math.max(Math.ceil(Math.log10(nonRecursiveGrammar.getRules().get(nonTerminals.get(i)).size()) / Math.log10(2)), 1);
        List<Range<Integer>> boundaries = Utils.slices(nonTerminalRanges.get(i), nonTerminalCodonsNumbers.get(i));
        codonRanges.put(nonTerminals.get(i), boundaries);
    }
    //map
    Multiset<Pair<T, Integer>> expandedSymbols = LinkedHashMultiset.create();
    Node<Pair<T, Integer>> tree = new Node<>(nonRecursiveGrammar.getStartingSymbol());
    while (true) {
        Node<Pair<T, Integer>> nodeToBeReplaced = null;
        for (Node<Pair<T, Integer>> node : tree.leafNodes()) {
            if (nonRecursiveGrammar.getRules().keySet().contains(node.getContent())) {
                nodeToBeReplaced = node;
                break;
            }
        }
        if (nodeToBeReplaced == null) {
            break;
        }
        //get codon
        Range<Integer> range = codonRanges.get(nodeToBeReplaced.getContent())
                .get(expandedSymbols.count(nodeToBeReplaced.getContent()));
        List<List<Pair<T, Integer>>> options = nonRecursiveGrammar.getRules()
                .get(nodeToBeReplaced.getContent());
        int codonSize = (int) Math.max(Math.ceil(Math.log10(options.size()) / Math.log10(2)), 1);
        int codonValue = genotype.slice(range).compress(codonSize).toInt();
        int optionIndex = codonValue % options.size();
        //update usages
        for (int i = range.lowerEndpoint(); i < range.upperEndpoint(); i++) {
            bitUsages[i] = bitUsages[i] + 1;
        }
        //add children
        for (Pair<T, Integer> p : options.get(optionIndex)) {
            Node<Pair<T, Integer>> newChild = new Node<>(p);
            nodeToBeReplaced.getChildren().add(newChild);
        }
        expandedSymbols.add(nodeToBeReplaced.getContent());
    }
    report.put(BIT_USAGES_INDEX_NAME, bitUsages);
    //transform tree
    return transform(tree);
}
From source file: com.seajas.search.codex.service.social.SocialProfileService.java
@Override
public TwitterProfileSummaryDto getTwitterProfileSummary(final long twitterProfileId) {
    List<Tweet> tweets = socialFacade.getUserTimeline(twitterProfileId);
    SocialProfileDto socialProfileDto = null;
    TwitterProfile twitterProfile = socialFacade.getTwitterProfile(twitterProfileId);
    if (twitterProfile != null) {
        socialProfileDto = SocialProfileDto.translate(twitterProfile);
        socialProfileDto
                .setProfileImageMediaUrl(this.storeImageOnMediaServer(twitterProfile.getProfileImageUrl()));
    }
    Multiset<Long> mentionedCounter = HashMultiset.create();
    Multiset<String> hashTagCounter = HashMultiset.create();
    this.countTwitterEntities(tweets, mentionedCounter, hashTagCounter);
    mentionedCounter = Multisets.copyHighestCountFirst(mentionedCounter);
    hashTagCounter = Multisets.copyHighestCountFirst(hashTagCounter);
    List<MentionedDto> mentions = this.buildTwitterMentionedList(mentionedCounter);
    List<HashTagDto> hashTagList = Lists.newArrayList();
    for (String hashTag : hashTagCounter.elementSet()) {
        hashTagList.add(new HashTagDto(hashTag, hashTagCounter.count(hashTag)));
    }
    return new TwitterProfileSummaryDto(socialProfileDto, hashTagList, mentions);
}
From source file: pl.polzone.classifier.Classifier.java
public String predict(java.util.List<String> words) {
    final Multiset<String> scores = HashMultiset.create();
    for (String word : words) {
        word = stem(word);
        if (wordCount.getCount(word) > feedCount / 2)
            continue;
        if (occurences.containsKey(word))
            for (Object category : occurences.get(word).uniqueSet())
                scores.add((String) category,
                        occurences.get(word).getCount(category) + (feedCount - wordCount.getCount(word)));
    }
    if (scores.isEmpty())
        return null;
    Iterator<Entry<String>> sorted = Multisets.copyHighestCountFirst(scores).entrySet().iterator();
    String highest = sorted.next().getElement();
    if (sorted.hasNext()) {
        String runnerUp = sorted.next().getElement();
        if (scores.count(highest) > scores.count(runnerUp) * 2)
            feed(highest, words);
    }
    return highest;
}
From source file: edu.uci.ics.sourcerer.tools.java.component.identifier.internal.ComponentRepositoryBuilder.java
private void computeLibraryDependencies() {
    task.start("Computing library version to library dependencies");
    {
        // Build map from Clusters to Libraries
        Multimap<Cluster, Library> clustersToLibraries = HashMultimap.create();
        for (Library library : repo.getLibraries()) {
            if (library.getCoreCluster() != null) {
                clustersToLibraries.put(library.getCoreCluster(), library);
            }
            for (Cluster cluster : library.getSecondaryClusters()) {
                clustersToLibraries.put(cluster, library);
            }
        }
        for (Library library : repo.getLibraries()) {
            for (LibraryVersion version : library.getVersions()) {
                Multiset<Library> librarySet = HashMultiset.create();
                for (ClusterVersion clusterVersion : version.getClusters()) {
                    librarySet.addAll(clustersToLibraries.get(clusterVersion.getCluster()));
                }
                for (Library dep : librarySet.elementSet()) {
                    if (library != dep) {
                        if (dep.getCoreCluster() == null) {
                            // Must match every secondary cluster for package libraries
                            if (librarySet.count(dep) == dep.getSecondaryClusters().size()) {
                                version.addLibraryDependency(dep);
                            }
                        } else {
                            // See if there's a jar in this library that matches the right clusters
                            for (Jar jar : dep.getJars()) {
                                if (version.getClusters().containsAll(jarsToClusters.get(jar))) {
                                    version.addLibraryDependency(dep);
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    task.finish();
}
From source file: org.icgc.dcc.submission.dictionary.DictionaryValidator.java
private void validateCodeLists(Set<DictionaryConstraintViolation> errors,
        Set<DictionaryConstraintViolation> warnings) {
    for (val codeListName : dictionary.getCodeListNames()) {
        val collection = codeListIndex.get(codeListName);
        int count = collection.size();
        if (count == 0) {
            warnings.add(new DictionaryConstraintViolation("Missing code list", codeListName));
            break;
        }
        if (count > 1) {
            errors.add(new DictionaryConstraintViolation("Duplicate code lists", collection));
        }
        val codeList = getFirst(collection, null);
        Multiset<String> codes = HashMultiset.create();
        Multiset<String> values = HashMultiset.create();
        for (val term : codeList.getTerms()) {
            codes.add(term.getCode());
            values.add(term.getValue());
        }
        for (val term : codeList.getTerms()) {
            val code = term.getCode();
            val value = term.getValue();
            if (codes.count(code) > 1) {
                errors.add(new DictionaryConstraintViolation("Duplicate code list codes", term, code, codeList));
            }
            if (values.count(value) > 1) {
                errors.add(new DictionaryConstraintViolation("Duplicate code list values", term, value, codeList));
            }
            if (codes.contains(value) && !code.equals(value)) {
                errors.add(new DictionaryConstraintViolation("Non-disjoint code list code and value", term,
                        value, codeList));
            }
        }
    }
}
From source file: com.seajas.search.codex.service.social.SocialProfileService.java
protected List<MentionedDto> buildTwitterMentionedList(Multiset<Long> mentionedCounter) {
    List<MentionedDto> mentions = Lists.newArrayList();
    List<Long> ids = Lists.newArrayList(mentionedCounter.elementSet());
    List<TwitterProfile> twitterProfiles = socialFacade.getTwitterProfiles(ids);
    Map<Long, TwitterProfile> profileMap = Maps.newHashMap();
    for (TwitterProfile twitterProfile : twitterProfiles) {
        profileMap.put(twitterProfile.getId(), twitterProfile);
    }
    List<String> profileImageUrls = Lists.newArrayList();
    for (Long mentionId : mentionedCounter.elementSet()) {
        int count = mentionedCounter.count(mentionId);
        TwitterProfile profile = profileMap.get(mentionId);
        if (profile != null) {
            SocialProfileDto socialProfileDto = SocialProfileDto.translate(profile);
            profileImageUrls.add(profile.getProfileImageUrl());
            mentions.add(new MentionedDto(socialProfileDto, count));
        }
    }
    if (!profileImageUrls.isEmpty()) {
        List<String> mediaUrls = this.storeImagesOnMediaServer(profileImageUrls);
        for (int i = 0; i < mentions.size(); i++) {
            mentions.get(i).getSocialProfile().setProfileImageMediaUrl(mediaUrls.get(i));
        }
    }
    return mentions;
}
From source file: de.iteratec.iteraplan.businesslogic.service.DashboardServiceImpl.java
/** {@inheritDoc} */
public Map<String, Integer> getIsrStatusMap(List<InformationSystemRelease> isrs) {
    Multiset<InformationSystemRelease.TypeOfStatus> multiset = EnumMultiset
            .create(InformationSystemRelease.TypeOfStatus.class);
    for (InformationSystemRelease tcr : isrs) {
        multiset.add(tcr.getTypeOfStatus());
    }
    Map<String, Integer> statusMap = Maps.newLinkedHashMap();
    for (InformationSystemRelease.TypeOfStatus status : InformationSystemRelease.TypeOfStatus.values()) {
        statusMap.put(status.toString(), Integer.valueOf(multiset.count(status)));
    }
    return statusMap;
}
From source file: org.opentripplanner.routing.spt.MultiStateShortestPathTree.java
public void dump() {
    Multiset<Integer> histogram = HashMultiset.create();
    int statesCount = 0;
    int maxSize = 0;
    for (Map.Entry<Vertex, List<State>> kv : stateSets.entrySet()) {
        List<State> states = kv.getValue();
        int size = states.size();
        histogram.add(size);
        statesCount += size;
        if (size > maxSize) {
            maxSize = size;
        }
    }
    LOG.info("SPT: vertices: " + stateSets.size() + " states: total: " + statesCount + " per vertex max: "
            + maxSize + " avg: " + (statesCount * 1.0 / stateSets.size()));
    List<Integer> nStates = new ArrayList<Integer>(histogram.elementSet());
    Collections.sort(nStates);
    for (Integer nState : nStates) {
        LOG.info(nState + " states: " + histogram.count(nState) + " vertices.");
    }
}
From source file: de.iteratec.iteraplan.businesslogic.service.DashboardServiceImpl.java
/** {@inheritDoc} */
public Map<String, Integer> getTechnicalComponentsStatusMap(List<TechnicalComponentRelease> tcrs) {
    Multiset<TechnicalComponentRelease.TypeOfStatus> multiset = EnumMultiset
            .create(TechnicalComponentRelease.TypeOfStatus.class);
    for (TechnicalComponentRelease tcr : tcrs) {
        multiset.add(tcr.getTypeOfStatus());
    }
    Map<String, Integer> statusMap = Maps.newLinkedHashMap();
    for (TechnicalComponentRelease.TypeOfStatus status : TechnicalComponentRelease.TypeOfStatus.values()) {
        statusMap.put(status.toString(), Integer.valueOf(multiset.count(status)));
    }
    return statusMap;
}
From source file: org.dllearner.utilities.examples.AutomaticNegativeExampleFinderSPARQL2.java
private void keepMostSpecificClasses(Multiset<OWLClass> classes) {
    HashMultiset<OWLClass> copy = HashMultiset.create(classes);
    for (OWLClass nc1 : copy.elementSet()) {
        for (OWLClass nc2 : copy.elementSet()) {
            if (!nc1.equals(nc2)) {
                // remove class nc1 if it is a superclass of another class nc2
                boolean isSubClassOf = false;
                if (sr.getClassHierarchy() != null) {
                    isSubClassOf = sr.getClassHierarchy().isSubclassOf(nc2, nc1);
                } else {
                    isSubClassOf = sr.isSuperClassOf(nc1, nc2);
                }
                if (isSubClassOf) {
                    classes.remove(nc1, classes.count(nc1));
                    break;
                }
            }
        }
    }
}