List of usage examples for com.google.common.collect Multiset add
@Override
boolean add(E element);
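Before the project examples below, a minimal, self-contained sketch of the method's basic contract (assuming only Guava on the classpath; the class name and sample values are illustrative, not taken from any of the projects listed):

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class MultisetAddExample {
    public static void main(String[] args) {
        Multiset<String> words = HashMultiset.create();
        // add(E) records one more occurrence of the element and returns true,
        // because adding to a multiset always modifies it
        words.add("apple");
        words.add("apple");
        words.add("banana");
        System.out.println(words.count("apple"));  // 2
        System.out.println(words.count("banana")); // 1
        System.out.println(words.size());          // 3 (total occurrences)
    }
}

Most of the examples below follow this same pattern: create a HashMultiset (or another Multiset implementation), call add(...) once per observed item, then read the tallies back through count(...) or entrySet().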
From source file:edu.berkeley.compbio.ml.cluster.bayesian.TacoaClustering.java
/**
 * Hack the prior probabilities using the number of clusters per training label. TacoaDistanceMeasure takes the prior
 * to be per label, not per cluster. So, the "distance" between a sample and a cluster depends on how many clusters
 * share the same training label.
 */
protected synchronized void preparePriors() //throws DistributionException
{
    //normalizeClusterLabelProbabilities();
    try {
        final Multiset<String> populatedTrainingLabels = HashMultiset.create();
        //int clustersWithTrainingLabel = 0;
        final Collection<? extends CentroidCluster<T>> immutableClusters = getClusters();
        for (final CentroidCluster<T> theCluster : immutableClusters) {
            try {
                // note this also insures that every cluster has a training label, otherwise it throws NoSuchElementException
                final String label = theCluster.getImmutableWeightedLabels()
                        .getDominantKeyInSet(potentialTrainingBins);
                // could use theCluster.getDerivedLabelProbabilities() there except they're not normalized yet, and there's no need
                populatedTrainingLabels.add(label);
                //clustersWithTrainingLabel++;
            } catch (NoSuchElementException e) {
                logger.warn("Cluster has no training label: " + theCluster);
            }
        }

        logger.info(String.valueOf(populatedTrainingLabels.size()) + " of " + getNumClusters()
                + " clusters have a training label; " + populatedTrainingLabels.entrySet().size()
                + " labels were trained");

        final ImmutableMap.Builder<Cluster<T>, Double> builder = ImmutableMap.builder();
        final Multinomial<String> labelPriors = new Multinomial<String>(populatedTrainingLabels);
        for (final CentroidCluster<T> theCluster : immutableClusters) {
            final String label = theCluster.getImmutableWeightedLabels()
                    .getDominantKeyInSet(potentialTrainingBins); // PERF redundant
            builder.put(theCluster, labelPriors.get(label));
        }

        clusterPriors = builder.build();
    } catch (DistributionException e) {
        logger.error("Error", e);
        throw new ClusterRuntimeException(e);
    }
}
From source file:org.opentripplanner.routing.graph.Graph.java
public void summarizeBuilderAnnotations() {
    List<GraphBuilderAnnotation> gbas = this.graphBuilderAnnotations;
    Multiset<Class<? extends GraphBuilderAnnotation>> classes = HashMultiset.create();
    LOG.info("Summary (number of each type of annotation):");
    for (GraphBuilderAnnotation gba : gbas)
        classes.add(gba.getClass());
    for (Multiset.Entry<Class<? extends GraphBuilderAnnotation>> e : classes.entrySet()) {
        String name = e.getElement().getSimpleName();
        int count = e.getCount();
        LOG.info("    {} - {}", name, count);
    }
}
From source file:com.trein.gtfs.otp.building.graph.osm.Graph.java
public void summarizeBuilderAnnotations() {
    List<GraphBuilderAnnotation> gbas = this.graphBuilderAnnotations;
    Multiset<Class<? extends GraphBuilderAnnotation>> classes = HashMultiset.create();
    LOG.info("Summary (number of each type of annotation):");
    for (GraphBuilderAnnotation gba : gbas) {
        classes.add(gba.getClass());
    }
    for (Multiset.Entry<Class<? extends GraphBuilderAnnotation>> e : classes.entrySet()) {
        String name = e.getElement().getSimpleName();
        int count = e.getCount();
        LOG.info("    {} - {}", name, count);
    }
}
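Both variants above log the annotation counts in the multiset's default iteration order. As a self-contained side sketch (not taken from either project; the annotation names are made up for illustration), the same add-then-count pattern can be reported from most to least frequent with Multisets.copyHighestCountFirst:

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;
import com.google.common.collect.Multisets;

public class AnnotationSummaryOrdered {
    public static void main(String[] args) {
        Multiset<String> classes = HashMultiset.create();
        // hypothetical annotation type names, added once per occurrence
        classes.add("MissingShape");
        classes.add("MissingShape");
        classes.add("UnlinkedStop");
        // copyHighestCountFirst returns an ImmutableMultiset whose entrySet()
        // iterates from most to least frequent element
        for (Multiset.Entry<String> e : Multisets.copyHighestCountFirst(classes).entrySet()) {
            System.out.println(e.getElement() + " - " + e.getCount());
        }
    }
}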
From source file:com.music.service.text.TimelineToMusicService.java
private Variation getVariation(List<Tweet> tweets, TimelineMusic meta) {
    Morphology morphology = new Morphology(new StringReader(""));
    Multiset<String> words = HashMultiset.create();
    for (Tweet tweet : tweets) {
        String tweetText = tweet.getText().toLowerCase();
        List<String> urls = TimelineToMusicService.extractUrls(tweetText);
        for (String url : urls) {
            tweetText = tweetText.replace(url, "");
        }
        List<String> usernames = TimelineToMusicService.extractMentionedUsernames(tweetText);
        for (String username : usernames) {
            tweetText = tweetText.replace(username, "").replace("rt", "");
        }

        String[] wordsInTweet = tweetText.split("[^\\p{L}&&[^']]+");
        for (String word : wordsInTweet) {
            try {
                words.add(morphology.stem(word));
            } catch (Exception ex) {
                words.add(word);
            }
        }
    }
    words.removeAll(stopwords);

    // if a word is mentioned more times than is 4% of the tweets, it's considered a topic
    double topicThreshold = tweets.size() * 4 / 100;
    for (Iterator<String> it = words.iterator(); it.hasNext();) {
        String word = it.next();
        // remove stopwords not in the list (e.g. in a different language).
        // We consider all words less than 4 characters to be stop words
        if (word == null || word.length() < 4) {
            it.remove();
        } else if (words.count(word) < topicThreshold) {
            it.remove();
        }
    }
    meta.setTopKeywords(new HashSet<>(words.elementSet()));

    // the more topics you have, the more variative music
    if (meta.getTopKeywords().size() > 40) {
        return Variation.EXTREMELY_VARIATIVE;
    } else if (meta.getTopKeywords().size() > 30) {
        return Variation.VERY_VARIATIVE;
    } else if (meta.getTopKeywords().size() > 20) {
        return Variation.MOVING;
    } else if (meta.getTopKeywords().size() > 10) {
        return Variation.AVERAGE;
    } else {
        return Variation.MONOTONOUS;
    }
}
From source file:i5.las2peer.services.recommender.librec.data.FilmTrustDataDAO.java
/**
 * print out distributions of the dataset <br/>
 *
 * <ul>
 * <li>#users (y) -- #ratings (x) (that are issued by each user)</li>
 * <li>#items (y) -- #ratings (x) (that are received by each item)</li>
 * </ul>
 */
public void printDistr(boolean isWriteOut) throws Exception {
    if (rateMatrix == null)
        readData();

    // count how many users give the same number of ratings
    Multiset<Integer> numURates = HashMultiset.create();
    // count how many items receive the same number of ratings
    Multiset<Integer> numIRates = HashMultiset.create();

    for (int r = 0, rm = rateMatrix.numRows; r < rm; r++) {
        int numRates = rateMatrix.rowSize(r);
        numURates.add(numRates);
    }

    for (int c = 0, cm = rateMatrix.numColumns; c < cm; c++) {
        int numRates = rateMatrix.columnSize(c);
        numIRates.add(numRates);
    }

    String ustrs = Strings.toString(numURates);
    String istrs = Strings.toString(numIRates);

    if (isWriteOut) {
        FileIO.writeString(FileIO.desktop + "user-distr.txt", ustrs);
        FileIO.writeString(FileIO.desktop + "item-distr.txt", istrs);
    } else {
        Logs.debug("#ratings (x) ~ #users (y): \n" + ustrs);
        Logs.debug("#ratings (x) ~ #items (y): \n" + istrs);
    }

    Logs.debug("Done!");
}
From source file:google.registry.batch.DeleteContactsAndHostsAction.java
@Override
public void run() {
    LeaseOptions options = LeaseOptions.Builder.withCountLimit(maxLeaseCount()).leasePeriod(LEASE_MINUTES,
            MINUTES);
    List<TaskHandle> tasks = queue.leaseTasks(options);
    if (tasks.isEmpty()) {
        response.setPayload("No contact/host deletion tasks in pull queue.");
        return;
    }
    Multiset<String> kindCounts = HashMultiset.create(2);
    ImmutableList.Builder<DeletionRequest> builder = new ImmutableList.Builder<>();
    ImmutableList.Builder<Key<? extends EppResource>> resourceKeys = new ImmutableList.Builder<>();
    final List<TaskHandle> tasksToDelete = new ArrayList<>();
    for (TaskHandle task : tasks) {
        try {
            Optional<DeletionRequest> deletionRequest = DeletionRequest.createFromTask(task, clock.nowUtc());
            if (deletionRequest.isPresent()) {
                builder.add(deletionRequest.get());
                resourceKeys.add(deletionRequest.get().key());
                kindCounts.add(deletionRequest.get().key().getKind());
            } else {
                tasksToDelete.add(task);
            }
        } catch (Exception e) {
            logger.severefmt(e, "Could not parse async deletion request, delaying task for a day: %s", task);
            // Grab the lease for a whole day, so that it won't continue throwing errors every five
            // minutes.
            queue.modifyTaskLease(task, 1L, DAYS);
        }
    }
    deleteTasksWithRetry(tasksToDelete);
    ImmutableList<DeletionRequest> deletionRequests = builder.build();
    if (deletionRequests.isEmpty()) {
        logger.info("No asynchronous deletions to process because all were already handled.");
        response.setPayload("All requested deletions of contacts/hosts have already occurred.");
    } else {
        logger.infofmt("Processing asynchronous deletion of %d contacts and %d hosts: %s",
                kindCounts.count(KIND_CONTACT), kindCounts.count(KIND_HOST), resourceKeys.build());
        runMapreduce(deletionRequests);
    }
}
From source file:de.hzi.helmholtz.Compare.PathwayComparisonWithModules.java
public Multimap<Double, String> SubsetsMatching(final PathwayWithModules firstPathway,
        final PathwayWithModules secondPathway, BiMap<Integer, Integer> newSourceGeneIdToPositionMap,
        BiMap<Integer, Integer> newTargetGeneIdToPositionMap, int Yes) {
    Multimap<Double, String> resultPerfect = TreeMultimap.create(Ordering.natural().reverse(),
            Ordering.natural());
    PathwayWithModules firstPathwayCopy = new PathwayWithModules(firstPathway); // Copy of the Query pathway
    PathwayWithModules secondPathwayCopy = new PathwayWithModules(secondPathway); // Copy of the Target pathway
    // PathwayWithModules secondPathwayCopy1 = new PathwayWithModules(secondPathway);
    int currentQueryGene = 0;
    Iterator<ModuleGene> sourceGeneIt = firstPathway.moduleGeneIterator();
    List<Integer> QueryToRemove = new ArrayList<Integer>();
    List<Integer> TargetToRemove = new ArrayList<Integer>();
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++;
        ModuleGene queryGene = sourceGeneIt.next();
        int currentTargetGene = 0;
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Module m : queryGene.getModule()) {
            for (Domain d : m.getDomains()) {
                qfunction.add(d.getDomainFunctionString());
                qfunctionList.add(d.getDomainFunctionString());
                qactivity.add(d.getStatus().toString());
                qsubstrate.add(d.getSubstrates());
            }
        }
        Iterator<ModuleGene> targetGeneIt = secondPathway.moduleGeneIterator();
        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            ModuleGene targetGene = targetGeneIt.next();
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tfunctionList = new ArrayList<String>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (Module m : targetGene.getModule()) {
                for (Domain d : m.getDomains()) {
                    tfunctionList.add(d.getDomainFunctionString());
                    tfunction.add(d.getDomainFunctionString());
                    tactivity.add(d.getStatus().toString());
                    tsubstrate.add(d.getSubstrates());
                }
            }
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            if (DomainsCovered.size() == qfunction.size() && DomainsCovered.size() == tfunction.size()) {
                Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                        tactivity);
                Multimap<String, Integer> Functionscores = ArrayListMultimap.create();

                int TranspositionDomains = LevenshteinDistance.computeLevenshteinDistance(qfunctionList,
                        tfunctionList);
                if (TranspositionDomains > 0) {
                    TranspositionDomains = 1;
                }

                Functionscores.put(qfunction.size() + "-0", TranspositionDomains);
                Multimap<Double, Multimap<String, Integer>> substratescore = myFunction
                        .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
                Object activityScore = activityscores.asMap().keySet().toArray()[0];
                Object substrateScore = substratescore.asMap().keySet().toArray()[0];
                double finalScore = Math
                        .round((((2.9 * 1.0) + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                                + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                        / 100.0;
                String ConvertedGeneIDs = "";
                if (Yes == 0) {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                            newSourceGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentTargetGene),
                                    newTargetGeneIdToPositionMap);
                } else {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentTargetGene),
                            newTargetGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                    newSourceGeneIdToPositionMap);
                }
                resultPerfect.put(finalScore, ConvertedGeneIDs);
                ScoreFunctionMatchMisMatch.put(ConvertedGeneIDs, Functionscores);
                ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
                ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescore.values());

                TargetToRemove.add(currentTargetGene);
                QueryToRemove.add(currentQueryGene);
            }
        }
    }
    for (int i : TargetToRemove) {
        secondPathwayCopy.removeGene(i);
    }
    for (int i : QueryToRemove) {
        firstPathwayCopy.removeGene(i);
    }
    if (firstPathwayCopy.size() > 0 && secondPathwayCopy.size() > 0) {
        // Re-construct the bimaps
        newSourceGeneIdToPositionMap = HashBiMap.create();
        int temp = 0;
        for (ModuleGene e : firstPathwayCopy.getModulegenes()) {
            temp = temp + 1;
            newSourceGeneIdToPositionMap.put(e.getGeneId(), temp);
        }
        newTargetGeneIdToPositionMap = HashBiMap.create();
        temp = 0;
        for (ModuleGene e : secondPathwayCopy.getModulegenes()) {
            temp = temp + 1;
            newTargetGeneIdToPositionMap.put(e.getGeneId(), temp);
        }
        resultPerfect.putAll(SubsetIdentification(firstPathwayCopy, secondPathwayCopy,
                newSourceGeneIdToPositionMap, newTargetGeneIdToPositionMap, Yes));
    }
    System.out.println(resultPerfect);
    return resultPerfect;
}
From source file:org.dllearner.algorithms.pattern.OWLAxiomPatternFinder.java
/**
 * Start the pattern detection.
 */
public void start() {
    final ExecutorService tp = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() - 1);

    Collection<OntologyRepositoryEntry> entries = repository.getEntries();
    if (randomOrder) {
        List<OntologyRepositoryEntry> entryList = new ArrayList<>(repository.getEntries());
        Collections.shuffle(entryList);
        entries = entryList;
    }

    final Multiset<OWLAxiom> allAxiomPatterns = HashMultiset.create();

    AtomicInteger i = new AtomicInteger(1);

    manager = OWLManager.createConcurrentOWLOntologyManager();

    for (OntologyRepositoryEntry entry : entries) {
        tp.execute(new Runnable() {
            @Override
            public void run() {
                OWLAxiomRenamer renamer = new OWLAxiomRenamer(dataFactory);
                System.out.print(i.incrementAndGet() + ": ");
                URI uri = entry.getPhysicalURI();
                // if(uri.toString().startsWith("http://rest.bioontology.org/bioportal/ontologies/download/42764")){
                if (!ontologyProcessed(uri)) { //if(entry.getOntologyShortName().equals("00698"))continue;
                    LOGGER.info("Loading \"" + entry.getOntologyShortName() + "\" from " + uri);
                    try {
                        OWLOntology ontology = manager.loadOntology(IRI.create(uri));
                        Multiset<OWLAxiom> axiomPatterns = HashMultiset.create();
                        Set<OWLAxiom> logicalAxioms = new HashSet<>();
                        for (AxiomType<?> type : AxiomType.AXIOM_TYPES) {
                            if (type.isLogical()) {
                                logicalAxioms.addAll(ontology.getAxioms(type, Imports.INCLUDED));
                            }
                        }
                        LOGGER.info(" (" + logicalAxioms.size() + " axioms)");
                        for (OWLAxiom axiom : logicalAxioms) {
                            OWLAxiom renamedAxiom = renamer.rename(axiom);
                            axiomPatterns.add(renamedAxiom);
                        }
                        // allAxiomPatterns.addAll(axiomPatterns);
                        addOntologyPatterns(uri, ontology, axiomPatterns);
                        // for (OWLAxiom owlAxiom : Multisets.copyHighestCountFirst(allAxiomPatterns).elementSet()) {
                        //     System.out.println(owlAxiom + ": " + allAxiomPatterns.count(owlAxiom));
                        // }
                        manager.removeOntology(ontology);
                    } catch (OWLOntologyAlreadyExistsException e) {
                        e.printStackTrace();
                    } catch (Exception e) {
                        e.printStackTrace();
                        addOntologyError(uri, e);
                    }
                } else {
                    LOGGER.info("Already processed.");
                }
            }
        });
    }
    tp.shutdown();
    try {
        // Wait a while for existing tasks to terminate
        if (!tp.awaitTermination(60, TimeUnit.MINUTES)) {
            tp.shutdownNow(); // Cancel currently executing tasks
            // Wait a while for tasks to respond to being cancelled
            if (!tp.awaitTermination(60, TimeUnit.SECONDS))
                System.err.println("Pool did not terminate");
        }
    } catch (InterruptedException ie) {
        // (Re-)Cancel if current thread also interrupted
        tp.shutdownNow();
        // Preserve interrupt status
        Thread.currentThread().interrupt();
    }
}
From source file:edu.uci.ics.sourcerer.tools.java.component.identifier.stats.CoverageCalculator.java
public static void calculateJarCoverage() { TaskProgressLogger task = TaskProgressLogger.get(); task.start("Calculating coverage by " + JAR_REPO.getValue().getPath() + " of external imports from " + EXTERNAL_REPO.getValue() + " and missing imports from " + MISSING_REPO.getValue()); // Load the jar repo JavaRepository jarRepo = JavaRepositoryFactory.INSTANCE.loadJavaRepository(JAR_REPO); task.start("Populating the prefix tree"); SourcedFqnNode root = SourcedFqnNode.createRoot(); boolean loaded = false; if (SOURCED_CACHE.getValue().exists()) { task.start("Loading cache"); try (BufferedReader reader = IOUtils.createBufferedReader(SOURCED_CACHE.getValue())) { root.createLoader().load(reader); loaded = true;/*from w w w . j ava 2s . c om*/ } catch (IOException | NoSuchElementException e) { logger.log(Level.SEVERE, "Error loading cache", e); root = SourcedFqnNode.createRoot(); } task.finish(); } if (!loaded) { int nonEmptyMaven = 0; task.start("Processing maven jars", "jars processed", 10_000); for (JarFile jar : jarRepo.getMavenJarFiles()) { boolean go = true; for (String fqn : FileUtils.getClassFilesFromJar(jar.getFile().toFile())) { if (go) { nonEmptyMaven++; go = false; } root.getChild(fqn.replace('$', '/'), '/').addSource(Source.MAVEN, jar.getProperties().HASH.getValue()); } task.progress(); } task.finish(); int nonEmptyProject = 0; task.start("Processing project jars", "jars processed", 10_000); for (JarFile jar : jarRepo.getProjectJarFiles()) { boolean go = true; for (String fqn : FileUtils.getClassFilesFromJar(jar.getFile().toFile())) { if (go) { nonEmptyProject++; go = false; } root.getChild(fqn.replace('$', '/'), '/').addSource(Source.PROJECT, jar.getProperties().HASH.getValue()); } task.progress(); } task.finish(); // Save the prefix tree task.start("Saving prefix tree cache"); try (BufferedWriter writer = IOUtils .makeBufferedWriter(FileUtils.ensureWriteable(SOURCED_CACHE.getValue()))) { root.createSaver().save(writer); } catch (IOException e) { logger.log(Level.SEVERE, "Error writing log", e); FileUtils.delete(SOURCED_CACHE.getValue()); } task.finish(); int mavenClassFiles = 0; int projectClassFiles = 0; int mavenUnique = 0; int projectUnique = 0; Set<SourcedFqnNode> mavenPackages = new HashSet<>(); Set<SourcedFqnNode> projectPackages = new HashSet<>(); for (SourcedFqnNode node : root.getPostOrderIterable()) { if (node.has(Source.MAVEN)) { mavenClassFiles += node.getCount(Source.MAVEN); mavenUnique++; mavenPackages.add(node.getParent()); } if (node.has(Source.PROJECT)) { projectClassFiles += node.getCount(Source.PROJECT); projectUnique++; projectPackages.add(node.getParent()); } } task.start("Reporting statistics on jars"); task.start("Maven"); task.report(nonEmptyMaven + " non-empty jars"); task.report(mavenClassFiles + " class files"); task.report(mavenUnique + " unique types"); task.report(mavenPackages.size() + " packages"); task.finish(); task.start("Project"); task.report(nonEmptyProject + " non-empty jars"); task.report(projectClassFiles + " class files"); task.report(projectUnique + " unique types"); task.report(projectPackages.size() + " packages"); task.finish(); task.finish(); } task.finish(); // Load the external repo ExtractedJavaRepository externalRepo = JavaRepositoryFactory.INSTANCE .loadExtractedJavaRepository(EXTERNAL_REPO); // load the missing repo ExtractedJavaRepository missingRepo = JavaRepositoryFactory.INSTANCE .loadExtractedJavaRepository(MISSING_REPO); NumberFormat format = NumberFormat.getNumberInstance(); format.setMaximumFractionDigits(2); { 
task.start("Processing extracted projects for missing and external types", "projects processed", 10_000); // Averager for external FQNs per project Averager<Integer> externalFqns = Averager.create(); // Averager for missing FQNs per project Averager<Integer> missingFqns = Averager.create(); for (ExtractedJavaProject externalProject : externalRepo.getProjects()) { String loc = externalProject.getLocation().toString(); ExtractedJavaProject missingProject = missingRepo.getProject(externalProject.getLocation()); ReaderBundle externalBundle = ReaderBundle.create(externalProject.getExtractionDir().toFile(), externalProject.getCompressedFile().toFile()); ReaderBundle missingBundle = ReaderBundle.create(missingProject.getExtractionDir().toFile(), missingProject.getCompressedFile().toFile()); int externalCount = 0; int missingCount = 0; // Add all the imports for this project for (ImportEX imp : externalBundle.getTransientImports()) { root.getChild(imp.getImported(), '.').addSource(Source.IMPORTED, loc); } Set<String> validMissing = new HashSet<>(); // Add the external types for (MissingTypeEX missing : externalBundle.getTransientMissingTypes()) { validMissing.add(missing.getFqn()); root.getChild(missing.getFqn(), '.').addSource(Source.EXTERNAL, loc); externalCount++; } // Add the missing types for (MissingTypeEX missing : missingBundle.getTransientMissingTypes()) { if (validMissing.contains(missing.getFqn())) { root.getChild(missing.getFqn(), '.').addSource(Source.MISSING, loc); missingCount++; } } externalFqns.addValue(externalCount); missingFqns.addValue(missingCount); task.progress(); } task.finish(); // Averager<Integer> projectsPerFQN = Averager.create(); // for (SourcedFqnNode fqn : root.getPreOrderIterable()) { // if (fqn.getCount(Source.MISSING) > 0) { // projectsPerFQN.addValue(fqn.getCount(Source.MISSING)); // } // } Percenterator percent = Percenterator.create(externalRepo.getProjectCount()); task.start("Reporting missing type information"); task.report(percent.format(externalFqns.getNonZeroCount()) + " projects with external types"); task.report(percent.format(missingFqns.getNonZeroCount()) + " projects with missing types"); task.report(format.format(externalFqns.getMean()) + " (" + format.format(externalFqns.getStandardDeviation()) + ") imported external types per project, on average"); task.report(format.format(externalFqns.getNonZeroMean()) + " (" + format.format(externalFqns.getNonZeroStandardDeviation()) + ") imported external types per project containing at least one external type, on average"); task.report( format.format(missingFqns.getMean()) + " (" + format.format(missingFqns.getStandardDeviation()) + ") imported missing types per project, on average"); task.report(format.format(missingFqns.getNonZeroMean()) + " (" + format.format(missingFqns.getNonZeroStandardDeviation()) + ") missing FQNs per project containing at least one missing FQN, on average"); task.finish(); // missingFqns.writeValueMap(MISSING_FQNS_PER_PROJECT.getValue()); // projectsPerFQN.writeValueMap(PROJECTS_PER_MISSING_FQN.getValue()); } // Report general statistics { int uniqueTotal = 0; Multiset<Source> uniqueByType = EnumMultiset.create(Source.class); Multiset<Source> totalByType = EnumMultiset.create(Source.class); for (SourcedFqnNode node : root.getPostOrderIterable()) { if (node.hasSource()) { uniqueTotal++; } for (Source source : Source.values()) { int count = node.getCount(source); if (count > 0) { uniqueByType.add(source); totalByType.add(source, count); } } } Percenterator uniqueP = 
Percenterator.create(uniqueTotal); task.start("Reporting FQN counts broken down by source"); for (Source source : Source.values()) { task.report(source.name() + ":"); task.report(" Unique: " + uniqueP.format(uniqueByType.count(source))); task.report(" Total: " + totalByType.count(source)); } task.report("Sum:"); task.report(" Unique: " + uniqueTotal); task.finish(); } // Identify the most popular imported types and packages { for (final Source source : EnumSet.of(Source.IMPORTED, Source.EXTERNAL, Source.MISSING)) { { TreeSet<SourcedFqnNode> popularTypes = new TreeSet<>(new Comparator<SourcedFqnNode>() { @Override public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { int cmp = Integer.compare(o1.getCount(source), o2.getCount(source)); if (cmp == 0) { return o1.compareTo(o2); } else { return cmp; } } }); for (SourcedFqnNode fqn : root.getPostOrderIterable()) { if (fqn.has(source)) { popularTypes.add(fqn); } } task.start("Logging popular types listing for " + source.name()); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), source.name() + "-popular-types.txt"))) { for (SourcedFqnNode fqn : popularTypes.descendingSet()) { writer.write(fqn.getCount(source) + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); } { final Multimap<SourcedFqnNode, String> packages = HashMultimap.create(); for (SourcedFqnNode fqn : root.getPostOrderIterable()) { if (fqn.has(source)) { packages.putAll(fqn.getParent(), fqn.getSourceIDs(source)); } } List<SourcedFqnNode> sorted = new ArrayList<>(packages.keySet()); Collections.sort(sorted, new Comparator<SourcedFqnNode>() { @Override public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { int cmp = -Integer.compare(packages.get(o1).size(), packages.get(o2).size()); if (cmp == 0) { return o1.compareTo(o2); } else { return cmp; } } }); task.start("Logging popular packages listing for " + source.name()); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), source.name() + "-popular-packages.txt"))) { for (SourcedFqnNode fqn : sorted) { writer.write(packages.get(fqn).size() + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); } } } // Identify the most popular external types found in only maven, project and nothing { { TreeSet<SourcedFqnNode> popularJoint = new TreeSet<>( SourcedFqnNode.createComparator(Source.EXTERNAL)); TreeSet<SourcedFqnNode> popularMaven = new TreeSet<>( SourcedFqnNode.createComparator(Source.EXTERNAL)); TreeSet<SourcedFqnNode> popularProject = new TreeSet<>( SourcedFqnNode.createComparator(Source.EXTERNAL)); TreeSet<SourcedFqnNode> popularMissing = new TreeSet<>( SourcedFqnNode.createComparator(Source.EXTERNAL)); for (SourcedFqnNode fqn : root.getPostOrderIterable()) { if (fqn.has(Source.EXTERNAL)) { boolean maven = fqn.has(Source.MAVEN); boolean project = fqn.has(Source.PROJECT); if (maven && project) { popularJoint.add(fqn); } else { if (maven) { popularMaven.add(fqn); } else if (project) { popularProject.add(fqn); } else { popularMissing.add(fqn); } } } } task.start("Logging popular external joint types"); try (LogFileWriter writer = IOUtils .createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "joint-popular-types.txt"))) { for (SourcedFqnNode fqn : popularJoint.descendingSet()) { writer.write(fqn.getCount(Source.EXTERNAL) + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing 
file", e); } task.finish(); task.start("Logging popular external types unique to maven"); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), "maven-unique-popular-types.txt"))) { for (SourcedFqnNode fqn : popularMaven.descendingSet()) { writer.write(fqn.getCount(Source.EXTERNAL) + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); task.start("Logging popular external types unique to project"); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), "project-unique-popular-types.txt"))) { for (SourcedFqnNode fqn : popularProject.descendingSet()) { writer.write(fqn.getCount(Source.EXTERNAL) + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); task.start("Logging popular missing external types"); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), "missing-unique-popular-types.txt"))) { for (SourcedFqnNode fqn : popularMissing.descendingSet()) { writer.write(fqn.getCount(Source.EXTERNAL) + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); } { final Multimap<SourcedFqnNode, String> jointPackages = HashMultimap.create(); final Multimap<SourcedFqnNode, String> mavenPackages = HashMultimap.create(); final Multimap<SourcedFqnNode, String> projectPackages = HashMultimap.create(); final Multimap<SourcedFqnNode, String> missingPackages = HashMultimap.create(); for (SourcedFqnNode fqn : root.getPostOrderIterable()) { if (fqn.has(Source.EXTERNAL)) { boolean maven = fqn.has(Source.MAVEN); boolean project = fqn.has(Source.PROJECT); if (maven && project) { jointPackages.putAll(fqn.getParent(), fqn.getSourceIDs(Source.EXTERNAL)); } else { if (maven) { mavenPackages.putAll(fqn.getParent(), fqn.getSourceIDs(Source.EXTERNAL)); } else if (project) { projectPackages.putAll(fqn.getParent(), fqn.getSourceIDs(Source.EXTERNAL)); } else { missingPackages.putAll(fqn.getParent(), fqn.getSourceIDs(Source.EXTERNAL)); } } } } { List<SourcedFqnNode> sorted = new ArrayList<>(jointPackages.keySet()); Collections.sort(sorted, new Comparator<SourcedFqnNode>() { @Override public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { int cmp = -Integer.compare(jointPackages.get(o1).size(), jointPackages.get(o2).size()); if (cmp == 0) { return o1.compareTo(o2); } else { return cmp; } } }); task.start("Logging popular external joint packages"); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), "joint-popular-packages.txt"))) { for (SourcedFqnNode fqn : sorted) { writer.write(jointPackages.get(fqn).size() + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); } { List<SourcedFqnNode> sorted = new ArrayList<>(mavenPackages.keySet()); Collections.sort(sorted, new Comparator<SourcedFqnNode>() { @Override public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { int cmp = -Integer.compare(mavenPackages.get(o1).size(), mavenPackages.get(o2).size()); if (cmp == 0) { return o1.compareTo(o2); } else { return cmp; } } }); task.start("Logging popular packages unique to maven"); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), "maven-unique-popular-packages.txt"))) { for (SourcedFqnNode fqn : sorted) { writer.write(mavenPackages.get(fqn).size() + "\t" + fqn.getFqn()); 
} } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); } { List<SourcedFqnNode> sorted = new ArrayList<>(projectPackages.keySet()); Collections.sort(sorted, new Comparator<SourcedFqnNode>() { @Override public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { int cmp = -Integer.compare(projectPackages.get(o1).size(), projectPackages.get(o2).size()); if (cmp == 0) { return o1.compareTo(o2); } else { return cmp; } } }); task.start("Logging popular packages unique to project"); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), "project-unique-popular-packages.txt"))) { for (SourcedFqnNode fqn : sorted) { writer.write(projectPackages.get(fqn).size() + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); } { List<SourcedFqnNode> sorted = new ArrayList<>(missingPackages.keySet()); Collections.sort(sorted, new Comparator<SourcedFqnNode>() { @Override public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { int cmp = -Integer.compare(missingPackages.get(o1).size(), missingPackages.get(o2).size()); if (cmp == 0) { return o1.compareTo(o2); } else { return cmp; } } }); task.start("Logging popular packages unique to missing"); try (LogFileWriter writer = IOUtils.createLogFileWriter( new File(Arguments.OUTPUT.getValue(), "missing-unique-popular-packages.txt"))) { for (SourcedFqnNode fqn : sorted) { writer.write(missingPackages.get(fqn).size() + "\t" + fqn.getFqn()); } } catch (IOException e) { logger.log(Level.SEVERE, "Error writing file", e); } task.finish(); } } } for (int threshold : new int[] { 1, 2, 10, 50, 100 }) { Multiset<String> externalUniqueByString = HashMultiset.create(6); Multiset<String> externalTotalByString = HashMultiset.create(6); Multiset<String> missingUniqueByString = HashMultiset.create(6); Multiset<String> missingTotalByString = HashMultiset.create(6); int externalUniqueTotal = 0; int externalTotalTotal = 0; int missingUniqueTotal = 0; int missingTotalTotal = 0; for (SourcedFqnNode node : root.getPostOrderIterable()) { int externalCount = node.getCount(Source.EXTERNAL); if (externalCount >= threshold) { externalUniqueTotal++; externalTotalTotal += externalCount; boolean maven = node.has(Source.MAVEN); boolean project = node.has(Source.PROJECT); if (maven) { externalUniqueByString.add("Maven"); externalTotalByString.add("Maven", externalCount); if (!project) { externalUniqueByString.add("Maven only"); externalTotalByString.add("Maven only", externalCount); } else { externalUniqueByString.add("Project"); externalTotalByString.add("Project", externalCount); externalUniqueByString.add("Maven and Project"); externalTotalByString.add("Maven and Project", externalCount); } } else if (project) { externalUniqueByString.add("Project"); externalTotalByString.add("Project", externalCount); externalUniqueByString.add("Project only"); externalTotalByString.add("Project only", externalCount); } else { externalUniqueByString.add("Nothing"); externalTotalByString.add("Nothing", externalCount); } } int missingCount = node.getCount(Source.MISSING); if (missingCount >= threshold) { missingUniqueTotal++; missingTotalTotal += missingCount; boolean maven = node.has(Source.MAVEN); boolean project = node.has(Source.PROJECT); if (maven) { missingUniqueByString.add("Maven"); missingTotalByString.add("Maven", missingCount); if (!project) { missingUniqueByString.add("Maven only"); missingTotalByString.add("Maven only", missingCount); } else { 
missingUniqueByString.add("Project"); missingTotalByString.add("Project", missingCount); missingUniqueByString.add("Maven and Project"); missingTotalByString.add("Maven and Project", missingCount); } } else if (project) { missingUniqueByString.add("Project"); missingTotalByString.add("Project", missingCount); missingUniqueByString.add("Project only"); missingTotalByString.add("Project only", missingCount); } else { missingUniqueByString.add("Nothing"); missingTotalByString.add("Nothing", missingCount); } } } Percenterator externalUniqueP = Percenterator.create(externalUniqueTotal); Percenterator missingUniqueP = Percenterator.create(missingUniqueTotal); Percenterator externalTotalP = Percenterator.create(externalTotalTotal); Percenterator missingTotalP = Percenterator.create(externalTotalTotal); task.start("Reporting external import coverage for threshold " + threshold); for (String condition : externalUniqueByString.elementSet()) { task.report(condition + ":"); task.report(" Unique: " + externalUniqueP.format(externalUniqueByString.count(condition))); task.report(" Total: " + externalTotalP.format(externalTotalByString.count(condition))); } task.report("Sum:"); task.report(" Unique: " + externalUniqueTotal); task.report(" Total: " + externalTotalTotal); task.finish(); task.start("Reporting missing import coverage for threshold " + threshold); for (String condition : missingUniqueByString.elementSet()) { task.report(condition + ":"); task.report(" Unique: " + missingUniqueP.format(missingUniqueByString.count(condition))); task.report(" Total: " + missingTotalP.format(missingTotalByString.count(condition))); } task.report("Sum:"); task.report(" Unique: " + missingUniqueTotal); task.report(" Total: " + missingTotalTotal); task.finish(); } { Set<String> maven = new HashSet<>(); Set<String> mavenImported = new HashSet<>(); Set<String> project = new HashSet<>(); Set<String> projectImported = new HashSet<>(); // Find the coverage of the maven and project jars for (SourcedFqnNode fqn : root.getPostOrderIterable()) { maven.addAll(fqn.getSourceIDs(Source.MAVEN)); project.addAll(fqn.getSourceIDs(Source.PROJECT)); if (fqn.has(Source.IMPORTED)) { mavenImported.addAll(fqn.getSourceIDs(Source.MAVEN)); projectImported.addAll(fqn.getSourceIDs(Source.PROJECT)); } } Percenterator mavenP = Percenterator.create(maven.size()); Percenterator projectP = Percenterator.create(project.size()); task.start("Reporting coverage of jars"); task.report(mavenP.format(mavenImported.size()) + " maven jars had at least one type imported"); task.report(projectP.format(projectImported.size()) + " project jars had at least one type imported"); task.finish(); } // { // // Find all the most popular fqns per source // for (final Source source : Source.values()) { // TreeSet<SourcedFqnNode> sorted = new TreeSet<>(new Comparator<SourcedFqnNode>() { // @Override // public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { // int cmp = Integer.compare(o1.getCount(source), o2.getCount(source)); // if (cmp == 0) { // return o1.compareTo(o2); // } else { // return cmp; // } // } // }); // // for (SourcedFqnNode node : root.getPostOrderIterable()) { // if (node.has(source)) { // sorted.add(node); // } // } // // task.start("Logging popular types listing for " + source.name()); // try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), source.name() + "-popular.txt"))) { // for (SourcedFqnNode fqn : sorted.descendingSet()) { // writer.write(fqn.getCount(source) + "\t" + fqn.getFqn()); // } // } catch 
(IOException e) { // logger.log(Level.SEVERE, "Error writing file", e); // } // task.finish(); // } // } // // { // // Find all the fqns unique to that source // for (final Source source : Source.values()) { // TreeSet<SourcedFqnNode> sorted = new TreeSet<>(new Comparator<SourcedFqnNode>() { // @Override // public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { // int cmp = Integer.compare(o1.getCount(source), o2.getCount(source)); // if (cmp == 0) { // return o1.compareTo(o2); // } else { // return cmp; // } // } // }); // // Set<Source> expected = EnumSet.of(Source.MISSING, source); // for (SourcedFqnNode node : root.getPostOrderIterable()) { // Set<Source> sources = node.getSources(); // if (sources.containsAll(expected) && expected.containsAll(sources)) { // sorted.add(node); // } // } // // task.start("Logging missing types listing"); // try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), source.name() + "-missing.txt"))) { // for (SourcedFqnNode fqn : sorted.descendingSet()) { // writer.write(fqn.getCount(Source.MISSING) + "\t" + fqn.getFqn()); // } // } catch (IOException e) { // logger.log(Level.SEVERE, "Error writing file", e); // } // task.finish(); // } // } // // { // final Multiset<SourcedFqnNode> maven = HashMultiset.create(); // final Multiset<SourcedFqnNode> project = HashMultiset.create(); // final Multiset<SourcedFqnNode> mavenProject= HashMultiset.create(); // final Multiset<SourcedFqnNode> missing = HashMultiset.create(); // // // // Find the package specific info // for (SourcedFqnNode node : root.getPostOrderIterable()) { // int missingCount = node.getCount(Source.MISSING); // if (missingCount > 0) { // int mavenCount = node.getCount(Source.MAVEN); // int projectCount = node.getCount(Source.PROJECT); // if (mavenCount > 0) { // if (projectCount == 0) { // maven.add(node.getParent()); // } else { // mavenProject.add(node.getParent()); // } // } else if (projectCount > 0) { // project.add(node.getParent()); // } else { // missing.add(node.getParent()); // } // } // } // // task.start("Reporting package breakdown"); // task.report("Maven only: " + maven.elementSet().size()); // task.report("Project only: " + project.elementSet().size()); // task.report("Maven and Project: " + mavenProject.elementSet().size()); // task.report("Missing: " + missing.elementSet().size()); // task.finish(); // // task.start("Logging package popularity"); // // Maven // SourcedFqnNode[] nodes = maven.elementSet().toArray(new SourcedFqnNode[maven.elementSet().size()]); // Arrays.sort(nodes, new Comparator<SourcedFqnNode>() { // @Override // public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { // int cmp = Integer.compare(maven.count(o2), maven.count(o1)); // if (cmp == 0) { // return o1.compareTo(o2); // } else { // return cmp; // } // } // }); // try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "maven-pkgs.txt"))) { // for (SourcedFqnNode pkg : nodes) { // writer.write(maven.count(pkg) + "\t" + pkg.getFqn()); // } // } catch (IOException e) { // logger.log(Level.SEVERE, "Error writing file", e); // } // // // Project // nodes = project.elementSet().toArray(new SourcedFqnNode[project.elementSet().size()]); // Arrays.sort(nodes, new Comparator<SourcedFqnNode>() { // @Override // public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { // int cmp = Integer.compare(project.count(o2), project.count(o1)); // if (cmp == 0) { // return o1.compareTo(o2); // } else { // return cmp; // } // } // }); // try 
(LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "project-pkgs.txt"))) { // for (SourcedFqnNode pkg : nodes) { // writer.write(project.count(pkg) + "\t" + pkg.getFqn()); // } // } catch (IOException e) { // logger.log(Level.SEVERE, "Error writing file", e); // } // // // Maven/Project // nodes = mavenProject.elementSet().toArray(new SourcedFqnNode[mavenProject.elementSet().size()]); // Arrays.sort(nodes, new Comparator<SourcedFqnNode>() { // @Override // public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { // int cmp = Integer.compare(mavenProject.count(o2), mavenProject.count(o1)); // if (cmp == 0) { // return o1.compareTo(o2); // } else { // return cmp; // } // } // }); // try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "maven-project-pkgs.txt"))) { // for (SourcedFqnNode pkg : nodes) { // writer.write(mavenProject.count(pkg) + "\t" + pkg.getFqn()); // } // } catch (IOException e) { // logger.log(Level.SEVERE, "Error writing file", e); // } // // nodes = missing.elementSet().toArray(new SourcedFqnNode[missing.elementSet().size()]); // Arrays.sort(nodes, new Comparator<SourcedFqnNode>() { // @Override // public int compare(SourcedFqnNode o1, SourcedFqnNode o2) { // int cmp = Integer.compare(missing.count(o2), missing.count(o1)); // if (cmp == 0) { // return o1.compareTo(o2); // } else { // return cmp; // } // } // }); // try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "missing-pkgs.txt"))) { // for (SourcedFqnNode pkg : nodes) { // writer.write(missing.count(pkg) + "\t" + pkg.getFqn()); // } // } catch (IOException e) { // logger.log(Level.SEVERE, "Error writing file", e); // } // task.finish(); // } task.finish(); }
From source file:de.hzi.helmholtz.Compare.PathwayComparisonWithModules.java
public Multimap<Double, String> SubsetIdentification(PathwayWithModules firstPathway,
        PathwayWithModules secondPathway, BiMap<Integer, Integer> newSourceGeneIdToPositionMap,
        BiMap<Integer, Integer> newTargetGeneIdToPositionMap, int Yes) {
    Multimap<Double, String> result = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural());

    Iterator<ModuleGene> sourceGeneIt = firstPathway.moduleGeneIterator();
    int currentQueryGene = 0;
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++;
        ModuleGene queryGene = sourceGeneIt.next();

        Multimap<Integer, String> resultr = TreeMultimap.create(Ordering.natural(), Ordering.natural());

        int currentTargetGene = 0;
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Module m : queryGene.getModule()) {
            for (Domain d : m.getDomains()) {
                qfunction.add(d.getDomainFunctionString());
                qfunctionList.add(d.getDomainFunctionString());
                qactivity.add(d.getStatus().toString());
                qsubstrate.add(d.getSubstrates());
            }
        }
        List<String> TargenesSelected = new ArrayList<String>();
        Iterator<ModuleGene> targetGeneIt = secondPathway.moduleGeneIterator();
        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            ModuleGene targetGene = targetGeneIt.next();

            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            List<String> tfunctionList = new ArrayList<String>();
            Iterator<Module> mIter = targetGene.moduleIterator();
            while (mIter.hasNext()) {
                Module m = mIter.next();
                Iterator<Domain> dIter = m.domainIterator();
                while (dIter.hasNext()) {
                    Domain d = dIter.next();
                    tfunction.add(d.getDomainFunctionString());
                    tfunctionList.add(d.getDomainFunctionString());
                    tactivity.add(d.getStatus().toString());
                    tsubstrate.add(d.getSubstrates());
                }
            }
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            int Differences = Math.max(Math.abs(DomainsCovered.size() - tfunction.size()),
                    Math.abs(DomainsCovered.size() - qfunction.size()));
            if (DomainsCovered.size() == tfunction.size() && tfunction.size() > 4) {
                TargenesSelected.add(Integer.toString(currentTargetGene));
            } else {
                resultr.put(Differences, Integer.toString(currentTargetGene));
            }
        }
        int count = 0;
        if (resultr.size() > 0) {
            while (TargenesSelected.size() < 2) {
                Multiset<String> k = LinkedHashMultiset.create(resultr.values());
                Multiset<String> t = LinkedHashMultiset.create(TargenesSelected);
                Multiset<String> Covered = Multisets.intersection(k, t);
                if (Covered.size() == k.size()) {
                    break;
                }
                try {
                    TargenesSelected.addAll(
                            resultr.get(Integer.parseInt(resultr.keySet().toArray()[count].toString())));
                } catch (Exception ds) {
                }
                count = count + 1;
            }
        }
        // //System.out.println(TargenesSelected);
        // Permutation perm = new Permutation();
        // List<String> perms = perm.run(TargenesSelected);
        CombinationGenerator c = new CombinationGenerator(10, 10);
        List<String> perms = c.GenerateAllPossibleCombinations(TargenesSelected);
        myFunction sim = new myFunction();
        double score = 0;
        String targetIdentified = "";
        List<ModuleGene> targetGenesList = secondPathway.getModulegenes();
        for (String permu : perms) {
            String[] values = permu.replace("[", "").replace("]", "").split(",");
            List<String> mergedTargetgenes = new ArrayList<String>();
            List<Integer> ToRemove = new ArrayList<Integer>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (String j : values) {
                ToRemove.add(Integer.parseInt(j.trim()));
                for (Module m : targetGenesList.get(Integer.parseInt(j.trim()) - 1).getModule()) {
                    for (Domain i : m.getDomains()) {
                        mergedTargetgenes.add(i.getDomainFunctionString());
                        tactivity.add(i.getStatus().toString());
                        tsubstrate.add(i.getSubstrates());
                    }
                }
            }
            Multimap<Double, Multimap<String, Integer>> FunctionScores = sim.calculate(qfunctionList,
                    mergedTargetgenes);
            Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                    tactivity);
            Multimap<Double, Multimap<String, Integer>> substratescores = myFunction
                    .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));

            Object FunctionScore = FunctionScores.asMap().keySet().toArray()[0];
            Object activityScore = activityscores.asMap().keySet().toArray()[0];
            Object substrateScore = substratescores.asMap().keySet().toArray()[0];

            double finalScore = Math
                    .round((((2.9 * Double.parseDouble(FunctionScore.toString().trim()))
                            + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                            + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                    / 100.0;
            targetIdentified = permu.replace(",", "+");
            String ConvertedGeneIDs = "";
            if (Yes == 0) {
                ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                        newSourceGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                                newTargetGeneIdToPositionMap);
            } else {
                ConvertedGeneIDs = reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                        newTargetGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                newSourceGeneIdToPositionMap);
            }
            // String ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene), newSourceGeneIdToPositionMap) + "->" + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""), newTargetGeneIdToPositionMap);
            result.put(finalScore, ConvertedGeneIDs);

            ScoreFunctionMatchMisMatch.putAll(ConvertedGeneIDs, FunctionScores.values());
            ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
            ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescores.values());
        }
    }
    return result;
}