Example usage for com.google.common.collect Multiset count

List of usage examples for com.google.common.collect Multiset count

Introduction

On this page you can find example usages of the com.google.common.collect.Multiset#count method.

Prototype

int count(@Nullable Object element);

Source Link

Document

Returns the number of occurrences of an element in this multiset (the count of the element).

Usage

From source file:org.eclipse.xtext.xbase.typesystem.references.LightweightTypeReference.java

/**
 * Returns the list of all super types which includes the super class and the 
 * implemented interfaces. The type parameters of the provided super types are resolved.
 * That means, the super types of <code>ArrayList&lt;String&gt;</code> includes
 * <code>List&lt;String&gt;</code> and <code>Collection&lt;String&gt;</code> 
 * rather than <code>Collection&lt;E&gt;</code>.
 *
 * @return the list of all super types, can be empty.
 */
public List<LightweightTypeReference> getAllSuperTypes() {
    final List<LightweightTypeReference> result = Lists.newArrayList();
    // Both multisets are abused as Object->int counters: count(x) carries a
    // number, not a real multiplicity.
    // distances: count(type) == largest traversal distance seen so far, plus 1
    // (the +1 keeps the stored value non-zero, since a multiset cannot
    // distinguish a count of 0 from "absent").
    final Multiset<JvmType> distances = HashMultiset.create(7);
    // counterPerType: accumulates the running acceptance counter per type; only
    // the relative magnitudes matter (tie-breaker for the sort below).
    final Multiset<JvmType> counterPerType = HashMultiset.create(7);
    collectSuperTypes(new SuperTypeAcceptor() {

        int counter = 0;

        @Override
        public boolean accept(LightweightTypeReference superType, int distance) {
            JvmType type = superType.getType();
            counterPerType.add(type, counter++);
            if (distances.contains(type)) {
                // Already collected: remember the larger distance and keep
                // traversing, otherwise prune this branch (return false).
                int currentCount = distances.count(type);
                if (currentCount < distance + 1) {
                    distances.setCount(type, distance + 1);
                } else {
                    return false;
                }
            } else {
                // First encounter of this type: record the reference and its distance.
                result.add(superType);
                distances.add(type, distance + 1);
            }
            return true;
        }

    });
    // Sort by distance (nearest super types first); ties broken by traversal order.
    Collections.sort(result, new Comparator<LightweightTypeReference>() {
        @Override
        public int compare(/* @Nullable */ LightweightTypeReference o1,
                /* @Nullable */ LightweightTypeReference o2) {
            if (o1 == null || o2 == null) {
                throw new IllegalArgumentException();
            }
            JvmType type1 = o1.getType();
            JvmType type2 = o2.getType();
            // References without a resolvable type are pushed towards the end.
            if (type1 == null)
                return 1;
            if (type2 == null)
                return -1;
            int distanceCompare = Ints.compare(distances.count(type1), distances.count(type2));
            if (distanceCompare != 0)
                return distanceCompare;
            return Ints.compare(counterPerType.count(type1), counterPerType.count(type2));
        }
    });
    return result;
}

From source file:Beans.PlayersBean.java

/**
 * Loads all players from the datastore, computes each player's points and an
 * HTML listing of earned code awards, then sorts the players by points
 * (descending) and assigns ranks, with players on equal points sharing a rank.
 */
@PostConstruct
public void init() {

    try {
        Set<CodeAward> codeAwards = CodesImporter.importCodes();
        MappingCodeAwards.initializeMapping();

        Datastore ds = singleton.getDatastore();
        Query q = ds.createQuery(Player.class);
        players = q.asList();

        if (players == null) {
            players = new ArrayList<>();
        }

        for (Player player : players) {

            StringBuilder sb = new StringBuilder();

            // count(category) == number of awards of that category the player earned
            Multiset<String> categoryCodeAwards = HashMultiset.create();

            for (String code : player.getCodes()) {
                for (CodeAward codeAward : codeAwards) {
                    if (code.equals(codeAward.getCode())) {
                        categoryCodeAwards.add(codeAward.getCategory());
                        try {
                            player.setPoints(player.getPoints() + codeAward.getPoints());
                        } catch (Exception e) {
                            System.out.println("Exception when adding points: ");
                            System.out.println(e);
                        }
                        break;
                    }
                }
            }

            // Render e.g. "<i ... fa-x></i> x 3, <i ... fa-y></i> x 1, " — the
            // trailing separator is stripped afterwards.
            for (String categoryCodeAward : categoryCodeAwards.elementSet()) {
                if (MappingCodeAwards.getMapCategoryToFontIcon().get(categoryCodeAward) != null) {
                    sb.append("<i style=\"font-size:0.8em\" class=\"fa ")
                            .append(MappingCodeAwards.getMapCategoryToFontIcon().get(categoryCodeAward))
                            .append("\"></i> x ").append(categoryCodeAwards.count(categoryCodeAward));
                    sb.append(", ");
                }
            }
            // Strip the trailing ", " completely. The previous version called
            // sb.delete(sb.lastIndexOf(","), sb.length() - 1), which removed
            // only the comma and left a dangling space at the end.
            if (sb.length() >= 2 && sb.lastIndexOf(", ") == sb.length() - 2) {
                sb.setLength(sb.length() - 2);
            }
            player.setHtmlListOfCodeAwards(sb.toString());

        }
    } catch (IOException ex) {
        Logger.getLogger(PlayersBean.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Highest points first.
    Collections.sort(players);
    Collections.reverse(players);

    // Assign ranks: equal points share the previous player's rank, otherwise
    // the rank is the 1-based position in the sorted list.
    Player previous = null;
    int counterPlayers = 0;

    for (Player player : players) {
        counterPlayers++;
        if (previous != null) {
            if (player.getPoints() == previous.getPoints()) {
                player.setRank(previous.getRank());
            } else {
                player.setRank(counterPlayers);
            }
        } else {
            player.setRank(counterPlayers);
        }
        previous = player;

    }
}

From source file:it.units.malelab.ege.benchmark.mapper.MappingPropertiesFitness.java

/**
 * Evaluates a candidate mapper (given as its raw phenotype tree) on every
 * benchmark problem and returns, for each configured property (redundancy,
 * non-uniformity, non-locality), its mean value over the problems.
 *
 * @param mapperRawPhenotype the genotype-to-phenotype mapper to evaluate
 * @return a multi-objective fitness with one mean value per configured property
 */
@Override
public MultiObjectiveFitness<Double> compute(Node<String> mapperRawPhenotype) {
    Map<Property, double[]> propertyValues = new LinkedHashMap<>();
    for (Property property : properties) {
        propertyValues.put(property, new double[problems.size()]);
    }
    int i = 0;
    for (Problem<String, NumericFitness> problem : problems.keySet()) {
        List<Node<String>> phenotypes = new ArrayList<>();
        // count(phenotype) == how many genotypes mapped to that phenotype
        Multiset<Node<String>> groups = LinkedHashMultiset.create();
        //build mapper
        RecursiveMapper<String> mapper = new RecursiveMapper<>(mapperRawPhenotype, maxMappingDepth,
                EXPRESSIVENESS_DEPTH, problem.getGrammar());
        //map every genotype; a failed mapping is represented by the empty tree
        for (BitsGenotype genotype : genotypes) {
            Node<String> phenotype = Node.EMPTY_TREE;
            try {
                phenotype = mapper.map(genotype, Collections.emptyMap());
            } catch (MappingException ex) {
                //ignore: the empty tree stands in for unmappable genotypes
            }
            phenotypes.add(phenotype);
            groups.add(phenotype);
        }
        //compute properties (containsKey instead of keySet().contains)
        if (propertyValues.containsKey(Property.REDUNDANCY)) {
            // Fraction of genotypes that do not produce a distinct phenotype.
            propertyValues.get(Property.REDUNDANCY)[i] = 1d
                    - (double) groups.elementSet().size() / (double) genotypes.size();
        }
        if (propertyValues.containsKey(Property.NON_UNIFORMITY)) {
            // Coefficient of variation (stddev / mean) of the group sizes.
            double[] groupSizes = new double[groups.elementSet().size()];
            int c = 0;
            for (Node<String> phenotype : groups.elementSet()) {
                groupSizes[c] = (double) groups.count(phenotype);
                c = c + 1;
            }
            propertyValues.get(Property.NON_UNIFORMITY)[i] = Math.sqrt(StatUtils.variance(groupSizes))
                    / StatUtils.mean(groupSizes);
        }
        if (propertyValues.containsKey(Property.NON_LOCALITY)) {
            // 1 - normalized Pearson correlation between genotype distances and
            // the corresponding phenotype distances; NaN (degenerate input) -> worst case 1.
            double[] phenotypeDistances = computeDistances(phenotypes, problems.get(problem));
            double locality = 1d
                    - (1d + (new PearsonsCorrelation().correlation(genotypeDistances, phenotypeDistances)))
                            / 2d;
            propertyValues.get(Property.NON_LOCALITY)[i] = Double.isNaN(locality) ? 1d : locality;
        }
        i = i + 1;
    }
    // Average each property over all problems.
    Double[] meanValues = new Double[properties.length];
    for (int j = 0; j < properties.length; j++) {
        meanValues[j] = StatUtils.mean(propertyValues.get(properties[j]));
    }
    return new MultiObjectiveFitness<Double>(meanValues);
}

From source file:org.apache.mahout.classifier.sgd.NewsgroupHelper.java

/**
 * Reads one newsgroup message file and encodes it as a sparse feature vector.
 *
 * @param file the message file to read (UTF-8)
 * @param actual used to synthesize a pseudo-random message date
 * @param leakType controls how much label-correlated information "leaks" into
 *        the features (date format, header counting, body counting)
 * @param overallCounts accumulates global word counts across all messages
 * @return the encoded feature vector
 * @throws IOException if the file cannot be read
 */
Vector encodeFeatureVector(File file, int actual, int leakType, Multiset<String> overallCounts)
        throws IOException {
    long date = (long) (1000 * (DATE_REFERENCE + actual * MONTH + 1 * WEEK * rand.nextDouble()));
    Multiset<String> words = ConcurrentHashMultiset.create();

    BufferedReader reader = Files.newReader(file, Charsets.UTF_8);
    try {
        String line = reader.readLine();
        // The (leaky) date feature: its format depends on the leak type.
        Reader dateString = new StringReader(DATE_FORMATS[leakType % 3].format(new Date(date)));
        countWords(analyzer, words, dateString, overallCounts);
        while (line != null && !line.isEmpty()) {
            boolean countHeader = (line.startsWith("From:") || line.startsWith("Subject:")
                    || line.startsWith("Keywords:") || line.startsWith("Summary:")) && leakType < 6;
            // Consume the header line plus any continuation lines (leading space).
            do {
                Reader in = new StringReader(line);
                if (countHeader) {
                    countWords(analyzer, words, in, overallCounts);
                }
                line = reader.readLine();
            } while (line != null && line.startsWith(" "));
        }
        if (leakType < 3) {
            // Also count the message body.
            countWords(analyzer, words, reader, overallCounts);
        }
    } finally {
        // Closeables.closeQuietly is deprecated in Guava; Closeables.close(_, true)
        // keeps the swallow-on-close semantics and matches the sibling helper
        // in org.apache.mahout.classifier.NewsgroupHelper.
        Closeables.close(reader, true);
    }

    Vector v = new RandomAccessSparseVector(FEATURES);
    bias.addToVector("", 1, v);
    // Log-scaled term frequencies.
    for (String word : words.elementSet()) {
        encoder.addToVector(word, Math.log1p(words.count(word)), v);
    }

    return v;
}

From source file:org.apache.mahout.classifier.NewsgroupHelper.java

/**
 * Encodes a single newsgroup message file as a sparse feature vector, with a
 * configurable amount of label-correlated "leakage" (synthetic date format,
 * header counting, body counting).
 *
 * @param file the message file to read (UTF-8)
 * @param actual used to synthesize a pseudo-random message date
 * @param leakType controls which leaky features are included
 * @param overallCounts accumulates global word counts across all messages
 * @return the encoded feature vector
 * @throws IOException if the file cannot be read
 */
public Vector encodeFeatureVector(File file, int actual, int leakType, Multiset<String> overallCounts)
        throws IOException {
    long fakeDate = (long) (1000 * (DATE_REFERENCE + actual * MONTH + 1 * WEEK * rand.nextDouble()));
    Multiset<String> tokenCounts = ConcurrentHashMultiset.create();

    BufferedReader in = Files.newReader(file, Charsets.UTF_8);
    try {
        String current = in.readLine();
        // Count the synthetic date string; the format chosen depends on leakType.
        Reader fakeDateReader = new StringReader(DATE_FORMATS[leakType % 3].format(new Date(fakeDate)));
        countWords(analyzer, tokenCounts, fakeDateReader, overallCounts);
        while (current != null && !current.isEmpty()) {
            boolean headerCounts = (current.startsWith("From:") || current.startsWith("Subject:")
                    || current.startsWith("Keywords:") || current.startsWith("Summary:")) && leakType < 6;
            // Consume this header line and any continuation lines (leading blank).
            do {
                if (headerCounts) {
                    countWords(analyzer, tokenCounts, new StringReader(current), overallCounts);
                }
                current = in.readLine();
            } while (current != null && current.startsWith(" "));
        }
        if (leakType < 3) {
            // The rest of the reader is the message body — count it as well.
            countWords(analyzer, tokenCounts, in, overallCounts);
        }
    } finally {
        Closeables.close(in, true);
    }

    Vector result = new RandomAccessSparseVector(FEATURES);
    bias.addToVector("", 1, result);
    // Add each distinct token with a log-scaled frequency weight.
    for (String token : tokenCounts.elementSet()) {
        encoder.addToVector(token, Math.log1p(tokenCounts.count(token)), result);
    }

    return result;
}

From source file:org.bridgedb.tools.qc.PatternChecker.java

/**
 * Checks every identifier stored in the given BridgeDb Derby database against
 * the expected id pattern of its data source and prints a report of the
 * mismatches (with up to 10 example ids per data source).
 *
 * @param f the database file to check
 * @throws SQLException if querying the database fails
 * @throws IDMapperException if the database connection cannot be created
 */
public void run(File f) throws SQLException, IDMapperException {
    String database = "" + f;
    //TODO: we can use the new Iterator interface here...
    DBConnector con = new DataDerby();
    Connection sqlcon = con.createConnection(database, 0);

    Multimap<DataSource, String> missExamples = HashMultimap.create();
    Multiset<DataSource> misses = HashMultiset.create();
    Multiset<DataSource> totals = HashMultiset.create();
    Map<DataSource, Pattern> patterns = DataSourcePatterns.getPatterns();

    // try-with-resources so the statement and result set are always released
    // (the previous version leaked both on every call).
    try (Statement st = sqlcon.createStatement();
            ResultSet rs = st.executeQuery("select id, code from datanode")) {
        while (rs.next()) {
            String id = rs.getString(1);
            String syscode = rs.getString(2);
            if (DataSource.systemCodeExists(syscode)) {
                DataSource ds = DataSource.getExistingBySystemCode(syscode);
                if (patterns.get(ds) == null)
                    continue; // skip if there is no pattern defined.

                Set<DataSource> matches = DataSourcePatterns.getDataSourceMatches(id);
                if (!matches.contains(ds)) {
                    if (missExamples.get(ds).size() < 10)
                        missExamples.put(ds, id);
                    misses.add(ds);
                }
                totals.add(ds);
            }
        }
    }
    // NOTE(review): the connection itself is left open, as in the original code —
    // confirm whether DBConnector/DataDerby expects the caller to close it.

    for (DataSource ds : totals.elementSet()) {
        int miss = misses.count(ds);
        int total = totals.count(ds);

        if (miss > 0) {
            // Below 4% mismatches is only a warning; at or above it is an error.
            String severity = miss < (total / 25) ? "WARNING" : "ERROR";
            System.out.println(severity + ": " + miss + "/" + total + " (" + miss * 100 / total
                    + "%) ids do not match expected pattern for " + ds);
            System.out.println(severity + ": expected pattern is '" + patterns.get(ds) + "'");
            boolean first = true;
            for (String id : missExamples.get(ds)) {
                System.out.print(first ? severity + ": aberrant ids are e.g. " : ", ");
                first = false;
                System.out.print("'" + id + "'");
            }
            System.out.println();
        }
    }

    // Accumulate into the run-wide totals.
    allMisses.addAll(misses);
    allTotals.addAll(totals);
}

From source file:org.eclipse.sirius.tree.business.internal.dialect.common.tree.TreeItemContainerChildSupport.java

/**
 * Reorders the children of this container so that tree items are grouped by
 * their {@link TreeItemMapping} (in the order the mappings were first
 * encountered), preserving the creation order of the items inside each group.
 */
@Override
public void reorderChilds(Iterable<CreatedOutput> outDesc) {
    // count(mapping) == number of created tree items using that mapping;
    // LinkedHashMultiset also remembers the first-seen order of the mappings.
    final Multiset<TreeItemMapping> subMappings = LinkedHashMultiset.create();
    Set<TreeItemMapping> mappings = new HashSet<TreeItemMapping>();
    final Map<EObject, CreatedOutput> outputToItem = Maps.newHashMap();
    for (CreatedOutput createdOutput : outDesc) {
        EObject createdElement = createdOutput.getCreatedElement();
        outputToItem.put(createdElement, createdOutput);
        if (createdElement instanceof DTreeItem) {
            DTreeItem createdDTreeItem = (DTreeItem) createdElement;
            TreeItemMapping actualMapping = createdDTreeItem.getActualMapping();
            subMappings.add(actualMapping);
            mappings.add(actualMapping);
        }
    }

    // Does not need to sort DTreeItem according to their mapping if there
    // is only one mapping
    if (mappings.size() > 1) {

        // Compute, for each mapping, the index at which its group of items
        // starts. Iterate the DISTINCT mappings via elementSet(): iterating the
        // multiset directly visits each mapping once PER OCCURRENCE, repeatedly
        // overwriting the entry and inflating the start indexes (by a different
        // amount per mapping when the counts differ), which corrupted the order.
        int startIndex = 0;
        final Map<TreeItemMapping, Integer> startIndexes = Maps.newHashMap();
        for (TreeItemMapping itemMapping : subMappings.elementSet()) {
            startIndexes.put(itemMapping, startIndex);
            startIndex += subMappings.count(itemMapping);
        }

        Function<DTreeItem, Integer> getNewIndex = new Function<DTreeItem, Integer>() {

            @Override
            public Integer apply(DTreeItem from) {
                // init with element count : elements with unknown mapping
                // will be placed at the end.
                int index = outputToItem.size();
                TreeItemMapping itemMapping = from.getActualMapping();
                if (itemMapping != null && startIndexes.containsKey(itemMapping)) {
                    index = startIndexes.get(itemMapping);
                }

                CreatedOutput createdOutput = outputToItem.get(from);
                if (createdOutput != null) {
                    // Final position = group start + position within the outputs.
                    return index + createdOutput.getNewIndex();
                }
                return -1;
            }
        };

        ECollections.sort(container.getOwnedTreeItems(), Ordering.natural().onResultOf(getNewIndex));
    }
}

From source file:org.eclipse.sirius.ui.debug.SiriusDebugView.java

/**
 * Registers the "Show Payload Access Log" debug action: it renders the most
 * recent 50 feature accesses recorded by the PayloadMarkerAdapter as a table,
 * preceded by the unique access contexts and their occurrence counts.
 */
private void addShowPayloadAccessLogAction() {
    addAction("Show Payload Access Log", new Runnable() {
        @Override
        public void run() {
            // Only the last `max` accesses are shown in the tabular report.
            int max = 50;
            List<FeatureAccess> log = PayloadMarkerAdapter.INSTANCE.getAccessLog();
            int totalSize = log.size();
            int shown = Math.min(totalSize, max);
            TabularReport tr = new TabularReport(/* "Timestamp", */"EObject", "Feature");

            try {
                // Disable the adapter while reading the log — presumably so that
                // building the report does not itself record new payload
                // accesses; confirm against PayloadMarkerAdapter.
                PayloadMarkerAdapter.INSTANCE.setEnable(false);
                for (int i = log.size() > max ? log.size() - max : 0; i < log.size(); i++) {
                    FeatureAccess featureAccess = log.get(i);
                    tr.addLine(Arrays.asList(/*
                                              * String.format("%tT",
                                              * featureAccess.timestamp),
                                              */((Component) featureAccess.setting.getEObject()).getName(),
                            featureAccess.setting.getEStructuralFeature().getName()));
                }
            } finally {
                PayloadMarkerAdapter.INSTANCE.setEnable(true);
            }
            StringBuilder sb = new StringBuilder();
            sb.append("Showing " + shown + " of " + totalSize + " accesses.\n");
            // count(stack) == number of accesses recorded from that context.
            Multiset<String> contexts = PayloadMarkerAdapter.INSTANCE.getUniqueContexts();
            sb.append("Unique contexts: " + contexts.elementSet().size()).append("\n\n");

            int i = 0;
            for (String stack : contexts.elementSet()) {
                int count = contexts.count(stack);
                sb.append("Context #" + i++ + " (" + count + " occurrences)").append("\n");
                sb.append(stack).append("\n");
            }

            sb.append("\n").append(tr.print()).append("\n");
            setText(sb.toString());
        }
    });
}

From source file:visualizer.corpus.zip.InvertedZipCorpus.java

/**
 * Creates the n-grams (sizes 1 through nrGrams) for the given corpus file.
 *
 * @param filename the corpus file to process
 * @return one {@link Ngram} per distinct n-gram, with its frequency set;
 *         empty if the file has no content
 * @throws IOException if the corpus content cannot be read
 */
private List<Ngram> getNgramsFromFile(String filename) throws IOException {
    String filecontent = corpus.getFullContent(filename);
    List<Ngram> ngrams = new ArrayList<Ngram>();

    if (filecontent == null || StringUtil.isEmpty(filecontent)) {
        return ngrams;
    }

    // Bag of terms and n-grams: count(x) == frequency of x in the document.
    Multiset<String> bag = HashMultiset.create();
    TermExtractor<String> termExtractor = new RegExpTermExtractor(filecontent);

    // One sliding buffer per n-gram size (2..nrGrams); a buffer emits a joined
    // n-gram once it has seen enough terms.
    StringCircularBuffer[] buffers = new StringCircularBuffer[nrGrams - 1];
    for (int i = 2; i <= nrGrams; i++) {
        buffers[i - 2] = new StringCircularBuffer(i);
        buffers[i - 2].setSeparator(Corpus.NGRAM_SEPARATOR);
    }

    String term;
    while ((term = termExtractor.next()) != null) {
        if (!StringUtil.isEmpty(term)) {
            bag.add(term);
            for (StringCircularBuffer buffer : buffers) {
                String ngram = buffer.add(term);
                if (ngram != null) {
                    bag.add(ngram);
                }
            }
        }
    }
    // Flush partially filled buffers so trailing (shorter) n-grams count too.
    for (StringCircularBuffer buffer : buffers) {
        String leftover = buffer.reset();
        if (leftover != null) {
            bag.add(leftover);
        }
    }

    // Iterate the distinct entries. The previous version iterated bag.iterator(),
    // which yields each element once PER OCCURRENCE, so an n-gram appearing k
    // times produced k duplicate Ngram objects, each with the full frequency.
    for (Multiset.Entry<String> entry : bag.entrySet()) {
        Ngram ngram = new Ngram(entry.getElement());
        ngram.setFrequency(entry.getCount());
        ngrams.add(ngram);
    }

    return ngrams;
}

From source file:edu.uci.ics.sourcerer.tools.java.component.identifier.stats.CoverageCalculator.java

public static void calculateJarCoverage() {
    TaskProgressLogger task = TaskProgressLogger.get();

    task.start("Calculating coverage by " + JAR_REPO.getValue().getPath() + " of external imports from "
            + EXTERNAL_REPO.getValue() + " and missing imports from " + MISSING_REPO.getValue());

    // Load the jar repo
    JavaRepository jarRepo = JavaRepositoryFactory.INSTANCE.loadJavaRepository(JAR_REPO);

    task.start("Populating the prefix tree");
    SourcedFqnNode root = SourcedFqnNode.createRoot();

    boolean loaded = false;
    if (SOURCED_CACHE.getValue().exists()) {
        task.start("Loading cache");
        try (BufferedReader reader = IOUtils.createBufferedReader(SOURCED_CACHE.getValue())) {
            root.createLoader().load(reader);
            loaded = true;/*from  w  w w  .  j a v  a 2s  .co m*/
        } catch (IOException | NoSuchElementException e) {
            logger.log(Level.SEVERE, "Error loading cache", e);
            root = SourcedFqnNode.createRoot();
        }
        task.finish();
    }
    if (!loaded) {
        int nonEmptyMaven = 0;
        task.start("Processing maven jars", "jars processed", 10_000);
        for (JarFile jar : jarRepo.getMavenJarFiles()) {
            boolean go = true;
            for (String fqn : FileUtils.getClassFilesFromJar(jar.getFile().toFile())) {
                if (go) {
                    nonEmptyMaven++;
                    go = false;
                }
                root.getChild(fqn.replace('$', '/'), '/').addSource(Source.MAVEN,
                        jar.getProperties().HASH.getValue());
            }
            task.progress();
        }
        task.finish();

        int nonEmptyProject = 0;
        task.start("Processing project jars", "jars processed", 10_000);
        for (JarFile jar : jarRepo.getProjectJarFiles()) {
            boolean go = true;
            for (String fqn : FileUtils.getClassFilesFromJar(jar.getFile().toFile())) {
                if (go) {
                    nonEmptyProject++;
                    go = false;
                }
                root.getChild(fqn.replace('$', '/'), '/').addSource(Source.PROJECT,
                        jar.getProperties().HASH.getValue());
            }
            task.progress();
        }
        task.finish();

        // Save the prefix tree
        task.start("Saving prefix tree cache");
        try (BufferedWriter writer = IOUtils
                .makeBufferedWriter(FileUtils.ensureWriteable(SOURCED_CACHE.getValue()))) {
            root.createSaver().save(writer);
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Error writing log", e);
            FileUtils.delete(SOURCED_CACHE.getValue());
        }
        task.finish();

        int mavenClassFiles = 0;
        int projectClassFiles = 0;
        int mavenUnique = 0;
        int projectUnique = 0;
        Set<SourcedFqnNode> mavenPackages = new HashSet<>();
        Set<SourcedFqnNode> projectPackages = new HashSet<>();
        for (SourcedFqnNode node : root.getPostOrderIterable()) {
            if (node.has(Source.MAVEN)) {
                mavenClassFiles += node.getCount(Source.MAVEN);
                mavenUnique++;
                mavenPackages.add(node.getParent());
            }
            if (node.has(Source.PROJECT)) {
                projectClassFiles += node.getCount(Source.PROJECT);
                projectUnique++;
                projectPackages.add(node.getParent());
            }
        }
        task.start("Reporting statistics on jars");
        task.start("Maven");
        task.report(nonEmptyMaven + " non-empty jars");
        task.report(mavenClassFiles + " class files");
        task.report(mavenUnique + " unique types");
        task.report(mavenPackages.size() + " packages");
        task.finish();

        task.start("Project");
        task.report(nonEmptyProject + " non-empty jars");
        task.report(projectClassFiles + " class files");
        task.report(projectUnique + " unique types");
        task.report(projectPackages.size() + " packages");
        task.finish();
        task.finish();
    }
    task.finish();

    // Load the external repo
    ExtractedJavaRepository externalRepo = JavaRepositoryFactory.INSTANCE
            .loadExtractedJavaRepository(EXTERNAL_REPO);
    // load the missing repo
    ExtractedJavaRepository missingRepo = JavaRepositoryFactory.INSTANCE
            .loadExtractedJavaRepository(MISSING_REPO);

    NumberFormat format = NumberFormat.getNumberInstance();
    format.setMaximumFractionDigits(2);
    {
        task.start("Processing extracted projects for missing and external types", "projects processed",
                10_000);
        // Averager for external FQNs per project
        Averager<Integer> externalFqns = Averager.create();
        // Averager for missing FQNs per project
        Averager<Integer> missingFqns = Averager.create();
        for (ExtractedJavaProject externalProject : externalRepo.getProjects()) {
            String loc = externalProject.getLocation().toString();
            ExtractedJavaProject missingProject = missingRepo.getProject(externalProject.getLocation());

            ReaderBundle externalBundle = ReaderBundle.create(externalProject.getExtractionDir().toFile(),
                    externalProject.getCompressedFile().toFile());
            ReaderBundle missingBundle = ReaderBundle.create(missingProject.getExtractionDir().toFile(),
                    missingProject.getCompressedFile().toFile());

            int externalCount = 0;
            int missingCount = 0;

            // Add all the imports for this project
            for (ImportEX imp : externalBundle.getTransientImports()) {
                root.getChild(imp.getImported(), '.').addSource(Source.IMPORTED, loc);
            }

            Set<String> validMissing = new HashSet<>();
            // Add the external types
            for (MissingTypeEX missing : externalBundle.getTransientMissingTypes()) {
                validMissing.add(missing.getFqn());
                root.getChild(missing.getFqn(), '.').addSource(Source.EXTERNAL, loc);
                externalCount++;
            }

            // Add the missing types
            for (MissingTypeEX missing : missingBundle.getTransientMissingTypes()) {
                if (validMissing.contains(missing.getFqn())) {
                    root.getChild(missing.getFqn(), '.').addSource(Source.MISSING, loc);
                    missingCount++;
                }
            }

            externalFqns.addValue(externalCount);
            missingFqns.addValue(missingCount);

            task.progress();
        }
        task.finish();

        //      Averager<Integer> projectsPerFQN = Averager.create();
        //      for (SourcedFqnNode fqn : root.getPreOrderIterable()) {
        //        if (fqn.getCount(Source.MISSING) > 0) {
        //          projectsPerFQN.addValue(fqn.getCount(Source.MISSING));
        //        }
        //      }

        Percenterator percent = Percenterator.create(externalRepo.getProjectCount());
        task.start("Reporting missing type information");
        task.report(percent.format(externalFqns.getNonZeroCount()) + " projects with external types");
        task.report(percent.format(missingFqns.getNonZeroCount()) + " projects with missing types");
        task.report(format.format(externalFqns.getMean()) + " ("
                + format.format(externalFqns.getStandardDeviation())
                + ") imported external types per project, on average");
        task.report(format.format(externalFqns.getNonZeroMean()) + " ("
                + format.format(externalFqns.getNonZeroStandardDeviation())
                + ") imported external types per project containing at least one external type, on average");
        task.report(
                format.format(missingFqns.getMean()) + " (" + format.format(missingFqns.getStandardDeviation())
                        + ") imported missing types per project, on average");
        task.report(format.format(missingFqns.getNonZeroMean()) + " ("
                + format.format(missingFqns.getNonZeroStandardDeviation())
                + ") missing FQNs per project containing at least one missing FQN, on average");
        task.finish();

        //      missingFqns.writeValueMap(MISSING_FQNS_PER_PROJECT.getValue());
        //      projectsPerFQN.writeValueMap(PROJECTS_PER_MISSING_FQN.getValue());
    }

    // Report general statistics
    {
        int uniqueTotal = 0;
        Multiset<Source> uniqueByType = EnumMultiset.create(Source.class);
        Multiset<Source> totalByType = EnumMultiset.create(Source.class);
        for (SourcedFqnNode node : root.getPostOrderIterable()) {
            if (node.hasSource()) {
                uniqueTotal++;
            }
            for (Source source : Source.values()) {
                int count = node.getCount(source);
                if (count > 0) {
                    uniqueByType.add(source);
                    totalByType.add(source, count);
                }
            }
        }

        Percenterator uniqueP = Percenterator.create(uniqueTotal);

        task.start("Reporting FQN counts broken down by source");
        for (Source source : Source.values()) {
            task.report(source.name() + ":");
            task.report("  Unique: " + uniqueP.format(uniqueByType.count(source)));
            task.report("  Total:  " + totalByType.count(source));
        }
        task.report("Sum:");
        task.report("  Unique: " + uniqueTotal);
        task.finish();
    }

    // Identify the most popular imported types and packages
    {
        // For each non-local source, write two report files: types ranked by usage
        // count, and parent packages ranked by number of distinct source IDs.
        for (final Source source : EnumSet.of(Source.IMPORTED, Source.EXTERNAL, Source.MISSING)) {
            {
                // Order types by ascending count for this source, breaking ties with
                // the node's natural ordering; descendingSet() below then iterates
                // most-popular first.
                TreeSet<SourcedFqnNode> popularTypes = new TreeSet<>(new Comparator<SourcedFqnNode>() {
                    @Override
                    public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
                        int cmp = Integer.compare(o1.getCount(source), o2.getCount(source));
                        if (cmp == 0) {
                            return o1.compareTo(o2);
                        } else {
                            return cmp;
                        }
                    }
                });

                // Collect every FQN that appears in this source.
                for (SourcedFqnNode fqn : root.getPostOrderIterable()) {
                    if (fqn.has(source)) {
                        popularTypes.add(fqn);
                    }
                }

                // Emit "<count>\t<fqn>" lines, most popular first.
                task.start("Logging popular types listing for " + source.name());
                try (LogFileWriter writer = IOUtils.createLogFileWriter(
                        new File(Arguments.OUTPUT.getValue(), source.name() + "-popular-types.txt"))) {
                    for (SourcedFqnNode fqn : popularTypes.descendingSet()) {
                        writer.write(fqn.getCount(source) + "\t" + fqn.getFqn());
                    }
                } catch (IOException e) {
                    logger.log(Level.SEVERE, "Error writing file", e);
                }
                task.finish();
            }

            {
                // Map each parent (package) node to the set of distinct source IDs
                // contributed by its member types for this source.
                final Multimap<SourcedFqnNode, String> packages = HashMultimap.create();

                for (SourcedFqnNode fqn : root.getPostOrderIterable()) {
                    if (fqn.has(source)) {
                        packages.putAll(fqn.getParent(), fqn.getSourceIDs(source));
                    }
                }

                // Sort packages by descending source-ID count (note the negated
                // compare), ties broken by natural ordering.
                List<SourcedFqnNode> sorted = new ArrayList<>(packages.keySet());
                Collections.sort(sorted, new Comparator<SourcedFqnNode>() {
                    @Override
                    public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
                        int cmp = -Integer.compare(packages.get(o1).size(), packages.get(o2).size());
                        if (cmp == 0) {
                            return o1.compareTo(o2);
                        } else {
                            return cmp;
                        }
                    }
                });

                task.start("Logging popular packages listing for " + source.name());
                try (LogFileWriter writer = IOUtils.createLogFileWriter(
                        new File(Arguments.OUTPUT.getValue(), source.name() + "-popular-packages.txt"))) {
                    for (SourcedFqnNode fqn : sorted) {
                        writer.write(packages.get(fqn).size() + "\t" + fqn.getFqn());
                    }
                } catch (IOException e) {
                    logger.log(Level.SEVERE, "Error writing file", e);
                }
                task.finish();
            }
        }
    }

    // Identify the most popular external types, partitioned by where they were found:
    // in both maven and project jars, only in maven, only in project, or in neither
    {
        {
            // Partition the EXTERNAL types into four buckets by where else they
            // were seen (maven and/or project jars). All four sets rank by the
            // EXTERNAL usage count via the same comparator.
            TreeSet<SourcedFqnNode> popularJoint = new TreeSet<>(
                    SourcedFqnNode.createComparator(Source.EXTERNAL));
            TreeSet<SourcedFqnNode> popularMaven = new TreeSet<>(
                    SourcedFqnNode.createComparator(Source.EXTERNAL));
            TreeSet<SourcedFqnNode> popularProject = new TreeSet<>(
                    SourcedFqnNode.createComparator(Source.EXTERNAL));
            TreeSet<SourcedFqnNode> popularMissing = new TreeSet<>(
                    SourcedFqnNode.createComparator(Source.EXTERNAL));

            for (SourcedFqnNode fqn : root.getPostOrderIterable()) {
                if (fqn.has(Source.EXTERNAL)) {
                    boolean maven = fqn.has(Source.MAVEN);
                    boolean project = fqn.has(Source.PROJECT);
                    if (maven && project) {
                        popularJoint.add(fqn);
                    } else {
                        if (maven) {
                            popularMaven.add(fqn);
                        } else if (project) {
                            popularProject.add(fqn);
                        } else {
                            // Seen externally but in neither jar corpus.
                            popularMissing.add(fqn);
                        }
                    }
                }
            }

            // Each report lists "<external count>\t<fqn>", most popular first.
            task.start("Logging popular external joint types");
            try (LogFileWriter writer = IOUtils
                    .createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "joint-popular-types.txt"))) {
                for (SourcedFqnNode fqn : popularJoint.descendingSet()) {
                    writer.write(fqn.getCount(Source.EXTERNAL) + "\t" + fqn.getFqn());
                }
            } catch (IOException e) {
                logger.log(Level.SEVERE, "Error writing file", e);
            }
            task.finish();

            task.start("Logging popular external types unique to maven");
            try (LogFileWriter writer = IOUtils.createLogFileWriter(
                    new File(Arguments.OUTPUT.getValue(), "maven-unique-popular-types.txt"))) {
                for (SourcedFqnNode fqn : popularMaven.descendingSet()) {
                    writer.write(fqn.getCount(Source.EXTERNAL) + "\t" + fqn.getFqn());
                }
            } catch (IOException e) {
                logger.log(Level.SEVERE, "Error writing file", e);
            }
            task.finish();

            task.start("Logging popular external types unique to project");
            try (LogFileWriter writer = IOUtils.createLogFileWriter(
                    new File(Arguments.OUTPUT.getValue(), "project-unique-popular-types.txt"))) {
                for (SourcedFqnNode fqn : popularProject.descendingSet()) {
                    writer.write(fqn.getCount(Source.EXTERNAL) + "\t" + fqn.getFqn());
                }
            } catch (IOException e) {
                logger.log(Level.SEVERE, "Error writing file", e);
            }
            task.finish();

            task.start("Logging popular missing external types");
            try (LogFileWriter writer = IOUtils.createLogFileWriter(
                    new File(Arguments.OUTPUT.getValue(), "missing-unique-popular-types.txt"))) {
                for (SourcedFqnNode fqn : popularMissing.descendingSet()) {
                    writer.write(fqn.getCount(Source.EXTERNAL) + "\t" + fqn.getFqn());
                }
            } catch (IOException e) {
                logger.log(Level.SEVERE, "Error writing file", e);
            }
            task.finish();
        }
        {
            // Same four-way partition at package granularity: map each parent
            // node to the distinct EXTERNAL source IDs of its member types.
            final Multimap<SourcedFqnNode, String> jointPackages = HashMultimap.create();
            final Multimap<SourcedFqnNode, String> mavenPackages = HashMultimap.create();
            final Multimap<SourcedFqnNode, String> projectPackages = HashMultimap.create();
            final Multimap<SourcedFqnNode, String> missingPackages = HashMultimap.create();

            for (SourcedFqnNode fqn : root.getPostOrderIterable()) {
                if (fqn.has(Source.EXTERNAL)) {
                    boolean maven = fqn.has(Source.MAVEN);
                    boolean project = fqn.has(Source.PROJECT);
                    if (maven && project) {
                        jointPackages.putAll(fqn.getParent(), fqn.getSourceIDs(Source.EXTERNAL));
                    } else {
                        if (maven) {
                            mavenPackages.putAll(fqn.getParent(), fqn.getSourceIDs(Source.EXTERNAL));
                        } else if (project) {
                            projectPackages.putAll(fqn.getParent(), fqn.getSourceIDs(Source.EXTERNAL));
                        } else {
                            missingPackages.putAll(fqn.getParent(), fqn.getSourceIDs(Source.EXTERNAL));
                        }
                    }
                }
            }

            // Each of the four sections below sorts packages by descending
            // source-ID count (negated compare, ties by natural ordering) and
            // writes "<count>\t<fqn>" lines.
            {
                List<SourcedFqnNode> sorted = new ArrayList<>(jointPackages.keySet());
                Collections.sort(sorted, new Comparator<SourcedFqnNode>() {
                    @Override
                    public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
                        int cmp = -Integer.compare(jointPackages.get(o1).size(), jointPackages.get(o2).size());
                        if (cmp == 0) {
                            return o1.compareTo(o2);
                        } else {
                            return cmp;
                        }
                    }
                });

                task.start("Logging popular external joint packages");
                try (LogFileWriter writer = IOUtils.createLogFileWriter(
                        new File(Arguments.OUTPUT.getValue(), "joint-popular-packages.txt"))) {
                    for (SourcedFqnNode fqn : sorted) {
                        writer.write(jointPackages.get(fqn).size() + "\t" + fqn.getFqn());
                    }
                } catch (IOException e) {
                    logger.log(Level.SEVERE, "Error writing file", e);
                }
                task.finish();
            }

            {
                List<SourcedFqnNode> sorted = new ArrayList<>(mavenPackages.keySet());
                Collections.sort(sorted, new Comparator<SourcedFqnNode>() {
                    @Override
                    public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
                        int cmp = -Integer.compare(mavenPackages.get(o1).size(), mavenPackages.get(o2).size());
                        if (cmp == 0) {
                            return o1.compareTo(o2);
                        } else {
                            return cmp;
                        }
                    }
                });

                task.start("Logging popular packages unique to maven");
                try (LogFileWriter writer = IOUtils.createLogFileWriter(
                        new File(Arguments.OUTPUT.getValue(), "maven-unique-popular-packages.txt"))) {
                    for (SourcedFqnNode fqn : sorted) {
                        writer.write(mavenPackages.get(fqn).size() + "\t" + fqn.getFqn());
                    }
                } catch (IOException e) {
                    logger.log(Level.SEVERE, "Error writing file", e);
                }
                task.finish();
            }

            {
                List<SourcedFqnNode> sorted = new ArrayList<>(projectPackages.keySet());
                Collections.sort(sorted, new Comparator<SourcedFqnNode>() {
                    @Override
                    public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
                        int cmp = -Integer.compare(projectPackages.get(o1).size(),
                                projectPackages.get(o2).size());
                        if (cmp == 0) {
                            return o1.compareTo(o2);
                        } else {
                            return cmp;
                        }
                    }
                });

                task.start("Logging popular packages unique to project");
                try (LogFileWriter writer = IOUtils.createLogFileWriter(
                        new File(Arguments.OUTPUT.getValue(), "project-unique-popular-packages.txt"))) {
                    for (SourcedFqnNode fqn : sorted) {
                        writer.write(projectPackages.get(fqn).size() + "\t" + fqn.getFqn());
                    }
                } catch (IOException e) {
                    logger.log(Level.SEVERE, "Error writing file", e);
                }
                task.finish();
            }

            {
                List<SourcedFqnNode> sorted = new ArrayList<>(missingPackages.keySet());
                Collections.sort(sorted, new Comparator<SourcedFqnNode>() {
                    @Override
                    public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
                        int cmp = -Integer.compare(missingPackages.get(o1).size(),
                                missingPackages.get(o2).size());
                        if (cmp == 0) {
                            return o1.compareTo(o2);
                        } else {
                            return cmp;
                        }
                    }
                });

                task.start("Logging popular packages unique to missing");
                try (LogFileWriter writer = IOUtils.createLogFileWriter(
                        new File(Arguments.OUTPUT.getValue(), "missing-unique-popular-packages.txt"))) {
                    for (SourcedFqnNode fqn : sorted) {
                        writer.write(missingPackages.get(fqn).size() + "\t" + fqn.getFqn());
                    }
                } catch (IOException e) {
                    logger.log(Level.SEVERE, "Error writing file", e);
                }
                task.finish();
            }
        }
    }

    // For increasing usage thresholds, report how much of the EXTERNAL and
    // MISSING type usage is covered by the maven/project jar corpora.
    // "Unique" counts distinct FQNs at or above the threshold; "Total" sums
    // their usage counts.
    for (int threshold : new int[] { 1, 2, 10, 50, 100 }) {
        Multiset<String> externalUniqueByString = HashMultiset.create(6);
        Multiset<String> externalTotalByString = HashMultiset.create(6);
        Multiset<String> missingUniqueByString = HashMultiset.create(6);
        Multiset<String> missingTotalByString = HashMultiset.create(6);

        int externalUniqueTotal = 0;
        int externalTotalTotal = 0;
        int missingUniqueTotal = 0;
        int missingTotalTotal = 0;
        for (SourcedFqnNode node : root.getPostOrderIterable()) {
            // Bucket each sufficiently-used EXTERNAL type by where it was found.
            // Note a type found in both corpora contributes to "Maven",
            // "Project", AND "Maven and Project".
            int externalCount = node.getCount(Source.EXTERNAL);
            if (externalCount >= threshold) {
                externalUniqueTotal++;
                externalTotalTotal += externalCount;
                boolean maven = node.has(Source.MAVEN);
                boolean project = node.has(Source.PROJECT);
                if (maven) {
                    externalUniqueByString.add("Maven");
                    externalTotalByString.add("Maven", externalCount);
                    if (!project) {
                        externalUniqueByString.add("Maven only");
                        externalTotalByString.add("Maven only", externalCount);
                    } else {
                        externalUniqueByString.add("Project");
                        externalTotalByString.add("Project", externalCount);
                        externalUniqueByString.add("Maven and Project");
                        externalTotalByString.add("Maven and Project", externalCount);
                    }
                } else if (project) {
                    externalUniqueByString.add("Project");
                    externalTotalByString.add("Project", externalCount);
                    externalUniqueByString.add("Project only");
                    externalTotalByString.add("Project only", externalCount);
                } else {
                    externalUniqueByString.add("Nothing");
                    externalTotalByString.add("Nothing", externalCount);
                }
            }

            // Same bucketing for MISSING types.
            int missingCount = node.getCount(Source.MISSING);
            if (missingCount >= threshold) {
                missingUniqueTotal++;
                missingTotalTotal += missingCount;
                boolean maven = node.has(Source.MAVEN);
                boolean project = node.has(Source.PROJECT);
                if (maven) {
                    missingUniqueByString.add("Maven");
                    missingTotalByString.add("Maven", missingCount);
                    if (!project) {
                        missingUniqueByString.add("Maven only");
                        missingTotalByString.add("Maven only", missingCount);
                    } else {
                        missingUniqueByString.add("Project");
                        missingTotalByString.add("Project", missingCount);
                        missingUniqueByString.add("Maven and Project");
                        missingTotalByString.add("Maven and Project", missingCount);
                    }
                } else if (project) {
                    missingUniqueByString.add("Project");
                    missingTotalByString.add("Project", missingCount);
                    missingUniqueByString.add("Project only");
                    missingTotalByString.add("Project only", missingCount);
                } else {
                    missingUniqueByString.add("Nothing");
                    missingTotalByString.add("Nothing", missingCount);
                }
            }
        }

        Percenterator externalUniqueP = Percenterator.create(externalUniqueTotal);
        Percenterator missingUniqueP = Percenterator.create(missingUniqueTotal);
        Percenterator externalTotalP = Percenterator.create(externalTotalTotal);
        // BUG FIX: was Percenterator.create(externalTotalTotal), which gave the
        // missing "Total" percentages the wrong denominator.
        Percenterator missingTotalP = Percenterator.create(missingTotalTotal);

        task.start("Reporting external import coverage for threshold " + threshold);
        for (String condition : externalUniqueByString.elementSet()) {
            task.report(condition + ":");
            task.report("  Unique: " + externalUniqueP.format(externalUniqueByString.count(condition)));
            task.report("  Total:  " + externalTotalP.format(externalTotalByString.count(condition)));
        }
        task.report("Sum:");
        task.report("  Unique: " + externalUniqueTotal);
        task.report("  Total: " + externalTotalTotal);
        task.finish();
        task.start("Reporting missing import coverage for threshold " + threshold);
        for (String condition : missingUniqueByString.elementSet()) {
            task.report(condition + ":");
            task.report("  Unique: " + missingUniqueP.format(missingUniqueByString.count(condition)));
            task.report("  Total:  " + missingTotalP.format(missingTotalByString.count(condition)));
        }
        task.report("Sum:");
        task.report("  Unique: " + missingUniqueTotal);
        task.report("  Total: " + missingTotalTotal);
        task.finish();
    }

    {
        // Jar coverage: how many maven/project jars contributed at least one
        // type that was actually imported somewhere.
        Set<String> allMavenJars = new HashSet<>();
        Set<String> coveredMavenJars = new HashSet<>();
        Set<String> allProjectJars = new HashSet<>();
        Set<String> coveredProjectJars = new HashSet<>();

        for (SourcedFqnNode fqn : root.getPostOrderIterable()) {
            // Every jar seen anywhere counts toward the denominator; only jars
            // backing an imported type count toward the numerator.
            allMavenJars.addAll(fqn.getSourceIDs(Source.MAVEN));
            allProjectJars.addAll(fqn.getSourceIDs(Source.PROJECT));
            if (fqn.has(Source.IMPORTED)) {
                coveredMavenJars.addAll(fqn.getSourceIDs(Source.MAVEN));
                coveredProjectJars.addAll(fqn.getSourceIDs(Source.PROJECT));
            }
        }

        Percenterator mavenPercent = Percenterator.create(allMavenJars.size());
        Percenterator projectPercent = Percenterator.create(allProjectJars.size());
        task.start("Reporting coverage of jars");
        task.report(mavenPercent.format(coveredMavenJars.size()) + " maven jars had at least one type imported");
        task.report(projectPercent.format(coveredProjectJars.size()) + " project jars had at least one type imported");
        task.finish();
    }

    //    {
    //      // Find all the most popular fqns per source
    //      for (final Source source : Source.values()) {
    //        TreeSet<SourcedFqnNode> sorted = new TreeSet<>(new Comparator<SourcedFqnNode>() {
    //          @Override
    //          public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
    //            int cmp = Integer.compare(o1.getCount(source), o2.getCount(source));
    //            if (cmp == 0) {
    //              return o1.compareTo(o2);
    //            } else {
    //              return cmp;
    //            }
    //          }
    //        });
    //        
    //        for (SourcedFqnNode node : root.getPostOrderIterable()) {
    //          if (node.has(source)) {
    //            sorted.add(node);
    //          }
    //        }
    //        
    //        task.start("Logging popular types listing for " + source.name());
    //        try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), source.name() + "-popular.txt"))) {
    //          for (SourcedFqnNode fqn : sorted.descendingSet()) {
    //            writer.write(fqn.getCount(source) + "\t" + fqn.getFqn());
    //          }
    //        } catch (IOException e) {
    //          logger.log(Level.SEVERE, "Error writing file", e);
    //        }
    //        task.finish();
    //      }
    //    }
    //    
    //    {
    //      // Find all the fqns unique to that source
    //      for (final Source source : Source.values()) {
    //        TreeSet<SourcedFqnNode> sorted = new TreeSet<>(new Comparator<SourcedFqnNode>() {
    //          @Override
    //          public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
    //            int cmp = Integer.compare(o1.getCount(source), o2.getCount(source));
    //            if (cmp == 0) {
    //              return o1.compareTo(o2);
    //            } else {
    //              return cmp;
    //            }
    //          }
    //        });
    //        
    //        Set<Source> expected = EnumSet.of(Source.MISSING, source);
    //        for (SourcedFqnNode node : root.getPostOrderIterable()) {
    //          Set<Source> sources = node.getSources();
    //          if (sources.containsAll(expected) && expected.containsAll(sources)) {
    //            sorted.add(node);
    //          }
    //        }
    //        
    //        task.start("Logging missing types listing");
    //        try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), source.name() + "-missing.txt"))) {
    //          for (SourcedFqnNode fqn : sorted.descendingSet()) {
    //            writer.write(fqn.getCount(Source.MISSING) + "\t" + fqn.getFqn());
    //          }
    //        } catch (IOException e) {
    //          logger.log(Level.SEVERE, "Error writing file", e);
    //        }
    //        task.finish();
    //      }
    //    }
    //    
    //    {
    //      final Multiset<SourcedFqnNode> maven = HashMultiset.create();
    //      final Multiset<SourcedFqnNode> project = HashMultiset.create();
    //      final Multiset<SourcedFqnNode> mavenProject= HashMultiset.create();
    //      final Multiset<SourcedFqnNode> missing = HashMultiset.create();
    //      
    //      
    //      // Find the package specific info
    //      for (SourcedFqnNode node : root.getPostOrderIterable()) {
    //        int missingCount = node.getCount(Source.MISSING);
    //        if (missingCount > 0) {
    //          int mavenCount = node.getCount(Source.MAVEN);
    //          int projectCount = node.getCount(Source.PROJECT);
    //          if (mavenCount > 0) {
    //            if (projectCount == 0) {
    //              maven.add(node.getParent());
    //            } else {
    //              mavenProject.add(node.getParent());
    //            }
    //          } else if (projectCount > 0) {
    //            project.add(node.getParent());
    //          } else {
    //            missing.add(node.getParent());
    //          }
    //        }
    //      }
    //      
    //      task.start("Reporting package breakdown");
    //      task.report("Maven only:        " + maven.elementSet().size());
    //      task.report("Project only:      " + project.elementSet().size());
    //      task.report("Maven and Project: " + mavenProject.elementSet().size());
    //      task.report("Missing:           " + missing.elementSet().size());
    //      task.finish();
    //      
    //      task.start("Logging package popularity");
    //      // Maven
    //      SourcedFqnNode[] nodes = maven.elementSet().toArray(new SourcedFqnNode[maven.elementSet().size()]);
    //      Arrays.sort(nodes, new Comparator<SourcedFqnNode>() {
    //        @Override
    //        public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
    //          int cmp = Integer.compare(maven.count(o2), maven.count(o1));
    //          if (cmp == 0) {
    //            return o1.compareTo(o2);
    //          } else {
    //            return cmp;
    //          }
    //        }
    //      });
    //      try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "maven-pkgs.txt"))) {
    //        for (SourcedFqnNode pkg : nodes) {
    //          writer.write(maven.count(pkg) + "\t" + pkg.getFqn());
    //        }
    //      } catch (IOException e) {
    //        logger.log(Level.SEVERE, "Error writing file", e);
    //      }
    //      
    //      // Project
    //      nodes = project.elementSet().toArray(new SourcedFqnNode[project.elementSet().size()]);
    //      Arrays.sort(nodes, new Comparator<SourcedFqnNode>() {
    //        @Override
    //        public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
    //          int cmp = Integer.compare(project.count(o2), project.count(o1));
    //          if (cmp == 0) {
    //            return o1.compareTo(o2);
    //          } else {
    //            return cmp;
    //          }
    //        }
    //      });
    //      try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "project-pkgs.txt"))) {
    //        for (SourcedFqnNode pkg : nodes) {
    //          writer.write(project.count(pkg) + "\t" + pkg.getFqn());
    //        }
    //      } catch (IOException e) {
    //        logger.log(Level.SEVERE, "Error writing file", e);
    //      }
    //      
    //      // Maven/Project
    //      nodes = mavenProject.elementSet().toArray(new SourcedFqnNode[mavenProject.elementSet().size()]);
    //      Arrays.sort(nodes, new Comparator<SourcedFqnNode>() {
    //        @Override
    //        public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
    //          int cmp = Integer.compare(mavenProject.count(o2), mavenProject.count(o1));
    //          if (cmp == 0) {
    //            return o1.compareTo(o2);
    //          } else {
    //            return cmp;
    //          }
    //        }
    //      });
    //      try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "maven-project-pkgs.txt"))) {
    //        for (SourcedFqnNode pkg : nodes) {
    //          writer.write(mavenProject.count(pkg) + "\t" + pkg.getFqn());
    //        }
    //      } catch (IOException e) {
    //        logger.log(Level.SEVERE, "Error writing file", e);
    //      }
    //      
    //      nodes = missing.elementSet().toArray(new SourcedFqnNode[missing.elementSet().size()]);
    //      Arrays.sort(nodes, new Comparator<SourcedFqnNode>() {
    //        @Override
    //        public int compare(SourcedFqnNode o1, SourcedFqnNode o2) {
    //          int cmp = Integer.compare(missing.count(o2), missing.count(o1));
    //          if (cmp == 0) {
    //            return o1.compareTo(o2);
    //          } else {
    //            return cmp;
    //          }
    //        }
    //      });
    //      try (LogFileWriter writer = IOUtils.createLogFileWriter(new File(Arguments.OUTPUT.getValue(), "missing-pkgs.txt"))) {
    //        for (SourcedFqnNode pkg : nodes) {
    //          writer.write(missing.count(pkg) + "\t" + pkg.getFqn());
    //        }
    //      } catch (IOException e) {
    //        logger.log(Level.SEVERE, "Error writing file", e);
    //      }
    //      task.finish();
    //    }

    task.finish();
}