Example usage for com.google.common.collect Multiset elementSet

Introduction

On this page you can find example usages of com.google.common.collect Multiset#elementSet(), drawn from open-source projects.

Prototype

Set<E> elementSet();

Document

Returns the set of distinct elements contained in this multiset.
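
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what elementSet() gives you: a live view of the distinct elements, while size() and count() still report multiplicities.

import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class ElementSetDemo {
    public static void main(String[] args) {
        Multiset<String> words = HashMultiset.create();
        words.add("a");
        words.add("b");
        words.add("a");

        System.out.println(words.size());       // 3 -- total occurrences
        System.out.println(words.elementSet()); // [a, b] -- distinct elements, order unspecified

        // the canonical pattern used throughout the examples below:
        // iterate the distinct elements, look up each one's frequency
        for (String word : words.elementSet()) {
            System.out.println(word + " x " + words.count(word));
        }
    }
}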

Usage

From source file:com.cloudera.knittingboar.records.TwentyNewsgroupsRecordFactory.java

/**
 * Processes a single line of input into a target variable and a feature vector.
 *
 * @throws Exception if the input line is malformed
 */
public int processLine(String line, Vector v) throws Exception {

    String[] parts = line.split(this.class_id_split_string);
    if (parts.length < 2) {
        throw new Exception("wtf: line not formed well.");
    }

    String newsgroup_name = parts[0];
    String msg = parts[1];

    // p.269 ---------------------------------------------------------
    Map<String, Set<Integer>> traceDictionary = new TreeMap<String, Set<Integer>>();

    // encodes the text content of the message (the "body" feature)
    FeatureVectorEncoder encoder = new StaticWordValueEncoder("body");
    encoder.setProbes(2);
    encoder.setTraceDictionary(traceDictionary);

    // provides a constant offset that the model can use to encode
    // the average frequency of each class
    FeatureVectorEncoder bias = new ConstantValueEncoder("Intercept");
    bias.setTraceDictionary(traceDictionary);

    int actual = newsGroups.intern(newsgroup_name);

    Multiset<String> words = ConcurrentHashMultiset.create();

    StringReader in = new StringReader(msg);

    countWords(analyzer, words, in);

    // ----- p.271 -----------
    // Vector v = new RandomAccessSparseVector(FEATURES);

    // original value does nothing in a ConstantValueEncoder
    bias.addToVector("", 1, v);

    // original value does nothing in a ConstantValueEncoder
    // lines.addToVector("", lineCount / 30, v);

    // original value does nothing in a ConstantValueEncoder
    // logLines.addToVector("", Math.log(lineCount + 1), v);

    // now scan through all the distinct words and add them to the vector
    for (String word : words.elementSet()) {
        encoder.addToVector(word, Math.log(1 + words.count(word)), v);
    }

    return actual;
}
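
Note how the loop pairs elementSet() with count(): each distinct word is encoded exactly once, and the Math.log(1 + count) weighting damps the influence of highly repeated words (a standard sublinear term-frequency scheme).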

From source file:org.sonar.server.component.ws.ComponentAppAction.java

private void appendMeasures(JsonWriter json, Map<String, MeasureDto> measuresByMetricKey,
        Multiset<String> severitiesAggregation, Integer periodIndex) {
    json.name("measures").beginObject();

    json.prop("fNcloc", formatMeasureOrVariation(measuresByMetricKey.get(CoreMetrics.NCLOC_KEY), periodIndex));
    json.prop("fCoverage", formatMeasureOrVariation(coverageMeasure(measuresByMetricKey), periodIndex));
    json.prop("fDuplicationDensity", formatMeasureOrVariation(
            measuresByMetricKey.get(CoreMetrics.DUPLICATED_LINES_DENSITY_KEY), periodIndex));
    json.prop("fDebt",
            formatMeasureOrVariation(measuresByMetricKey.get(CoreMetrics.TECHNICAL_DEBT_KEY), periodIndex));
    json.prop("fSqaleRating",
            formatMeasureOrVariation(measuresByMetricKey.get(CoreMetrics.SQALE_RATING_KEY), periodIndex));
    json.prop("fSqaleDebtRatio",
            formatMeasureOrVariation(measuresByMetricKey.get(CoreMetrics.SQALE_DEBT_RATIO_KEY), periodIndex));
    json.prop("fTests", formatMeasureOrVariation(measuresByMetricKey.get(CoreMetrics.TESTS_KEY), periodIndex));

    json.prop("fIssues", i18n.formatInteger(UserSession.get().locale(), severitiesAggregation.size()));
    for (String severity : severitiesAggregation.elementSet()) {
        json.prop("f" + StringUtils.capitalize(severity.toLowerCase()) + "Issues",
                i18n.formatInteger(UserSession.get().locale(), severitiesAggregation.count(severity)));
    }
    json.endObject();
}

From source file:com.continuuity.weave.internal.appmaster.ApplicationMasterService.java

/**
 * Handling containers that are completed.
 */
private void handleCompleted(List<YarnContainerStatus> completedContainersStatuses) {
    Multiset<String> restartRunnables = HashMultiset.create();
    for (YarnContainerStatus status : completedContainersStatuses) {
        LOG.info("Container {} completed with {}:{}.", status.getContainerId(), status.getState(),
                status.getDiagnostics());
        runningContainers.handleCompleted(status, restartRunnables);
    }

    for (Multiset.Entry<String> entry : restartRunnables.entrySet()) {
        LOG.info("Re-request container for {} with {} instances.", entry.getElement(), entry.getCount());
        for (int i = 0; i < entry.getCount(); i++) {
            runnableContainerRequests.add(createRunnableContainerRequest(entry.getElement()));
        }
    }

    // For all runnables that need to re-request containers, update the expected count timestamp
    // so that the EventHandler is triggered with the right expiration timestamp.
    expectedContainers.updateRequestTime(restartRunnables.elementSet());
}
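
This method uses both distinct-element views: entrySet() where it needs each runnable name together with its count, and elementSet() where only the distinct names matter. A minimal sketch of the difference (hypothetical names, not from the project above):

import java.util.Set;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multiset;

public class EntrySetVsElementSet {
    public static void main(String[] args) {
        Multiset<String> restarts = HashMultiset.create();
        restarts.add("worker");
        restarts.add("worker");
        restarts.add("driver");

        // entrySet(): one Entry per distinct element, carrying its count
        for (Multiset.Entry<String> e : restarts.entrySet()) {
            System.out.println(e.getElement() + " -> " + e.getCount());
        }

        // elementSet(): just the distinct elements, no counts
        Set<String> distinct = restarts.elementSet();
        System.out.println(distinct);
    }
}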

From source file:org.dllearner.algorithms.pattern.PatternBasedAxiomLearningAlgorithm.java

private Set<OWLAxiom> applyPattern(OWLAxiom pattern, OWLClass cls, Model fragment) {
    Map<OWLAxiom, Score> axioms2Score = new HashMap<>();

    OWLClassExpression patternSubClass = null;
    OWLClassExpression patternSuperClass = null;

    if (pattern.isOfType(AxiomType.EQUIVALENT_CLASSES)) {
        Set<OWLSubClassOfAxiom> subClassOfAxioms = ((OWLEquivalentClassesAxiom) pattern)
                .asOWLSubClassOfAxioms();
        for (OWLSubClassOfAxiom axiom : subClassOfAxioms) {
            if (!axiom.getSubClass().isAnonymous()) {
                patternSubClass = axiom.getSubClass();
                patternSuperClass = axiom.getSuperClass();
                break;
            }
        }
    } else if (pattern.isOfType(AxiomType.SUBCLASS_OF)) {
        patternSubClass = ((OWLSubClassOfAxiom) pattern).getSubClass();
        patternSuperClass = ((OWLSubClassOfAxiom) pattern).getSuperClass();
    } else {
        logger.warn("Pattern " + pattern + " not supported yet.");
        return Collections.emptySet();
    }

    Set<OWLEntity> signature = patternSuperClass.getSignature();
    signature.remove(patternSubClass.asOWLClass());
    Query query = converter.asQuery("?x", dataFactory.getOWLObjectIntersectionOf(cls, patternSuperClass),
            signature);
    logger.info("Running query\n" + query);
    Map<OWLEntity, String> variablesMapping = converter.getVariablesMapping();
    org.apache.jena.query.ResultSet rs = QueryExecutionFactory.create(query, fragment).execSelect();
    QuerySolution qs;
    Set<String> resources = new HashSet<>();
    Multiset<OWLAxiom> instantiations = HashMultiset.create();
    while (rs.hasNext()) {
        qs = rs.next();
        resources.add(qs.getResource("x").getURI());
        // get the IRIs for each variable
        Map<OWLEntity, IRI> entity2IRIMap = new HashMap<>();
        entity2IRIMap.put(patternSubClass.asOWLClass(), cls.getIRI());
        boolean skip = false;
        for (OWLEntity entity : signature) {
            String var = variablesMapping.get(entity);
            if (qs.get(var) == null) {
                logger.warn("Variable " + var + " is not bound.");
                skip = true;
                break;
            }
            if (qs.get(var).isLiteral()) {
                skip = true;
                break;
            }
            Resource resource = qs.getResource(var);
            if (entity.isOWLObjectProperty() && resource.hasURI(RDF.type.getURI())) {
                skip = true;
                break;
            }
            entity2IRIMap.put(entity, IRI.create(resource.getURI()));
        }
        if (!skip) {
            // instantiate the pattern
            OWLObjectDuplicator duplicator = new OWLObjectDuplicator(entity2IRIMap, dataFactory);
            OWLAxiom patternInstantiation = duplicator.duplicateObject(pattern);
            instantiations.add(patternInstantiation);
        }
    }
    // compute the score
    int total = resources.size();
    for (OWLAxiom axiom : instantiations.elementSet()) {
        int frequency = instantiations.count(axiom);
        Score score = computeScore(total, Math.min(total, frequency));
        axioms2Score.put(axiom, score);
    }

    return asAnnotatedAxioms(axioms2Score);
}
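
Here the multiset does the frequency bookkeeping: duplicate pattern instantiations are absorbed by instantiations.add(...), and the scoring loop then visits each distinct axiom once via elementSet(), reading its frequency back with count().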

From source file:it.units.malelab.ege.util.DUMapper.java

private static double[][][] getNeatData3(String baseDir, String fileNamePattern, int generations)
        throws IOException {
    List<List<Map<Integer, Multimap<Integer, Integer>>>> data = new ArrayList<>();
    Map<Integer, String> nodeTypesMap = new HashMap<>();
    for (int g = 0; g < generations; g++) {
        List<Map<Integer, Multimap<Integer, Integer>>> currentPopulation = new ArrayList<>();
        BufferedReader reader = Files.newBufferedReader(
                FileSystems.getDefault().getPath(baseDir, String.format(fileNamePattern, g + 1)));
        String line;
        boolean isInPopulation = false;
        Map<Integer, Multimap<Integer, Integer>> currentIndividual = null;
        while ((line = reader.readLine()) != null) {
            if (line.equals("[NEAT-POPULATION:SPECIES]")) {
                isInPopulation = true;
                continue;
            }
            if (!isInPopulation) {
                continue;
            }
            if (line.startsWith("\"g\"")) {
                if (currentIndividual != null) {
                    //save current individual
                    currentPopulation.add(currentIndividual);
                }
                currentIndividual = new HashMap<>();
            }
            if (line.startsWith("\"n\"")) {
                String[] pieces = line.split(",");
                nodeTypesMap.put(Integer.parseInt(pieces[4]), pieces[3].replaceAll("\"", ""));
                currentIndividual.put(Integer.parseInt(pieces[4]), (Multimap) HashMultimap.create());
            } else if (line.startsWith("\"l\"")) {
                String[] pieces = line.split(",");
                int from = Integer.parseInt(pieces[3]);
                int to = Integer.parseInt(pieces[4]);
                if (currentIndividual.get(from) == null) {
                    currentIndividual.put(from, (Multimap) HashMultimap.create());
                }
                if (currentIndividual.get(to) == null) {
                    currentIndividual.put(to, (Multimap) HashMultimap.create());
                }
                currentIndividual.get(from).put(1, to);
                currentIndividual.get(to).put(-1, from);
            }
        }
        reader.close();
        data.add(currentPopulation);
    }
    //build node innovation numbers
    String[] nodeTypes = new String[] { "i", "b", "h", "o" };
    List<Integer> nodeINs = new ArrayList<>();
    for (String nodeType : nodeTypes) {
        List<Integer> typeNodeINs = new ArrayList<>();
        for (Integer in : nodeTypesMap.keySet()) {
            if (nodeTypesMap.get(in).equals(nodeType)) {
                typeNodeINs.add(in);
            }
        }
        Collections.sort(typeNodeINs);
        nodeINs.addAll(typeNodeINs);
    }
    //populate arrays
    double[][] usages = new double[generations][];
    double[][] diversities = new double[generations][];
    for (int g = 0; g < generations; g++) {
        usages[g] = new double[nodeINs.size()];
        diversities[g] = new double[nodeINs.size()];
        List<Map<Integer, Multimap<Integer, Integer>>> currentPopulation = data.get(g);
        //populate usages, diversities
        int i = 0;
        for (int nodeIN : nodeINs) {
            double[] localUsages = new double[currentPopulation.size()];
            Multiset<Set<Integer>> froms = HashMultiset.create();
            Multiset<Set<Integer>> tos = HashMultiset.create();
            int c = 0;
            for (Map<Integer, Multimap<Integer, Integer>> currentIndividual : currentPopulation) {
                if (nodeTypesMap.get(nodeIN).equals("i") || nodeTypesMap.get(nodeIN).equals("b")) {
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = currentIndividual.get(nodeIN).get(1).isEmpty() ? 0 : 1;
                        tos.add(new HashSet<>(currentIndividual.get(nodeIN).get(1)));
                    } else {
                        tos.add(Collections.emptySet());
                    }
                } else if (nodeTypesMap.get(nodeIN).equals("h")) {
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = (currentIndividual.get(nodeIN).get(-1).isEmpty() ? 0 : 0.5)
                                + (currentIndividual.get(nodeIN).get(1).isEmpty() ? 0 : 0.5);
                        tos.add(new HashSet<>(currentIndividual.get(nodeIN).get(1)));
                        froms.add(new HashSet<>(currentIndividual.get(nodeIN).get(-1)));
                    } else {
                        tos.add(Collections.emptySet());
                        froms.add(Collections.emptySet());
                    }
                } else if (nodeTypesMap.get(nodeIN).equals("o")) {
                    if (currentIndividual.containsKey(nodeIN)) {
                        localUsages[c] = currentIndividual.get(nodeIN).get(-1).isEmpty() ? 0 : 1;
                        froms.add(new HashSet<>(currentIndividual.get(nodeIN).get(-1)));
                    } else {
                        froms.add(Collections.emptySet());
                    }
                }
                c = c + 1;
            }
            usages[g][i] = StatUtils.mean(localUsages);
            if (nodeTypesMap.get(nodeIN).equals("i") || nodeTypesMap.get(nodeIN).equals("b")) {
                diversities[g][i] = Utils.multisetDiversity(tos, tos.elementSet());
            } else if (nodeTypesMap.get(nodeIN).equals("h")) {
                diversities[g][i] = Utils.multisetDiversity(tos, tos.elementSet()) / 2
                        + Utils.multisetDiversity(froms, froms.elementSet()) / 2;
            } else if (nodeTypesMap.get(nodeIN).equals("o")) {
                diversities[g][i] = Utils.multisetDiversity(froms, froms.elementSet());
            }
            i = i + 1;
        }
    }
    return new double[][][] { diversities, usages };
}

From source file:com.music.service.text.TimelineToMusicService.java

private Variation getVariation(List<Tweet> tweets, TimelineMusic meta) {
    Morphology morphology = new Morphology(new StringReader(""));
    Multiset<String> words = HashMultiset.create();
    for (Tweet tweet : tweets) {
        String tweetText = tweet.getText().toLowerCase();
        List<String> urls = TimelineToMusicService.extractUrls(tweetText);
        for (String url : urls) {
            tweetText = tweetText.replace(url, "");
        }
        List<String> usernames = TimelineToMusicService.extractMentionedUsernames(tweetText);
        for (String username : usernames) {
            tweetText = tweetText.replace(username, "").replace("rt", "");
        }

        String[] wordsInTweet = tweetText.split("[^\\p{L}&&[^']]+");
        for (String word : wordsInTweet) {
            try {
                words.add(morphology.stem(word));
            } catch (Exception ex) {
                words.add(word);
            }
        }
    }
    words.removeAll(stopwords);

    // if a word is mentioned in more than 4% of the tweets, it's considered a topic
    double topicThreshold = tweets.size() * 4.0 / 100;
    for (Iterator<String> it = words.iterator(); it.hasNext();) {
        String word = it.next();
        // remove stopwords not in the list (e.g. in a different language);
        // we treat all words shorter than 4 characters as stop words
        if (word == null || word.length() < 4) {
            it.remove();
        } else if (words.count(word) < topicThreshold) {
            it.remove();
        }
    }

    meta.setTopKeywords(new HashSet<>(words.elementSet()));

    // the more topics there are, the more varied the music
    if (meta.getTopKeywords().size() > 40) {
        return Variation.EXTREMELY_VARIATIVE;
    } else if (meta.getTopKeywords().size() > 30) {
        return Variation.VERY_VARIATIVE;
    } else if (meta.getTopKeywords().size() > 20) {
        return Variation.MOVING;
    } else if (meta.getTopKeywords().size() > 10) {
        return Variation.AVERAGE;
    } else {
        return Variation.MONOTONOUS;
    }
}
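
One detail worth noting: meta.setTopKeywords(new HashSet<>(words.elementSet())) takes a defensive copy. Since elementSet() is a view backed by the multiset, storing it directly would let later mutations of words change the stored keyword set.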

From source file:org.apache.twill.internal.appmaster.ApplicationMasterService.java

/**
 * Handling containers that are completed.
 */
private void handleCompleted(List<YarnContainerStatus> completedContainersStatuses) {
    Multiset<String> restartRunnables = HashMultiset.create();
    for (YarnContainerStatus status : completedContainersStatuses) {
        LOG.info("Container {} completed with {}:{}.", status.getContainerId(), status.getState(),
                status.getDiagnostics());
        runningContainers.handleCompleted(status, restartRunnables);
    }

    for (Multiset.Entry<String> entry : restartRunnables.entrySet()) {
        LOG.info("Re-request container for {} with {} instances.", entry.getElement(), entry.getCount());
        runnableContainerRequests.add(createRunnableContainerRequest(entry.getElement(), entry.getCount()));
    }

    // For all runnables that need to re-request containers, update the expected count timestamp
    // so that the EventHandler is triggered with the right expiration timestamp.
    expectedContainers.updateRequestTime(restartRunnables.elementSet());
}

From source file:org.dllearner.utilities.examples.AutomaticNegativeExampleFinderSPARQL2.java

private SortedSet<OWLIndividual> computeNegativeExamples(OWLClass classToDescribe,
        Multiset<OWLClass> positiveExamplesTypes, Map<Strategy, Double> strategiesWithWeight,
        int maxNrOfReturnedInstances) {
    SortedSet<OWLIndividual> negativeExamples = new TreeSet<>();

    for (Entry<Strategy, Double> entry : strategiesWithWeight.entrySet()) {
        Strategy strategy = entry.getKey();
        Double weight = entry.getValue();

        // the max number of instances returned by the current strategy
        int strategyLimit = (int) (weight * maxNrOfReturnedInstances);

        // the highest frequency value
        int maxFrequency = positiveExamplesTypes.entrySet().iterator().next().getCount();

        if (strategy == SIBLING) {//get sibling class based examples
            negativeExamples.addAll(negativeExamplesBySiblingClasses(positiveExamplesTypes, strategyLimit,
                    maxNrOfReturnedInstances));
        } else if (strategy == SUPERCLASS) {//get super class based examples
            negativeExamples.addAll(negativeExamplesBySuperClasses(positiveExamplesTypes, negativeExamples,
                    strategyLimit, maxNrOfReturnedInstances));
        } else if (strategy == RANDOM) {//get some random examples
            logger.info("Applying random strategy...");
            SortedSet<OWLIndividual> randomNegativeExamples = new TreeSet<>();
            String query = "SELECT DISTINCT ?s WHERE {?s a ?type. ?type a owl:Class .";
            if (classToDescribe != null) {
                query += "FILTER NOT EXISTS{?s a <" + classToDescribe.toStringID() + "> }";
            } else {
                for (OWLClass nc : positiveExamplesTypes.elementSet()) {

                }
                throw new UnsupportedOperationException(
                        "Currently it's not possible to get random examples for unknown class to describe.");
            }

            query += "} LIMIT " + maxNrOfReturnedInstances;

            try (QueryExecution qe = qef.createQueryExecution(query)) {
                ResultSet rs = qe.execSelect();
                while (rs.hasNext()) {
                    QuerySolution qs = rs.next();
                    randomNegativeExamples
                            .add(df.getOWLNamedIndividual(IRI.create(qs.getResource("s").getURI())));
                }
            }
            randomNegativeExamples.removeAll(negativeExamples);
            negativeExamples.addAll(new ArrayList<>(randomNegativeExamples).subList(0, Math
                    .min(randomNegativeExamples.size(), maxNrOfReturnedInstances - negativeExamples.size())));
            logger.info("Negative examples(" + randomNegativeExamples.size() + "): " + randomNegativeExamples);
        }
    }
    return negativeExamples;
}

From source file:org.splevo.ui.refinementbrowser.ArgoUMLVariantScanHandler.java

private void scanForIncludedFeatures(List<VariationPoint> vps) {

    Multiset<String> identifiedFeatures = HashMultiset.create();
    List<String> errors = Lists.newArrayList();

    for (VariationPoint vp : vps) {

        Set<SoftwareElement> elements = getNotLeadingImplementingElements(vp);
        if (elements.isEmpty()) {
            identifiedFeatures.add("{NONE}");
        }
        for (SoftwareElement element : elements) {

            SourceLocation sourceLocation = element.getSourceLocation();
            String path = sourceLocation.getFilePath();
            List<String> lines = null;
            try {
                lines = FileUtils.readLines(new File(path));

            } catch (IOException e) {
                e.printStackTrace();
                continue;
            }
            int markerLineIndex = getMarkerLineIndex(vp, sourceLocation, lines);
            if (markerLineIndex == -1) {
                errors.add("No marker found for " + path.substring(path.length() - 20));
                continue;
            }

            String featureId = getFeatureId(lines, markerLineIndex);

            if (isMarkerLine(lines, markerLineIndex - 1)) {
                featureId = getFeatureId(lines, markerLineIndex - 1) + " + " + featureId;
            } else if (isMarkerLine(lines, markerLineIndex + 1)) {
                featureId += " + " + getFeatureId(lines, markerLineIndex + 1);
            }

            identifiedFeatures.add(featureId);
        }
    }

    if (errors.size() > 0) {
        MessageDialog.openError(Display.getCurrent().getActiveShell(), "Marker Detection Errors",
                Joiner.on("\n").join(errors));
    }

    StringBuilder message = new StringBuilder();
    message.append("VP Count Total: ");
    message.append(vps.size());
    for (String featureId : identifiedFeatures.elementSet()) {
        message.append("\n");
        message.append(identifiedFeatures.count(featureId));
        message.append(" x ");
        message.append(featureId);
    }
    MessageDialog.openInformation(Display.getCurrent().getActiveShell(), "Info", message.toString());
}
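
The summary loop again leans on the elementSet()/count() pairing: each distinct feature id is printed once, with identifiedFeatures.count(featureId) supplying how many variation points carried it.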

From source file:org.dllearner.algorithms.qtl.experiments.PRConvergenceExperiment.java

private RDFResourceTree applyBaseLine(ExamplesWrapper examples, Baseline baselineApproach) {
    logger.info("Computing baseline...");
    Collection<RDFResourceTree> posExamples = examples.posExamplesMapping.values();
    Collection<RDFResourceTree> negExamples = examples.negExamplesMapping.values();

    RDFResourceTree solution = null;

    switch (baselineApproach) {
    case RANDOM:// 1.
        String query = "SELECT ?cls WHERE {?cls a owl:Class .} ORDER BY RAND() LIMIT 1";
        QueryExecution qe = qef.createQueryExecution(query);
        ResultSet rs = qe.execSelect();
        if (rs.hasNext()) {
            QuerySolution qs = rs.next();
            Resource cls = qs.getResource("cls");
            solution = new RDFResourceTree();
            solution.addChild(new RDFResourceTree(cls.asNode()), RDF.type.asNode());
        }
        break;
    case MOST_POPULAR_TYPE_IN_KB:// 2.
        query = "SELECT ?cls WHERE {?cls a owl:Class . ?s a ?cls .} ORDER BY DESC(COUNT(?s)) LIMIT 1";
        qe = qef.createQueryExecution(query);
        rs = qe.execSelect();
        if (rs.hasNext()) {
            QuerySolution qs = rs.next();
            Resource cls = qs.getResource("cls");
            solution = new RDFResourceTree();
            solution.addChild(new RDFResourceTree(cls.asNode()), RDF.type.asNode());
        }
        break;
    case MOST_FREQUENT_TYPE_IN_EXAMPLES:// 3.
        Multiset<Node> types = HashMultiset.create();
        for (RDFResourceTree ex : posExamples) {
            List<RDFResourceTree> children = ex.getChildren(RDF.type.asNode());
            for (RDFResourceTree child : children) {
                types.add(child.getData());
            }
        }
        Node mostFrequentType = Ordering.natural().onResultOf(new Function<Multiset.Entry<Node>, Integer>() {
            @Override
            public Integer apply(Multiset.Entry<Node> entry) {
                return entry.getCount();
            }
        }).max(types.entrySet()).getElement();
        solution = new RDFResourceTree();
        solution.addChild(new RDFResourceTree(mostFrequentType), RDF.type.asNode());
        break;
    case MOST_FREQUENT_EDGE_IN_EXAMPLES:// 4.
        Multiset<Pair<Node, Node>> pairs = HashMultiset.create();
        for (RDFResourceTree ex : posExamples) {
            SortedSet<Node> edges = ex.getEdges();
            for (Node edge : edges) {
                List<RDFResourceTree> children = ex.getChildren(edge);
                for (RDFResourceTree child : children) {
                    pairs.add(new Pair<>(edge, child.getData()));
                }
            }
        }
        Pair<Node, Node> mostFrequentPair = Ordering.natural()
                .onResultOf(new Function<Multiset.Entry<Pair<Node, Node>>, Integer>() {
                    @Override
                    public Integer apply(Multiset.Entry<Pair<Node, Node>> entry) {
                        return entry.getCount();
                    }
                }).max(pairs.entrySet()).getElement();
        solution = new RDFResourceTree();
        solution.addChild(new RDFResourceTree(mostFrequentPair.getValue()), mostFrequentPair.getKey());
        break;
    case MOST_INFORMATIVE_EDGE_IN_EXAMPLES:
        // get all p-o in pos examples
        Multiset<Pair<Node, Node>> edgeObjectPairs = HashMultiset.create();
        for (RDFResourceTree ex : posExamples) {
            SortedSet<Node> edges = ex.getEdges();
            for (Node edge : edges) {
                List<RDFResourceTree> children = ex.getChildren(edge);
                for (RDFResourceTree child : children) {
                    edgeObjectPairs.add(new Pair<>(edge, child.getData()));
                }
            }
        }

        double bestAccuracy = -1;
        solution = new RDFResourceTree();

        for (Pair<Node, Node> pair : edgeObjectPairs.elementSet()) {
            Node edge = pair.getKey();
            Node childValue = pair.getValue();

            // compute accuracy
            int tp = edgeObjectPairs.count(pair);
            int fn = posExamples.size() - tp;
            int fp = 0;
            for (RDFResourceTree ex : negExamples) { // compute false positives
                List<RDFResourceTree> children = ex.getChildren(edge);
                if (children != null) {
                    for (RDFResourceTree child : children) {
                        if (child.getData().equals(childValue)) {
                            fp++;
                            break;
                        }
                    }
                }
            }
            int tn = negExamples.size() - fp;

            double accuracy = Heuristics.getPredictiveAccuracy(posExamples.size(), negExamples.size(), tp, tn,
                    1.0);
            // update best solution
            if (accuracy >= bestAccuracy) {
                solution = new RDFResourceTree();
                solution.addChild(new RDFResourceTree(childValue), edge);
                bestAccuracy = accuracy;
            }
        }
        break;
    case LGG:
        LGGGenerator lggGenerator = new LGGGeneratorSimple();
        solution = lggGenerator.getLGG(Lists.newArrayList(posExamples));
        break;
    default:
        break;
    }
    logger.info("Baseline solution:\n" + owlRenderer.render(QueryTreeUtils.toOWLClassExpression(solution)));

    return solution;
}
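
In the MOST_INFORMATIVE_EDGE_IN_EXAMPLES branch, edgeObjectPairs.elementSet() ensures each distinct (edge, object) pair is evaluated exactly once, with edgeObjectPairs.count(pair) serving directly as the true-positive count for the accuracy computation.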