Example usage for com.google.common.collect Sets powerSet

List of usage examples for com.google.common.collect Sets powerSet

Introduction

In this page you can find the example usage for com.google.common.collect Sets powerSet.

Prototype

@GwtCompatible(serializable = false)
public static <E> Set<Set<E>> powerSet(Set<E> set) 

Source Link

Document

Returns the set of all possible subsets of the given set.

Usage

From source file:org.eclipse.viatra.query.runtime.localsearch.planner.PConstraintInfoInferrer.java

/**
 * Creates the constraint-info entries for a single type constraint and appends
 * them to {@code resultList}.
 *
 * @param resultList     collector for the created {@code PConstraintInfo} objects
 * @param typeConstraint the type constraint to enumerate binding masks for
 */
private void createConstraintInfoTypeConstraint(List<PConstraintInfo> resultList,
        TypeConstraint typeConstraint) {
    Set<PVariable> affectedVariables = typeConstraint.getAffectedVariables();
    IInputKey inputKey = typeConstraint.getSupplierKey();

    // Enumerable types admit any subset of the variables as bound; a
    // non-enumerable type can only act as a check, i.e. all variables bound.
    Set<Set<PVariable>> bindings = inputKey.isEnumerable()
            ? Sets.powerSet(affectedVariables)
            : Collections.singleton(affectedVariables);

    if (inputKey instanceof EStructuralFeatureInstancesKey) {
        final EStructuralFeature feature = ((EStructuralFeatureInstancesKey) inputKey).getEmfKey();
        if (!canPerformInverseNavigation(feature)) {
            // When inverse navigation is not allowed or not possible, filter out operation masks, where
            // the first variable would be free AND the feature is an EReference and has no EOpposite
            bindings = excludeUnnavigableOperationMasks(typeConstraint, bindings);
        }
    }
    doCreateConstraintInfos(resultList, typeConstraint, affectedVariables, bindings);
}

From source file:org.eclipse.incquery.runtime.localsearch.planner.LocalSearchRuntimeBasedStrategy.java

/**
 * Creates all possible application conditions for all constraints.
 *
 * @param constraintSet the set of constraints
 * @param runtimeContext the model-dependent runtime context
 * @return a collection of the wrapper PConstraintInfo objects with all the allowed application conditions
 */
private List<PConstraintInfo> createPConstraintInfos(Set<PConstraint> constraintSet,
        IQueryRuntimeContext runtimeContext) {
    List<PConstraintInfo> constraintInfos = Lists.newArrayList();

    for (PConstraint pConstraint : constraintSet) {
        if (pConstraint instanceof ExportedParameter) {
            // Do not create mask info for exported parameter, for it is only a symbolic constraint
            continue;
        }
        if (pConstraint instanceof TypeConstraint) {
            Set<PVariable> affectedVariables = pConstraint.getAffectedVariables();
            // Any subset of the affected variables may be bound.
            Set<Set<PVariable>> bindings = Sets.powerSet(affectedVariables);
            doCreateConstraintInfosForTypeConstraint(runtimeContext, constraintInfos,
                    (TypeConstraint) pConstraint, affectedVariables, bindings);
        } else {
            // Create constraint infos so that only single-use variables can be unbound
            Set<PVariable> affectedVariables = pConstraint.getAffectedVariables();

            // A variable is "single use" when this is the only constraint referring
            // to it, not counting ExportedParameter constraints.
            Set<PVariable> singleUseVariables = Sets.newHashSet();
            for (PVariable pVariable : affectedVariables) {
                Set<PConstraint> allReferringConstraints = pVariable.getReferringConstraints();
                // Filter out exported parameter constraints
                Set<ExportedParameter> referringExportedParameters = pVariable
                        .getReferringConstraintsOfType(ExportedParameter.class);
                SetView<PConstraint> trueReferringConstraints = Sets.difference(allReferringConstraints,
                        referringExportedParameters);
                if (trueReferringConstraints.size() == 1) {
                    singleUseVariables.add(pVariable);
                }
            }
            SetView<PVariable> nonSingleUseVariables = Sets.difference(affectedVariables, singleUseVariables);
            // Generate bindings by unioning each element of the power set with the set
            // of non-single-use variables. BUGFIX: each union is built BEFORE it is
            // inserted into `bindings` — the original code added the sets first and
            // mutated them afterwards, which corrupts the enclosing HashSet's hash
            // invariant (contains/addAll may then misbehave).
            Set<Set<PVariable>> bindings = Sets.newHashSet();
            for (Set<PVariable> set : Sets.powerSet(singleUseVariables)) {
                Set<PVariable> binding = Sets.newHashSet(set);
                binding.addAll(nonSingleUseVariables);
                bindings.add(binding);
            }

            if (pConstraint instanceof PatternMatchCounter) {
                // in cases of this type, the deduced variables will contain only the result variable
                final PVariable resultVariable = pConstraint.getDeducedVariables().iterator().next();
                Set<Set<PVariable>> additionalBindings = Sets.newHashSet();
                for (Set<PVariable> binding : bindings) {
                    if (binding.contains(resultVariable)) {
                        // Also allow the mask where the result variable itself is unbound.
                        // Reference comparison (!=) kept from the original filter semantics.
                        Set<PVariable> filteredBinding = Sets.newHashSet();
                        for (PVariable variable : binding) {
                            if (variable != resultVariable) {
                                filteredBinding.add(variable);
                            }
                        }
                        additionalBindings.add(filteredBinding);
                    }
                }
                bindings.addAll(additionalBindings);
            }

            doCreateConstraintInfos(runtimeContext, constraintInfos, pConstraint, affectedVariables, bindings);
        }
    }
    return constraintInfos;
}

From source file:com.google.devtools.build.lib.rules.android.DexArchiveAspect.java

/** Returns the power set of the normalized dexopts supported in incremental dexing. */
private static Set<Set<String>> aspectDexopts(RuleContext ruleContext) {
    Set<String> supportedDexopts = normalizeDexopts(ruleContext,
            getAndroidConfig(ruleContext).getDexoptsSupportedInIncrementalDexing());
    return Sets.powerSet(supportedDexopts);
}

From source file:org.mskcc.juber.waltz.pileup.processors.GenotypingProcessor.java

/**
 * Iterates over the power set of the given set of genotypes and finds the
 * fragments supporting each non-empty element of the power set.
 *
 * @param genotypeIDsWithName the genotypes (with MAF line info) to combine
 * @return map from each non-empty genotype combination considered to the set
 *         of fragment names supporting every genotype in that combination
 */
private Map<Set<GenotypeIDWithMafLine>, Set<String>> getSupportingFragmentsForPowerSet(
        Set<GenotypeIDWithMafLine> genotypeIDsWithName) {
    // find supporting fragments for individual genotypes
    Map<GenotypeIDWithMafLine, Set<String>> supportingFragments = new HashMap<GenotypeIDWithMafLine, Set<String>>();
    Set<GenotypeIDWithMafLine> nonZeroGenotypes = new HashSet<GenotypeIDWithMafLine>();
    for (GenotypeIDWithMafLine genotypeIDWithName : genotypeIDsWithName) {
        Set<String> fragments = null;
        // MNV needs a bit of special treatment since only constituent SNVs
        // are stored in the pileup
        if (genotypeIDWithName.genotypeID.type == GenotypeEventType.MNV) {
            fragments = getMNVSupportingFragments(genotypeIDWithName.genotypeID);
        } else {
            fragments = pileup.genotypes.get(genotypeIDWithName.genotypeID);
        }

        // no recorded support: substitute an empty set; otherwise remember the
        // genotype as having non-zero support
        if (fragments == null) {
            fragments = new HashSet<String>();
        } else {
            nonZeroGenotypes.add(genotypeIDWithName);
        }

        supportingFragments.put(genotypeIDWithName, fragments);
    }

    // compute powerset only for the genotypes with non-zero support
    Set<Set<GenotypeIDWithMafLine>> powerSet = null;
    if (nonZeroGenotypes.size() <= 5) {
        // choose all non-zero genotypes
        powerSet = Sets.powerSet(nonZeroGenotypes);
    } else {
        // choose top 5 non-zero genotypes by number of supporting fragments
        // (caps the power set at 2^5 = 32 combinations)
        // NOTE(review): assumes fragmentCountComparator orders entries by
        // descending support — confirm against its definition.
        Set<Entry<GenotypeIDWithMafLine, Set<String>>> entries = supportingFragments.entrySet();
        List<Entry<GenotypeIDWithMafLine, Set<String>>> list = new ArrayList<Entry<GenotypeIDWithMafLine, Set<String>>>(
                entries);
        Collections.sort(list, fragmentCountComparator);

        Set<GenotypeIDWithMafLine> chosenGenotypeIDs = new HashSet<GenotypeIDWithMafLine>();
        for (int i = 0; i < 5; i++) {
            // stop early once genotypes without support are reached
            if (list.get(i).getValue().size() == 0) {
                break;
            }

            chosenGenotypeIDs.add(list.get(i).getKey());
        }

        powerSet = Sets.powerSet(chosenGenotypeIDs);
    }

    // copy the lazy power-set view into a mutable set before augmenting it
    Set<Set<GenotypeIDWithMafLine>> processingSet = new HashSet<Set<GenotypeIDWithMafLine>>(powerSet);
    // add individual genotypes to the processing set
    for (GenotypeIDWithMafLine genotypeIDWithName : genotypeIDsWithName) {
        Set<GenotypeIDWithMafLine> s = new HashSet<GenotypeIDWithMafLine>();
        s.add(genotypeIDWithName);
        processingSet.add(s);
    }

    powerSet = null;

    // populate the returning set
    Map<Set<GenotypeIDWithMafLine>, Set<String>> returningSet = new HashMap<Set<GenotypeIDWithMafLine>, Set<String>>();
    for (Set<GenotypeIDWithMafLine> s : processingSet) {
        if (s.isEmpty()) {
            continue;
        }

        // fragments supporting every genotype in this combination
        Set<String> fragments = getIntersection(s, supportingFragments);
        returningSet.put(s, fragments);

    }

    return returningSet;
}

From source file:org.stackwire.fca.Context.java

/**
 * Returns the power set of attribute indices.
 *
 * @return power set of the index range {@code [0, attributeNames.size() - 1]}
 */
public Set<Set<Integer>> powerSetOfAttributes() {
    Set<Integer> attributeIndices = Utils.rangeSet(0, attributeNames.size() - 1);
    return Sets.powerSet(attributeIndices);
}

From source file:org.stackwire.fca.Context.java

/**
 * Returns power set of object/*from w w  w.ja v  a 2s .  c o m*/
 * 
 * @return power set of attributes
 */
public Set<Set<Integer>> powerSetOfObjects() {
    return Sets.powerSet(Utils.rangeSet(0, objectNames.size() - 1));
}

From source file:org.dllearner.algorithms.qtl.qald.QALDExperiment.java

/**
 * Generates candidate negative examples similar to the given positive examples
 * by relaxing the SPARQL query: object positions of triple patterns are
 * replaced with fresh variables while the original bindings are excluded.
 *
 * @param examples    positive example URIs, removed from the candidate set
 * @param queryString the SPARQL query that describes the positive examples
 * @return list of negative example URIs; calls {@code System.exit(0)} if none
 *         can be found
 */
private List<String> generateNoiseCandidatesSimilar(List<String> examples, String queryString) {
    Query query = QueryFactory.create(queryString);

    QueryUtils queryUtils = new QueryUtils();

    Set<Triple> triplePatterns = queryUtils.extractTriplePattern(query);

    Set<String> negExamples = new HashSet<>();

    if (triplePatterns.size() == 1) {
        // single pattern: replace the object with a variable and exclude the
        // original object via FILTER(?var != o)
        Triple tp = triplePatterns.iterator().next();
        Node var = NodeFactory.createVariable("var");
        Triple newTp = Triple.create(tp.getSubject(), tp.getPredicate(), var);

        ElementTriplesBlock triplesBlock = new ElementTriplesBlock();
        triplesBlock.addTriple(newTp);

        ElementFilter filter = new ElementFilter(
                new E_NotEquals(new ExprVar(var), NodeValue.makeNode(tp.getObject())));

        ElementGroup eg = new ElementGroup();
        eg.addElement(triplesBlock);
        eg.addElementFilter(filter);

        Query q = new Query();
        q.setQuerySelectType();
        q.setDistinct(true);
        q.addProjectVars(query.getProjectVars());

        q.setQueryPattern(eg);

        List<String> result = getResult(q.toString());
        negExamples.addAll(result);
    } else {
        // we modify each triple pattern <s p o> by <s p ?var> . ?var != o
        // Subsets are ordered by size, then by hash code, so that smaller
        // modifications are processed first.
        // NOTE(review): two distinct subsets with equal size AND equal hashCode
        // compare as equal and one is silently dropped by the TreeSet — confirm
        // this loss is acceptable.
        Set<Set<Triple>> powerSet = new TreeSet<>((Comparator<Set<Triple>>) (o1, o2) -> {
            return ComparisonChain.start().compare(o1.size(), o2.size()).compare(o1.hashCode(), o2.hashCode())
                    .result();
        });
        powerSet.addAll(Sets.powerSet(triplePatterns));

        for (Set<Triple> set : powerSet) {
            // skip the empty subset and the full subset (the unmodified query)
            if (!set.isEmpty() && set.size() != triplePatterns.size()) {
                List<Triple> existingTriplePatterns = new ArrayList<>(triplePatterns);
                List<Triple> newTriplePatterns = new ArrayList<>();
                List<ElementFilter> filters = new ArrayList<>();
                int cnt = 0;
                for (Triple tp : set) {
                    if (tp.getObject().isURI() || tp.getObject().isLiteral()) {
                        // replace the concrete object with a fresh variable ...
                        Node var = NodeFactory.createVariable("var" + cnt++);
                        Triple newTp = Triple.create(tp.getSubject(), tp.getPredicate(), var);

                        existingTriplePatterns.remove(tp);
                        newTriplePatterns.add(newTp);

                        // ... and exclude solutions matching the original triple
                        ElementTriplesBlock triplesBlock = new ElementTriplesBlock();
                        triplesBlock.addTriple(tp);

                        ElementGroup eg = new ElementGroup();
                        eg.addElement(triplesBlock);

                        ElementFilter filter = new ElementFilter(new E_NotExists(eg));
                        filters.add(filter);
                    }
                }
                Query q = new Query();
                q.setQuerySelectType();
                q.setDistinct(true);
                q.addProjectVars(query.getProjectVars());
                List<Triple> allTriplePatterns = new ArrayList<>(existingTriplePatterns);
                allTriplePatterns.addAll(newTriplePatterns);
                ElementTriplesBlock tripleBlock = new ElementTriplesBlock(BasicPattern.wrap(allTriplePatterns));
                ElementGroup eg = new ElementGroup();
                eg.addElement(tripleBlock);

                for (ElementFilter filter : filters) {
                    eg.addElementFilter(filter);
                }

                q.setQueryPattern(eg);

                List<String> result = getResult(q.toString());
                result.removeAll(examples);

                // fallback: if the modified query yields nothing new, retry
                // without the replaced triple patterns
                if (result.isEmpty()) {
                    q = new Query();
                    q.setQuerySelectType();
                    q.setDistinct(true);
                    q.addProjectVars(query.getProjectVars());
                    tripleBlock = new ElementTriplesBlock(BasicPattern.wrap(existingTriplePatterns));
                    eg = new ElementGroup();
                    eg.addElement(tripleBlock);

                    for (ElementFilter filter : filters) {
                        eg.addElementFilter(filter);
                    }

                    q.setQueryPattern(eg);

                    result = getResult(q.toString());
                    result.removeAll(examples);
                }
                negExamples.addAll(result);
            }
        }
    }

    negExamples.removeAll(examples);
    if (negExamples.isEmpty()) {
        logger.error("Found no negative example.");
        System.exit(0);
    }
    return new ArrayList<>(negExamples);
}

From source file:io.prestosql.sql.planner.QueryPlanner.java

/**
 * Enumerates the grouping sets implied by the GROUP BY analysis: CUBE clauses
 * expand to their power sets, ROLLUP clauses to their prefixes, and the final
 * result is the cross product of all partial sets (including ordinary sets).
 *
 * @param groupingSetAnalysis analysis holding cubes, rollups and ordinary sets
 * @return all grouping sets as sets of field ids; a single empty set when the
 *         analysis contains no grouping elements
 */
private List<Set<FieldId>> enumerateGroupingSets(Analysis.GroupingSetAnalysis groupingSetAnalysis) {
    List<List<Set<FieldId>>> partialSets = new ArrayList<>();

    // CUBE(a, b, ...) contributes every subset of its fields
    for (Set<FieldId> cube : groupingSetAnalysis.getCubes()) {
        partialSets.add(ImmutableList.copyOf(Sets.powerSet(cube)));
    }

    // ROLLUP(a, b, ...) contributes every prefix, from empty to the full list
    for (List<FieldId> rollup : groupingSetAnalysis.getRollups()) {
        List<Set<FieldId>> sets = IntStream.rangeClosed(0, rollup.size())
                .mapToObj(i -> ImmutableSet.copyOf(rollup.subList(0, i))).collect(toImmutableList());

        partialSets.add(sets);
    }

    partialSets.addAll(groupingSetAnalysis.getOrdinarySets());

    if (partialSets.isEmpty()) {
        return ImmutableList.of(ImmutableSet.of());
    }

    // compute the cross product of the partial sets
    List<Set<FieldId>> allSets = new ArrayList<>();
    partialSets.get(0).stream().map(ImmutableSet::copyOf).forEach(allSets::add);

    for (int i = 1; i < partialSets.size(); i++) {
        List<Set<FieldId>> groupingSets = partialSets.get(i);
        List<Set<FieldId>> oldGroupingSetsCrossProduct = ImmutableList.copyOf(allSets);
        allSets.clear();
        // pair every accumulated set with every set from the current partial list
        for (Set<FieldId> existingSet : oldGroupingSetsCrossProduct) {
            for (Set<FieldId> groupingSet : groupingSets) {
                Set<FieldId> concatenatedSet = ImmutableSet.<FieldId>builder().addAll(existingSet)
                        .addAll(groupingSet).build();
                allSets.add(concatenatedSet);
            }
        }
    }

    return allSets;
}

From source file:de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

/**
 * Recursively enumerates every way to split {@code sigs} into an unordered
 * collection of non-empty, disjoint, covering subsets (i.e. all set partitions).
 *
 * @param sigs the signals to partition; Guava's {@code Sets.powerSet} rejects
 *             sets with more than 30 elements
 * @return the set of all partitions, or {@code null} when power-set
 *         construction failed (input too large)
 */
private Set<Set<Set<Signal>>> getTailCombinations(Set<Signal> sigs) {
    try {
        Set<Set<Set<Signal>>> retVal = new HashSet<>();
        for (Set<Signal> s : Sets.powerSet(sigs)) {
            if (s.isEmpty()) {
                continue;
            }
            SetView<Signal> diff = Sets.difference(sigs, s);
            if (diff.isEmpty()) {
                // s covers all signals: the partition is {s} itself.
                Set<Set<Signal>> tmp = new HashSet<>();
                tmp.add(s);
                retVal.add(tmp);
            } else {
                // Combine s with every partition of the remaining signals.
                Set<Set<Set<Signal>>> tail = getTailCombinations(diff);
                for (Set<Set<Signal>> x : tail) {
                    // copy the partition and extend it with s (idiomatic copy
                    // constructor instead of an element-by-element loop)
                    Set<Set<Signal>> tmp = new HashSet<>(x);
                    tmp.add(s);
                    retVal.add(tmp);
                }
            }
        }
        return retVal;
    } catch (IllegalArgumentException e) {
        // thrown by Sets.powerSet for oversized inputs
        logger.error(e.getLocalizedMessage());
        return null;
    }
}

From source file:org.esa.snap.classification.gpf.BaseClassifier.java

/**
 * Trains and evaluates a classifier for every feature subset in the power set
 * of the complete feature list (restricted to sizes within
 * [minPowerSetSize, maxPowerSetSize]), then retrains and saves the
 * top-performing classifier if one was recorded.
 *
 * @param operator                operator used to obtain labeled instances
 * @param allLabeledInstances     labeled instances for the full feature list
 * @param completeFeatureInfoList all candidate features
 * @param opPM                    progress monitor checked for cancellation
 */
private void runFeaturePowerSet(final Operator operator, final LabeledInstances allLabeledInstances,
        final FeatureInfo[] completeFeatureInfoList, final ProgressMonitor opPM) {
    final StatusProgressMonitor pm = new StatusProgressMonitor(StatusProgressMonitor.TYPE.SUBTASK);

    try {
        // get the power set of all features
        Set<Set<FeatureInfo>> featurePowerSet = Sets
                .powerSet(ImmutableSet.copyOf(Arrays.asList(completeFeatureInfoList)));
        List<Set<FeatureInfo>> featureSetList = new ArrayList<>();
        for (Set<FeatureInfo> featureSet : featurePowerSet) {
            // keep only subsets within the configured size bounds
            if (featureSet.size() < minPowerSetSize || featureSet.size() > maxPowerSetSize)
                continue;
            featureSetList.add(featureSet);
        }
        pm.beginTask("Evaluating feature power set", featureSetList.size());

        int cnt = 1;
        for (Set<FeatureInfo> featureSet : featureSetList) {
            if (opPM.isCanceled()) {
                break;
            }

            final FeatureInfo[] featureInfos = featureSet.toArray(new FeatureInfo[featureSet.size()]);

            Classifier setClassifier = createMLClassifier(featureInfos);

            // create subset of labeledInstances
            // LabeledInstances subsetLabeledInstances = createSubsetLabeledInstances(featureInfos, allLabeledInstances);

            // NOTE(review): this passes the field `featureInfoList` rather than the
            // local `featureInfos` of the current subset — confirm this is intended.
            final LabeledInstances allLabeledInstances2 = getLabeledInstances(operator,
                    params.numTrainSamples * 2, featureInfoList);

            trainClassifier(setClassifier, getClassifierName() + '.' + cnt, allLabeledInstances2, featureInfos,
                    true);
            ++cnt;
            pm.worked(1);
        }

        classifierReport.setTopClassifier("TOP Classifier = " + topClassifierName + " at "
                + String.format("%-6.2f", topClassifierPercent * 100) + '%');

        // retrain and persist the best-performing feature subset, if any
        if (topFeatureInfoList != null) {
            featureInfoList = topFeatureInfoList;

            mlClassifier = createMLClassifier(featureInfoList);

            // create subset of labeledInstances
            //LabeledInstances subsetLabeledInstances = createSubsetLabeledInstances(featureInfoList, allLabeledInstances);

            final LabeledInstances allLabeledInstances2 = getLabeledInstances(operator,
                    params.numTrainSamples * 2, featureInfoList);

            Dataset trainDataset = trainClassifier(mlClassifier, getClassifierName(), allLabeledInstances2,
                    featureInfoList, false);

            saveClassifier(trainDataset);

            classifierTrained = true;
        }

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        pm.done();
    }
}