Example usage for com.google.common.collect Sets cartesianProduct

Introduction

This page collects example usages of com.google.common.collect.Sets.cartesianProduct from open-source projects.

Prototype

public static <B> Set<List<B>> cartesianProduct(Set<? extends B>... sets) 

Document

Returns every possible list that can be formed by choosing one element from each of the given sets in order; the "n-ary Cartesian product" (http://en.wikipedia.org/wiki/Cartesian_product) of the sets.
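
For orientation, here is a minimal, self-contained sketch of a call to the varargs overload (the class name is ours). Guava also provides an overload taking a List<? extends Set<? extends B>>, which is the form most of the examples below pass:

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.List;
import java.util.Set;

public class CartesianProductDemo {
    public static void main(String[] args) {
        // Every ordered pair that takes one element from each input set, in order.
        Set<List<Integer>> product = Sets.cartesianProduct(ImmutableSet.of(1, 2), ImmutableSet.of(3, 4));
        System.out.println(product); // prints [[1, 3], [1, 4], [2, 3], [2, 4]]
    }
}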

Usage

From source file:com.qubole.quark.planner.QuarkCube.java

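// Each element of the product is one combination of inner sets; flattening it yields their union.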
public static <E, T extends Set<E>> Set<Set<E>> cartesian(List<Set<T>> list) {
    final Set<List<T>> cartesianSet = Sets.cartesianProduct(list);
    return Sets.newHashSet(Iterables.transform(cartesianSet, new Function<List<T>, Set<E>>() {
        public Set<E> apply(List<T> l) {
            return Sets.newHashSet(Iterables.concat(l));
        }
    }));
}

From source file:io.prestosql.plugin.cassandra.NativeCassandraSession.java

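// Each element of the product is one complete combination of partition-key filter values; each is queried separately.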
private Iterable<Row> queryPartitionKeysLegacyWithMultipleQueries(CassandraTable table,
        List<Set<Object>> filterPrefixes) {
    CassandraTableHandle tableHandle = table.getTableHandle();
    List<CassandraColumnHandle> partitionKeyColumns = table.getPartitionKeyColumns();

    Set<List<Object>> filterCombinations = Sets.cartesianProduct(filterPrefixes);

    ImmutableList.Builder<Row> rowList = ImmutableList.builder();
    for (List<Object> combination : filterCombinations) {
        Select partitionKeys = CassandraCqlUtils.selectDistinctFrom(tableHandle, partitionKeyColumns);
        addWhereClause(partitionKeys.where(), partitionKeyColumns, combination);

        List<Row> resultRows = execute(partitionKeys).all();
        if (resultRows != null && !resultRows.isEmpty()) {
            rowList.addAll(resultRows);
        }
    }

    return rowList.build();
}

From source file:hudson.matrix.MatrixProject.java

/**
 * Rebuilds the {@link #configurations} list and {@link #activeConfigurations}.
 *
 * @param context
 *      We rebuild configurations right before a build, to allow configurations to be adjusted for the build.
 *      (think of it as reconfiguring a project right before a build.) And when that happens, this value is the
 *      build in progress. Otherwise this value is null (for example, when Jenkins is booting up.)
 */
/*package*/ Set<MatrixConfiguration> rebuildConfigurations(MatrixBuildExecution context) throws IOException {
    {
        // backward compatibility check to see if there's any data in the old structure
        // if so, bring them to the newer structure.
        File[] oldDirs = getConfigurationsDir().listFiles(new FileFilter() {
            public boolean accept(File child) {
                return child.isDirectory() && !child.getName().startsWith("axis-");
            }
        });
        if (oldDirs != null) {
            // rename the old directory to the new one
            for (File dir : oldDirs) {
                try {
                    Combination c = Combination.fromString(dir.getName());
                    dir.renameTo(getRootDirFor(c));
                } catch (IllegalArgumentException e) {
                    // it's not a configuration dir. Just ignore.
                }
            }
        }
    }

    CopyOnWriteMap.Tree<Combination, MatrixConfiguration> configurations = new CopyOnWriteMap.Tree<Combination, MatrixConfiguration>();
    loadConfigurations(getConfigurationsDir(), configurations, Collections.<String, String>emptyMap());
    this.configurations = configurations;

    Iterable<Combination> activeCombinations;
    if (context != null) {
        List<Set<String>> axesList = Lists.newArrayList();
        for (Axis axis : axes)
            axesList.add(Sets.newLinkedHashSet(axis.rebuild(context)));

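        // Every combination of one value per axis becomes a candidate configuration.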
        activeCombinations = Iterables.transform(Sets.cartesianProduct(axesList),
                new Function<List<String>, Combination>() {
                    public Combination apply(@Nullable List<String> strings) {
                        assert strings != null;
                        return new Combination(axes, (String[]) strings.toArray(new String[0]));
                    }
                });
    } else {
        activeCombinations = axes.list();
    }

    // find all active configurations
    final Set<MatrixConfiguration> active = new LinkedHashSet<MatrixConfiguration>();
    final boolean isDynamicFilter = isDynamicFilter(getCombinationFilter());

    for (Combination c : activeCombinations) {
        if (isDynamicFilter || c.evalGroovyExpression(axes, getCombinationFilter())) {
            LOGGER.fine("Adding configuration: " + c);
            MatrixConfiguration config = configurations.get(c);
            if (config == null) {
                config = new MatrixConfiguration(this, c);
                config.onCreatedFromScratch();
                config.save();
                configurations.put(config.getCombination(), config);
            }
            active.add(config);
        }
    }
    this.activeConfigurations = active;

    return active;
}

From source file:org.apache.flink.compiler.dag.TwoInputNode.java

protected void instantiate(OperatorDescriptorDual operator, Channel in1, Channel in2,
        List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target, CostEstimator estimator,
        RequestedGlobalProperties globPropsReq1, RequestedGlobalProperties globPropsReq2,
        RequestedLocalProperties locPropsReq1, RequestedLocalProperties locPropsReq2) {
    final PlanNode inputSource1 = in1.getSource();
    final PlanNode inputSource2 = in2.getSource();

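    // Enumerate every way of choosing one candidate channel per broadcast input.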
    for (List<NamedChannel> broadcastChannelsCombination : Sets.cartesianProduct(broadcastPlanChannels)) {

        boolean validCombination = true;

        // check whether the broadcast inputs use the same plan candidate at the branching point
        for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
            NamedChannel nc = broadcastChannelsCombination.get(i);
            PlanNode bcSource = nc.getSource();

            if (!(areBranchCompatible(bcSource, inputSource1) || areBranchCompatible(bcSource, inputSource2))) {
                validCombination = false;
                break;
            }

            // check branch compatibility against all other broadcast variables
            for (int k = 0; k < i; k++) {
                PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();

                if (!areBranchCompatible(bcSource, otherBcSource)) {
                    validCombination = false;
                    break;
                }
            }
        }

        if (!validCombination) {
            continue;
        }

        placePipelineBreakersIfNecessary(operator.getStrategy(), in1, in2);

        DualInputPlanNode node = operator.instantiate(in1, in2, this);
        node.setBroadcastInputs(broadcastChannelsCombination);

        GlobalProperties gp1 = in1.getGlobalProperties().clone().filterByNodesConstantSet(this, 0);
        GlobalProperties gp2 = in2.getGlobalProperties().clone().filterByNodesConstantSet(this, 1);
        GlobalProperties combined = operator.computeGlobalProperties(gp1, gp2);

        LocalProperties lp1 = in1.getLocalProperties().clone().filterByNodesConstantSet(this, 0);
        LocalProperties lp2 = in2.getLocalProperties().clone().filterByNodesConstantSet(this, 1);
        LocalProperties locals = operator.computeLocalProperties(lp1, lp2);

        node.initProperties(combined, locals);
        node.updatePropertiesWithUniqueSets(getUniqueFields());
        target.add(node);
    }
}

From source file:org.apache.flink.optimizer.dag.TwoInputNode.java

protected void instantiate(OperatorDescriptorDual operator, Channel in1, Channel in2,
        List<Set<? extends NamedChannel>> broadcastPlanChannels, List<PlanNode> target, CostEstimator estimator,
        RequestedGlobalProperties globPropsReq1, RequestedGlobalProperties globPropsReq2,
        RequestedLocalProperties locPropsReq1, RequestedLocalProperties locPropsReq2) {
    final PlanNode inputSource1 = in1.getSource();
    final PlanNode inputSource2 = in2.getSource();

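    // Enumerate every way of choosing one candidate channel per broadcast input.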
    for (List<NamedChannel> broadcastChannelsCombination : Sets.cartesianProduct(broadcastPlanChannels)) {

        boolean validCombination = true;

        // check whether the broadcast inputs use the same plan candidate at the branching point
        for (int i = 0; i < broadcastChannelsCombination.size(); i++) {
            NamedChannel nc = broadcastChannelsCombination.get(i);
            PlanNode bcSource = nc.getSource();

            if (!(areBranchCompatible(bcSource, inputSource1) || areBranchCompatible(bcSource, inputSource2))) {
                validCombination = false;
                break;
            }

            // check branch compatibility against all other broadcast variables
            for (int k = 0; k < i; k++) {
                PlanNode otherBcSource = broadcastChannelsCombination.get(k).getSource();

                if (!areBranchCompatible(bcSource, otherBcSource)) {
                    validCombination = false;
                    break;
                }
            }
        }

        if (!validCombination) {
            continue;
        }

        placePipelineBreakersIfNecessary(operator.getStrategy(), in1, in2);

        DualInputPlanNode node = operator.instantiate(in1, in2, this);
        node.setBroadcastInputs(broadcastChannelsCombination);

        SemanticProperties semPropsGlobalPropFiltering = getSemanticPropertiesForGlobalPropertyFiltering();
        GlobalProperties gp1 = in1.getGlobalProperties().clone()
                .filterBySemanticProperties(semPropsGlobalPropFiltering, 0);
        GlobalProperties gp2 = in2.getGlobalProperties().clone()
                .filterBySemanticProperties(semPropsGlobalPropFiltering, 1);
        GlobalProperties combined = operator.computeGlobalProperties(gp1, gp2);

        SemanticProperties semPropsLocalPropFiltering = getSemanticPropertiesForLocalPropertyFiltering();
        LocalProperties lp1 = in1.getLocalProperties().clone()
                .filterBySemanticProperties(semPropsLocalPropFiltering, 0);
        LocalProperties lp2 = in2.getLocalProperties().clone()
                .filterBySemanticProperties(semPropsLocalPropFiltering, 1);
        LocalProperties locals = operator.computeLocalProperties(lp1, lp2);

        node.initProperties(combined, locals);
        node.updatePropertiesWithUniqueSets(getUniqueFields());
        target.add(node);
    }
}

From source file:eu.aniketos.scpm.impl.CompositionPlanner.java

private List<ICompositionPlan> createCompositionPlans(ICompositionPlan serviceSpecification,
        Map<String, Set<Service>> mapTaskServices) {

    //Creation of several composition plans combining the service locations

    if (logger.isDebugEnabled()) {
        logger.debug("Creating composition Plans for process " + serviceSpecification.getCompositionPlanID()); //$NON-NLS-1$
    }

    Set<String> taskIds = mapTaskServices.keySet();

    Object[] taskIdsArray = taskIds.toArray();

    LinkedList<ICompositionPlan> listCompositionPlan = new LinkedList<ICompositionPlan>();

    XMLOutputter xmlOutput = new XMLOutputter();
    xmlOutput.setFormat(Format.getPrettyFormat());

    Iterator<String> itTaskIds = taskIds.iterator();

    List<Set<Service>> list = new LinkedList<Set<Service>>();
    while (itTaskIds.hasNext()) {
        String taskId = itTaskIds.next();
        Set<Service> services = mapTaskServices.get(taskId);
        Set<String> locations = new HashSet<String>();
        for (Service service : services) {
            locations.add(service.getLocation());
        }
        list.add(services);
    }

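    // One service per task: each element of the product is a complete service assignment.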
    Set<List<Service>> set = Sets.cartesianProduct(list);
    Object[] setArray = set.toArray();

    for (int j = 0; j < setArray.length; j++) {
        Document newDocument = BPMNParser.getDocument(serviceSpecification.getBPMNXML());

        @SuppressWarnings("unchecked")
        List<Object> listS = (List<Object>) setArray[j];
        for (int i = taskIdsArray.length - 1; i >= 0; i--) {
            String taskId = (String) taskIdsArray[i];
            Service service = (Service) listS.get(i);
            String location = service.getLocation();

            String serviceId = service.getServiceId();
            logger.debug("ServiceId: " + serviceId);

            String provider = service.getProvider();

            BPMNParser.addServiceId(newDocument, taskId, serviceId);
            BPMNParser.addLocationField(newDocument, taskId, location);
            BPMNParser.addProviderField(newDocument, taskId, provider);
        }
        int c = j + 1;
        BPMNParser.addProcessId(newDocument, "compositionPlan" + c);
        logger.debug("Created compositon plan" + c);

        String specification = xmlOutput.outputString(newDocument);
        ICompositionPlan compositionPlan = new CompositionPlan(specification);
        compositionPlan.setCompositionPlanID("compositionPlan" + c);

        //Interaction with the composition planner

        listCompositionPlan.add(compositionPlan);
        logger.debug("compositionPlan " + compositionPlan.getBPMNXML());

    }

    logger.info("Created " + listCompositionPlan.size() + " Composition Plans");

    return listCompositionPlan;

}

From source file:io.datakernel.aggregation_db.AggregationMetadata.java

private List<List<Object>> createKeyPrefixes(List<Set<Object>> betweenSets, List<Object> equalsList,
        boolean[] betweenPredicatePositions, int numberOfPredicateKeys) {
    List<List<Object>> keyPrefixes = new ArrayList<>();

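    // Each element of the product fixes one value per "between" predicate; equality values are interleaved below.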
    Set<List<Object>> cartesianProduct = Sets.cartesianProduct(betweenSets);

    for (List<Object> list : cartesianProduct) {
        List<Object> keyPrefix = newArrayList();
        int betweenListPosition = 0;
        int equalsListPosition = 0;

        for (int k = 0; k < numberOfPredicateKeys; ++k) {
            boolean betweenPosition = betweenPredicatePositions[k];
            if (betweenPosition)
                keyPrefix.add(list.get(betweenListPosition++));
            else
                keyPrefix.add(equalsList.get(equalsListPosition++));
        }

        keyPrefixes.add(keyPrefix);
    }

    return keyPrefixes;
}

From source file:grakn.core.graql.reasoner.atom.binary.RelationAtom.java

/**
 * @param parentAtom reference atom defining the mapping
 * @param unifierType type of match to be performed
 * @return set of possible COMPLETE mappings between this (child) and parent relation players
 */
private Set<List<Pair<RelationProperty.RolePlayer, RelationProperty.RolePlayer>>> getRelationPlayerMappings(
        RelationAtom parentAtom, UnifierType unifierType) {
    SetMultimap<Variable, Type> childVarTypeMap = this.getParentQuery().getVarTypeMap(unifierType.inferTypes());
    SetMultimap<Variable, Type> parentVarTypeMap = parentAtom.getParentQuery()
            .getVarTypeMap(unifierType.inferTypes());

    //establish compatible castings for each parent casting
    List<Set<Pair<RelationProperty.RolePlayer, RelationProperty.RolePlayer>>> compatibleMappingsPerParentRP = new ArrayList<>();
    if (parentAtom.getRelationPlayers().size() > this.getRelationPlayers().size())
        return new HashSet<>();

    ReasonerQuery childQuery = getParentQuery();
    parentAtom.getRelationPlayers().forEach(prp -> {
        Statement parentRolePattern = prp.getRole().orElse(null);
        if (parentRolePattern == null) {
            throw GraqlQueryException.rolePatternAbsent(parentAtom);
        }
        String parentRoleLabel = parentRolePattern.getType().isPresent() ? parentRolePattern.getType().get()
                : null;
        Role parentRole = parentRoleLabel != null ? tx().getRole(parentRoleLabel) : null;
        Variable parentRolePlayer = prp.getPlayer().var();
        Set<Type> parentTypes = parentVarTypeMap.get(parentRolePlayer);

        Set<RelationProperty.RolePlayer> compatibleRelationPlayers = new HashSet<>();
        this.getRelationPlayers().stream()
                //check for role compatibility
                .filter(crp -> {
                    Statement childRolePattern = crp.getRole().orElse(null);
                    if (childRolePattern == null) {
                        throw GraqlQueryException.rolePatternAbsent(this);
                    }
                    String childRoleLabel = childRolePattern.getType().isPresent()
                            ? childRolePattern.getType().get()
                            : null;
                    Role childRole = childRoleLabel != null ? tx().getRole(childRoleLabel) : null;

                    boolean varCompatibility = unifierType.equivalence() == null
                            || parentRolePattern.var().isReturned() == childRolePattern.var().isReturned();
                    return varCompatibility && unifierType.roleCompatibility(parentRole, childRole);
                })
                //check for inter-type compatibility
                .filter(crp -> {
                    Variable childVar = crp.getPlayer().var();
                    Set<Type> childTypes = childVarTypeMap.get(childVar);
                    return unifierType.typeCompatibility(parentTypes, childTypes)
                            && parentTypes.stream().allMatch(parentType -> unifierType
                                    .typePlayability(childQuery, childVar, parentType));
                })
                //check for substitution compatibility
                .filter(crp -> {
                    Set<Atomic> parentIds = parentAtom.getPredicates(prp.getPlayer().var(), IdPredicate.class)
                            .collect(toSet());
                    Set<Atomic> childIds = this.getPredicates(crp.getPlayer().var(), IdPredicate.class)
                            .collect(toSet());
                    return unifierType.idCompatibility(parentIds, childIds);
                })
                //check for value predicate compatibility
                .filter(crp -> {
                    Set<Atomic> parentVP = parentAtom.getPredicates(prp.getPlayer().var(), ValuePredicate.class)
                            .collect(toSet());
                    Set<Atomic> childVP = this.getPredicates(crp.getPlayer().var(), ValuePredicate.class)
                            .collect(toSet());
                    return unifierType.valueCompatibility(parentVP, childVP);
                })
                //check linked resources
                .filter(crp -> {
                    Variable parentVar = prp.getPlayer().var();
                    Variable childVar = crp.getPlayer().var();
                    return unifierType.attributeCompatibility(parentAtom.getParentQuery(),
                            this.getParentQuery(), parentVar, childVar);
                }).forEach(compatibleRelationPlayers::add);

        if (!compatibleRelationPlayers.isEmpty()) {
            compatibleMappingsPerParentRP.add(compatibleRelationPlayers.stream()
                    .map(crp -> new Pair<>(crp, prp)).collect(Collectors.toSet()));
        }
    });

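    // Pick one compatible child-parent pairing per parent role player, then keep only complete, injective mappings.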
    return Sets.cartesianProduct(compatibleMappingsPerParentRP).stream().filter(list -> !list.isEmpty())
            //check the same child rp is not mapped to multiple parent rps
            .filter(list -> {
                List<RelationProperty.RolePlayer> listChildRps = list.stream().map(Pair::getKey)
                        .collect(Collectors.toList());
                //NB: this preserves cardinality instead of removing all occurring instances which is what we want
                return ReasonerUtils.listDifference(listChildRps, this.getRelationPlayers()).isEmpty();
            })
            //check all parent rps mapped
            .filter(list -> {
                List<RelationProperty.RolePlayer> listParentRps = list.stream().map(Pair::getValue)
                        .collect(Collectors.toList());
                return listParentRps.containsAll(parentAtom.getRelationPlayers());
            }).collect(toSet());
}

From source file:org.jamocha.dn.compiler.pathblocks.PathBlocks.java

protected static void vertical(final UndirectedGraph<FilterInstance, ConflictEdge> graph,
        final Set<Set<FilterInstance>> filterInstancesGroupedByRule, final PathBlockSet resultBlocks) {
    final Set<Set<Set<FilterInstance>>> filterInstancesPowerSet = Sets.powerSet(filterInstancesGroupedByRule);
    final Iterator<Set<Set<FilterInstance>>> iterator = filterInstancesPowerSet.iterator();
    // skip empty set
    iterator.next();
    while (iterator.hasNext()) {
        final Set<Set<FilterInstance>> powerSetElement = iterator.next();
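        // One filter instance per rule group: each element of the product spans all of the selected rules.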
        final Set<List<FilterInstance>> cartesianProduct = Sets
                .cartesianProduct(ImmutableList.copyOf(powerSetElement));
        for (final List<FilterInstance> filterInstances : cartesianProduct) {
            final Block newBlock = new Block(graph);
            newBlock.addFilterInstances(filterInstances.stream().collect(toMap(FilterInstance::getRuleOrProxy,
                    fi -> Collections.singleton(new FilterInstancesSideBySide(fi)))));
            horizontalRecursion(newBlock, new Stack<>(), resultBlocks);
        }
    }
}

From source file:com.facebook.presto.sql.analyzer.StatementAnalyzer.java

private List<List<FieldOrExpression>> analyzeGroupBy(QuerySpecification node, RelationType tupleDescriptor,
        AnalysisContext context, List<FieldOrExpression> outputExpressions) {
    List<Set<Set<Expression>>> enumeratedGroupingSets = node.getGroupBy().stream()
            .map(GroupingElement::enumerateGroupingSets).distinct().collect(toImmutableList());

    // compute cross product of enumerated grouping sets, if there are any
    List<List<Expression>> computedGroupingSets = ImmutableList.of();
    if (!enumeratedGroupingSets.isEmpty()) {
        computedGroupingSets = Sets
                .cartesianProduct(enumeratedGroupingSets).stream().map(groupingSetList -> groupingSetList
                        .stream().flatMap(Collection::stream).distinct().collect(toImmutableList()))
                .distinct().collect(toImmutableList());
    }

    // if there are aggregates, but no grouping columns, create a grand total grouping set
    if (computedGroupingSets.isEmpty() && !extractAggregates(node).isEmpty()) {
        computedGroupingSets = ImmutableList.of(ImmutableList.of());
    }

    if (computedGroupingSets.size() > 1) {
        throw new SemanticException(NOT_SUPPORTED, node,
                "Grouping by multiple sets of columns is not yet supported");
    }

    List<List<FieldOrExpression>> analyzedGroupingSets = computedGroupingSets.stream()
            .map(groupingSet -> analyzeGroupingColumns(groupingSet, node, tupleDescriptor, context,
                    outputExpressions))
            .collect(toImmutableList());

    analysis.setGroupingSets(node, analyzedGroupingSets);
    return analyzedGroupingSets;
}