Example usage for com.google.common.collect Sets union

List of usage examples for com.google.common.collect Sets union

Introduction

This page collects example usages of com.google.common.collect Sets union, drawn from open-source projects.

Prototype

public static <E> SetView<E> union(final Set<? extends E> set1, final Set<? extends E> set2) 

Document

Returns an unmodifiable view of the union of two sets.
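
The returned SetView is a live, unmodifiable view: it reflects later changes to the backing sets, and it iterates the elements of set1 followed by the elements of set2 that are not in set1, so shared elements appear once. A minimal, self-contained sketch of these view semantics (set contents are illustrative):

import com.google.common.collect.Sets;
import com.google.common.collect.Sets.SetView;

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class SetsUnionDemo {
    public static void main(String[] args) {
        Set<String> first = new LinkedHashSet<>(List.of("a", "b"));
        Set<String> second = new LinkedHashSet<>(List.of("b", "c"));

        SetView<String> union = Sets.union(first, second);
        System.out.println(union); // [a, b, c] -- the shared element appears once

        // The view is live: changes to a backing set show through.
        second.add("d");
        System.out.println(union.contains("d")); // true

        // Materialize a snapshot when an independent, stable set is needed.
        Set<String> snapshot = union.immutableCopy();
        System.out.println(snapshot.size()); // 4
    }
}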

Usage

From source file:org.sosy_lab.cpachecker.cfa.ast.c.CIdExpressionCollectingVisitor.java

@Override
public Set<CIdExpression> visit(CFunctionCallExpression pIastFunctionCallExpression) throws RuntimeException {
    Set<CIdExpression> result = Collections.emptySet();
    for (CExpression e : pIastFunctionCallExpression.getParameterExpressions()) {
        // Sets.union returns a view and does not modify its arguments,
        // so the result must be re-assigned.
        result = Sets.union(result, e.accept(this));
    }
    return result;
}
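
Note that Sets.union never mutates its arguments; it returns a view, which is why the loop above must re-assign result (the original snippet discarded the return value, so result stayed empty). Each re-assignment stacks one view on top of the previous one, which is fine for short parameter lists but worth replacing with an explicit accumulator set when the chain would grow long.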

From source file:com.hortonworks.streamline.streams.layout.component.TopologyDag.java

List<Component> topOrder() {
    Map<Component, State> state = new HashMap<>();
    List<Component> res = new ArrayList<>();
    for (Component component : Sets.union(inputComponents, outputComponents)) {
        if (state.get(component) != State.VISITED) {
            res.addAll(dfs(component, state));
        }
    }
    Collections.reverse(res);
    return res;
}
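
Here the union view lets the loop seed the depth-first search from every component exactly once: Sets.union iterates the elements of inputComponents followed by the elements of outputComponents not already seen, so a component present in both sets is visited a single time.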

From source file:msi.gaml.factories.ModelAssembler.java

public ModelDescription assemble(final String projectPath, final String modelPath,
        final Iterable<ISyntacticElement> allModels, final ValidationContext collector, final boolean document,
        final Map<String, ModelDescription> mm) {
    final ImmutableList<ISyntacticElement> models = ImmutableList.copyOf(allModels);
    final TOrderedHashMap<String, ISyntacticElement> speciesNodes = new TOrderedHashMap();
    final TOrderedHashMap<String, TOrderedHashMap<String, ISyntacticElement>>[] experimentNodes = new TOrderedHashMap[1];
    final ISyntacticElement globalNodes = SyntacticFactory.create(GLOBAL, (EObject) null, true);
    final ISyntacticElement source = models.get(0);
    Facets globalFacets = null;
    if (source.hasFacet(IKeyword.PRAGMA)) {
        final Facets facets = source.copyFacets(null);
        final List<String> pragmas = (List<String>) facets.get(IKeyword.PRAGMA).getExpression().getConstValue();
        collector.resetInfoAndWarning();
        if (pragmas != null) {
            if (pragmas.contains(IKeyword.NO_INFO)) {
                collector.setNoInfo();
            }
            if (pragmas.contains(IKeyword.NO_WARNING)) {
                collector.setNoWarning();
            }
            if (pragmas.contains(IKeyword.NO_EXPERIMENT)) {
                collector.setNoExperiment();
            }
        }

    }
    final Map<String, SpeciesDescription> tempSpeciesCache = new THashMap<>();

    for (final ISyntacticElement cm : models.reverse()) {
        final SyntacticModelElement currentModel = (SyntacticModelElement) cm;
        if (currentModel != null) {
            if (currentModel.hasFacets()) {
                if (globalFacets == null) {
                    globalFacets = new Facets(currentModel.copyFacets(null));
                } else {
                    globalFacets.putAll(currentModel.copyFacets(null));
                }
            }
            currentModel.visitChildren(element -> globalNodes.addChild(element));
            SyntacticVisitor visitor = element -> addSpeciesNode(element, speciesNodes, collector);
            currentModel.visitSpecies(visitor);

            // We input the species so that grids are always the last ones
            // (see DiffusionStatement)
            currentModel.visitGrids(visitor);
            visitor = element -> {
                if (experimentNodes[0] == null) {
                    experimentNodes[0] = new TOrderedHashMap();
                }
                addExperimentNode(element, currentModel.getName(), experimentNodes[0], collector);

            };
            currentModel.visitExperiments(visitor);

        }
    }

    final String modelName = buildModelName(source.getName());

    // We build a list of working paths from which the composite model will
    // be able to look for resources. These working paths come from the
    // imported models

    Set<String> absoluteAlternatePathAsStrings = models.isEmpty() ? null
            : ImmutableSet.copyOf(
                    Iterables.transform(models.reverse(), each -> ((SyntacticModelElement) each).getPath()));

    if (mm != null) {
        for (final ModelDescription m1 : mm.values()) {
            for (final String im : m1.getAlternatePaths()) {
                absoluteAlternatePathAsStrings = Sets.union(absoluteAlternatePathAsStrings,
                        Collections.singleton(im));
            }
        }
    }

    final ModelDescription model = new ModelDescription(modelName, null, projectPath, modelPath,
            source.getElement(), null, ModelDescription.ROOT, null, globalFacets, collector,
            absoluteAlternatePathAsStrings);

    final Collection<String> allModelNames = models.size() == 1 ? null
            : ImmutableSet.copyOf(
                    Iterables.transform(Iterables.skip(models, 1), each -> buildModelName(each.getName())));
    model.setImportedModelNames(allModelNames);
    model.isDocumenting(document);

    // hqnghi add micro-models
    if (mm != null) {
        // model.setMicroModels(mm);
        model.addChildren(mm.values());
    }
    // end-hqnghi
    // recursively add user-defined species to world and down on to the
    // hierarchy
    speciesNodes.forEachValue(speciesNode -> {
        addMicroSpecies(model, speciesNode, tempSpeciesCache);
        return true;
    });
    if (experimentNodes[0] != null) {
        experimentNodes[0].forEachEntry((s, b) -> {
            b.forEachValue(experimentNode -> {
                addExperiment(s, model, experimentNode, tempSpeciesCache);
                return true;
            });
            return true;
        });
    }

    // Parent the species and the experiments of the model (all are now
    // known).
    speciesNodes.forEachValue(speciesNode -> {
        parentSpecies(model, speciesNode, model, tempSpeciesCache);
        return true;
    });

    if (experimentNodes[0] != null) {
        experimentNodes[0].forEachEntry((s, b) -> {
            b.forEachValue(experimentNode -> {
                parentExperiment(model, experimentNode);
                return true;
            });
            return true;
        });
    }

    // Initialize the hierarchy of types
    model.buildTypes();
    // hqnghi build micro-models as types
    if (mm != null) {
        for (final Entry<String, ModelDescription> entry : mm.entrySet()) {
            model.getTypesManager().alias(entry.getValue().getName(), entry.getKey());
        }
        // end-hqnghi
    }

    // Make species and experiments recursively create their attributes,
    // actions....
    complementSpecies(model, globalNodes);

    speciesNodes.forEachValue(speciesNode -> {
        complementSpecies(model.getMicroSpecies(speciesNode.getName()), speciesNode);
        return true;
    });

    if (experimentNodes[0] != null) {
        experimentNodes[0].forEachEntry((s, b) -> {
            b.forEachValue(experimentNode -> {
                complementSpecies(model.getExperiment(experimentNode.getName()), experimentNode);
                return true;
            });
            return true;
        });
    }

    // Complement recursively the different species (incl. the world). The
    // recursion is hierarchical

    model.inheritFromParent();

    for (final SpeciesDescription sd : getSpeciesInHierarchicalOrder(model)) {
        sd.inheritFromParent();
        if (sd.isExperiment()) {
            if (!sd.finalizeDescription()) {
                return null;
            }
        }
    }

    // Issue #1708 (put before the finalization)
    if (model.hasFacet(SCHEDULES) || model.hasFacet(FREQUENCY)) {
        createSchedulerSpecies(model);
    }

    if (!model.finalizeDescription()) {
        return null;
    }

    if (document) {
        collector.document(model);
    }
    return model;

}
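
The alternate-path accumulation above calls Sets.union once per imported path, so each iteration wraps the previous view in another; membership checks then walk the whole chain of views. A hedged alternative sketch under the same assumptions (the class and parameter names are hypothetical), collecting everything once into a flat immutable set:

import com.google.common.collect.ImmutableSet;

import java.util.Set;

// Hypothetical helper: flat accumulation instead of chained Sets.union views.
public final class PathAccumulator {
    static Set<String> accumulate(Set<String> existing, Iterable<String> extra) {
        return ImmutableSet.<String>builder()
                .addAll(existing) // the paths gathered so far
                .addAll(extra)    // the alternate paths of one imported model
                .build();         // one flat set, no nested views
    }
}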

From source file:org.phoenicis.tools.files.FileUtilities.java

public Set<PosixFilePermission> intToPosixFilePermission(int mode) {
    if (mode >= 1000 || mode < 0) {
        throw new IllegalArgumentException("Invalid mode " + mode);
    }

    final int owner = mode / 100;
    final int group = (mode - owner * 100) / 10;
    final int others = mode - owner * 100 - group * 10;

    if (owner > 7 || group > 7 || others > 7) {
        throw new IllegalArgumentException("Invalid mode " + mode);
    }

    return Sets.union(
            Sets.union(singleIntToFilePermission(owner, "OWNER"), singleIntToFilePermission(group, "GROUP")),
            singleIntToFilePermission(others, "OTHERS"));
}

From source file:org.apache.aurora.scheduler.resources.ResourceBag.java

/**
 * Applies {@code operator} to this and {@code other} resource values. Any missing resource type
 * values on either side will get substituted with 0.0 before applying the operator.
 *
 * @param other Other ResourceBag.
 * @param operator Operator to apply.
 * @return Operation result.
 */
private ResourceBag binaryOp(ResourceBag other, BinaryOperator<Double> operator) {
    ImmutableMap.Builder<ResourceType, Double> builder = ImmutableMap.builder();
    Set<ResourceType> resourceTypes = Sets.union(resourceVectors.keySet(), other.getResourceVectors().keySet());
    for (ResourceType type : resourceTypes) {
        Double left = resourceVectors.getOrDefault(type, 0.0);
        Double right = other.getResourceVectors().getOrDefault(type, 0.0);
        builder.put(type, operator.apply(left, right));
    }

    return new ResourceBag(builder.build());
}
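
Taking the union of the two maps' key sets is a common way to apply a binary operator across all keys, substituting a default for whichever side is missing a key. A minimal standalone sketch of the same pattern (names and values are illustrative):

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;

import java.util.Map;
import java.util.function.BinaryOperator;

public class MapMergeDemo {
    // Merge two maps key-by-key, treating absent keys as 0.0.
    static Map<String, Double> merge(Map<String, Double> a, Map<String, Double> b,
            BinaryOperator<Double> op) {
        ImmutableMap.Builder<String, Double> out = ImmutableMap.builder();
        // The union view yields every key exactly once, so no duplicate puts.
        for (String key : Sets.union(a.keySet(), b.keySet())) {
            out.put(key, op.apply(a.getOrDefault(key, 0.0), b.getOrDefault(key, 0.0)));
        }
        return out.build();
    }

    public static void main(String[] args) {
        Map<String, Double> left = Map.of("cpu", 2.0, "ram", 4.0);
        Map<String, Double> right = Map.of("ram", 1.0, "disk", 8.0);
        // {cpu=2.0, ram=5.0, disk=8.0} (iteration order may vary)
        System.out.println(merge(left, right, Double::sum));
    }
}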

From source file:com.opengamma.strata.measure.credit.CdsTradeCalculationFunction.java

@Override
public FunctionRequirements requirements(CdsTrade trade, Set<Measure> measures,
        CalculationParameters parameters, ReferenceData refData) {

    Cds cds = trade.getProduct();

    Currency notionalCurrency = cds.getFeeLeg().getPeriodicPayments().getNotional().getCurrency();
    Currency feeCurrency = cds.getFeeLeg().getUpfrontFee().getCurrency();

    Set<MarketDataId<?>> rateCurveIds = ImmutableSet.of(IsdaYieldCurveInputsId.of(notionalCurrency),
            IsdaYieldCurveInputsId.of(feeCurrency));

    Set<Currency> currencies = ImmutableSet.of(notionalCurrency, feeCurrency);
    ReferenceInformation refInfo = cds.getReferenceInformation();
    if (refInfo instanceof SingleNameReferenceInformation) {
        SingleNameReferenceInformation singleNameRefInfo = (SingleNameReferenceInformation) refInfo;
        Set<MarketDataId<?>> keys = ImmutableSet.of(IsdaSingleNameCreditCurveInputsId.of(singleNameRefInfo),
                IsdaSingleNameRecoveryRateId.of(singleNameRefInfo));
        return FunctionRequirements.builder().valueRequirements(Sets.union(rateCurveIds, keys))
                .outputCurrencies(currencies).build();

    } else if (refInfo instanceof IndexReferenceInformation) {
        IndexReferenceInformation indexRefInfo = (IndexReferenceInformation) refInfo;
        Set<MarketDataId<?>> keys = ImmutableSet.of(IsdaIndexCreditCurveInputsId.of(indexRefInfo),
                IsdaIndexRecoveryRateId.of(indexRefInfo));
        return FunctionRequirements.builder().valueRequirements(Sets.union(rateCurveIds, keys))
                .outputCurrencies(currencies).build();

    } else {
        throw new IllegalStateException("Unknown reference information type: " + refInfo.getType());
    }
}

From source file:org.geogit.api.plumbing.diff.DiffCounter.java

private DiffObjectCount countBucketsChildren(ImmutableSortedMap<Integer, Bucket> buckets,
        Iterator<Node> children, final NodeStorageOrder refOrder, final int depth) {

    final SortedSetMultimap<Integer, Node> treesByBucket;
    final SortedSetMultimap<Integer, Node> featuresByBucket;
    {
        // make sure values are sorted according to refOrder
        treesByBucket = TreeMultimap.create(Ordering.natural(), refOrder);
        featuresByBucket = TreeMultimap.create(Ordering.natural(), refOrder);
        while (children.hasNext()) {
            Node ref = children.next();
            Integer bucket = refOrder.bucket(ref, depth);
            if (ref.getType().equals(TYPE.TREE)) {
                treesByBucket.put(bucket, ref);
            } else {
                featuresByBucket.put(bucket, ref);
            }
        }
    }

    DiffObjectCount count = new DiffObjectCount();

    { // count the full size of all buckets into which no children fall
        final Set<Integer> lonelyBuckets = Sets.difference(buckets.keySet(),
                Sets.union(featuresByBucket.keySet(), treesByBucket.keySet()));

        for (Integer bucket : lonelyBuckets) {
            ObjectId bucketId = buckets.get(bucket).id();
            count.add(sizeOfTree(bucketId));
        }
    }
    { // count the full size of all children whose buckets don't exist in the buckets tree
        for (Integer bucket : Sets.difference(featuresByBucket.keySet(), buckets.keySet())) {
            SortedSet<Node> refs = featuresByBucket.get(bucket);
            count.addFeatures(refs.size());
        }

        for (Integer bucket : Sets.difference(treesByBucket.keySet(), buckets.keySet())) {
            SortedSet<Node> refs = treesByBucket.get(bucket);
            count.add(aggregateSize(refs));
        }
    }

    // find the number of diffs of the intersection
    final Set<Integer> commonBuckets = Sets.intersection(buckets.keySet(),
            Sets.union(featuresByBucket.keySet(), treesByBucket.keySet()));
    for (Integer bucket : commonBuckets) {

        Iterator<Node> refs = Iterators.concat(treesByBucket.get(bucket).iterator(),
                featuresByBucket.get(bucket).iterator());

        final ObjectId bucketId = buckets.get(bucket).id();
        final RevTree bucketTree = getTree(bucketId);

        if (bucketTree.isEmpty()) {
            // unlikely
            count.add(aggregateSize(refs));
        } else if (!bucketTree.buckets().isPresent()) {
            count.add(countChildrenDiffs(bucketTree.children(), refs));
        } else {
            final int deeperBucketsDepth = depth + 1;
            final ImmutableSortedMap<Integer, Bucket> deeperBuckets;
            deeperBuckets = bucketTree.buckets().get();
            count.add(countBucketsChildren(deeperBuckets, refs, refOrder, deeperBucketsDepth));
        }
    }

    return count;
}
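
The three-way split above — buckets with no matching children (Sets.difference against the union of both child key sets), children with no matching bucket (Sets.difference the other way), and the overlap (Sets.intersection with the union) — is a standard way to partition two keyed collections before a per-key comparison. A compact sketch of the partitioning alone (set contents are illustrative):

import com.google.common.collect.Sets;

import java.util.Set;

public class PartitionDemo {
    public static void main(String[] args) {
        Set<Integer> left = Set.of(1, 2, 3);
        Set<Integer> right = Set.of(3, 4);

        Set<Integer> onlyLeft = Sets.difference(left, right);  // [1, 2]
        Set<Integer> onlyRight = Sets.difference(right, left); // [4]
        Set<Integer> common = Sets.intersection(left, right);  // [3]

        // The three views together cover Sets.union(left, right) exactly.
        System.out.println(onlyLeft.size() + onlyRight.size() + common.size()
                == Sets.union(left, right).size()); // true
    }
}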

From source file:org.apache.beam.runners.fnexecution.graph.LengthPrefixUnknownCoders.java

private static MessageWithComponents createLengthPrefixByteArrayCoder(String coderId,
        RunnerApi.Components components) {
    MessageWithComponents.Builder rvalBuilder = MessageWithComponents.newBuilder();

    String byteArrayCoderId = generateUniqueId(coderId + "-byte_array", Sets
            .union(components.getCodersMap().keySet(), rvalBuilder.getComponents().getCodersMap().keySet()));
    Coder.Builder byteArrayCoder = Coder.newBuilder();
    byteArrayCoder.getSpecBuilder().getSpecBuilder().setUrn(BYTES_CODER_TYPE);
    rvalBuilder.getComponentsBuilder().putCoders(byteArrayCoderId, byteArrayCoder.build());
    rvalBuilder.getCoderBuilder().addComponentCoderIds(byteArrayCoderId).getSpecBuilder().getSpecBuilder()
            .setUrn(LENGTH_PREFIX_CODER_TYPE);

    return rvalBuilder.build();
}
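
Here generateUniqueId draws candidate ids against the union of the coder ids already present in components and those staged in rvalBuilder. Its real implementation lives in the Beam codebase; the helper below is a hypothetical stand-in that only illustrates using a union view as the reserved-id set:

import com.google.common.collect.Sets;

import java.util.Set;

public class UniqueIdDemo {
    // Hypothetical: append a counter until the candidate avoids every used id.
    static String uniqueId(String prefix, Set<String> used) {
        if (!used.contains(prefix)) {
            return prefix;
        }
        int i = 1;
        while (used.contains(prefix + i)) {
            i++;
        }
        return prefix + i;
    }

    public static void main(String[] args) {
        Set<String> existing = Set.of("c-byte_array");
        Set<String> pending = Set.of("c-byte_array1");
        // Checks both sets at once through the union view; prints c-byte_array2.
        System.out.println(uniqueId("c-byte_array", Sets.union(existing, pending)));
    }
}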

From source file:com.cloudera.exhibit.etl.tbl.SumTopTbl.java

@Override
public GenericData.Record merge(GenericData.Record current, GenericData.Record next) {
    if (current == null) {
        return next;
    }
    Map<String, GenericData.Record> curValue = (Map<String, GenericData.Record>) current.get("value");
    Map<String, GenericData.Record> nextValue = (Map<String, GenericData.Record>) next.get("value");
    Schema vschema = intermediate.getField("value").schema().getValueType();
    for (String key : Sets.union(curValue.keySet(), nextValue.keySet())) {
        GenericData.Record sum = (GenericData.Record) SumTbl.add(curValue.get(key), nextValue.get(key),
                vschema);
        current.put(key, sum);
    }
    return current;
}

From source file:com.facebook.buck.haskell.HaskellLibraryDescription.java

private Archive requireStaticLibrary(BuildTarget baseTarget, BuildRuleParams baseParams,
        BuildRuleResolver resolver, SourcePathResolver pathResolver, SourcePathRuleFinder ruleFinder,
        CxxPlatform cxxPlatform, Arg args, Linker.LinkableDepType depType) throws NoSuchBuildTargetException {
    Preconditions.checkArgument(Sets
            .intersection(baseTarget.getFlavors(), Sets.union(Type.FLAVOR_VALUES, cxxPlatforms.getFlavors()))
            .isEmpty());
    BuildTarget target = baseTarget.withAppendedFlavors(
            depType == Linker.LinkableDepType.STATIC ? Type.STATIC.getFlavor() : Type.STATIC_PIC.getFlavor(),
            cxxPlatform.getFlavor());
    Optional<Archive> archive = resolver.getRuleOptionalWithType(target, Archive.class);
    if (archive.isPresent()) {
        return archive.get();
    }
    return resolver.addToIndex(createStaticLibrary(target, baseParams, resolver, pathResolver, ruleFinder,
            cxxPlatform, args, depType));
}