Example usage for com.google.common.collect Sets union

List of usage examples for com.google.common.collect Sets union

Introduction

On this page you can find example usages for com.google.common.collect Sets.union.

Prototype

public static <E> SetView<E> union(final Set<? extends E> set1, final Set<? extends E> set2) 

Document

Returns an unmodifiable view of the union of two sets.
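
Before the real-world usages, here is a minimal, self-contained sketch (the class name SetsUnionExample and the sample sets a and b are illustrative, not taken from the examples below) showing that the returned SetView is a live, unmodifiable view of its inputs and how to materialize it when an independent copy is needed:

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.LinkedHashSet;
import java.util.Set;

public class SetsUnionExample {
    public static void main(String[] args) {
        Set<String> a = new LinkedHashSet<>(ImmutableSet.of("x", "y"));
        Set<String> b = new LinkedHashSet<>(ImmutableSet.of("y", "z"));

        // The view contains every element that is in a, in b, or in both.
        Sets.SetView<String> union = Sets.union(a, b);
        System.out.println(union); // [x, y, z]

        // The view is backed by the input sets: later changes to a or b are
        // visible through it, but the view itself rejects mutation.
        a.add("w");
        System.out.println(union.size()); // 4

        // Materialize the view when an independent copy is needed.
        ImmutableSet<String> snapshot = union.immutableCopy();
        Set<String> mutableCopy = new LinkedHashSet<>(union);
        System.out.println(snapshot.size() + " / " + mutableCopy.size()); // 4 / 4
    }
}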

Usage

From source file:gobblin.data.management.policy.CombineSelectionPolicy.java

private static Set<DatasetVersion> unionDatasetVersions(Collection<Set<DatasetVersion>> sets) {
    if (sets.size() <= 0) {
        return Sets.newHashSet();
    }
    Iterator<Set<DatasetVersion>> it = sets.iterator();
    Set<DatasetVersion> outputSet = it.next();
    while (it.hasNext()) {
        outputSet = Sets.union(outputSet, it.next());
    }
    return outputSet;
}
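
A note on this pattern: each Sets.union call returns a view backed by its arguments, so the loop above builds a chain of nested views, and membership tests or size() on the result walk that chain. If the combined set is queried heavily, copying everything into one concrete set avoids the indirection. A hedged sketch of that alternative (same method shape, illustrative only, not the project's code):

private static Set<DatasetVersion> unionDatasetVersions(Collection<Set<DatasetVersion>> sets) {
    // Accumulate into a single concrete set instead of stacking union views.
    Set<DatasetVersion> outputSet = Sets.newHashSet();
    for (Set<DatasetVersion> set : sets) {
        outputSet.addAll(set);
    }
    return outputSet;
}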

From source file:org.caleydo.view.domino.api.model.typed.TypedSet.java

private static int or(BitSetSet a, Set<Integer> b) {
    if (b instanceof BitSetSet) {
        BitSetSet s = BitSetSet.or(a, (BitSetSet) b);
        return s.size();
    }
    return Sets.union(b, a).size(); // as the predicate is: in the second argument
}

From source file:org.gradle.api.internal.resolve.JvmLocalLibraryMetaDataAdapter.java

private void createJvmAssemblyLocalComponentMetaData(EnumMap<UsageKind, List<PublishArtifact>> artifacts,
        JvmAssembly assembly, EnumMap<UsageKind, Iterable<DependencySpec>> dependenciesPerUsage,
        EnumMap<UsageKind, TaskDependency> buildDependenciesPerUsage, boolean toAssembly) {
    configureUsageMetadata(UsageKind.API, toAssembly ? assembly : null, Collections.<DependencySpec>emptyList(),
            dependenciesPerUsage, buildDependenciesPerUsage);
    configureUsageMetadata(UsageKind.RUNTIME, toAssembly ? assembly : null,
            Collections.<DependencySpec>emptyList(), dependenciesPerUsage, buildDependenciesPerUsage);
    if (toAssembly) {
        // TODO:Cedric This is an approximation: when a component wants to compile against the assembly of
        // a library (not the jar), then we should give it the *stubbed classes* instead of the raw classes. However:
        // - there's no such thing as a "stubbed classes assembly"
        // - for performance reasons only the classes that belong to the API are stubbed, so we would miss the classes that do not belong to the API
        // So this makes the UsageKind.API misleading (should this be COMPILE?).
        addArtifact(UsageKind.API, assembly.getClassDirectories(), artifacts);
        addArtifact(UsageKind.RUNTIME,
                Sets.union(assembly.getClassDirectories(), assembly.getResourceDirectories()), artifacts);
    }
}

From source file:gobblin.source.extractor.extract.QueryBasedSource.java

@Override
public List<WorkUnit> getWorkunits(SourceState state) {
    initLogger(state);

    List<WorkUnit> workUnits = Lists.newArrayList();

    // Map<String, String> tableNameToEntityMap = Maps.newHashMap();
    Set<SourceEntity> entities = getFilteredSourceEntities(state);

    Map<SourceEntity, State> tableSpecificPropsMap = shouldObtainTablePropsFromConfigStore(state)
            ? getTableSpecificPropsFromConfigStore(entities, state)
            : getTableSpecificPropsFromState(entities, state);
    Map<SourceEntity, Long> prevWatermarksByTable = getPreviousWatermarksForAllTables(state);

    for (SourceEntity sourceEntity : Sets.union(entities, prevWatermarksByTable.keySet())) {

        log.info("Source entity to be processed: {}, carry-over from previous state: {} ", sourceEntity,
                !entities.contains(sourceEntity));

        SourceState combinedState = getCombinedState(state, tableSpecificPropsMap.get(sourceEntity));
        long previousWatermark = prevWatermarksByTable.containsKey(sourceEntity)
                ? prevWatermarksByTable.get(sourceEntity)
                : ConfigurationKeys.DEFAULT_WATERMARK_VALUE;

        // If a table name exists in prevWatermarksByTable (i.e., it has a previous watermark) but does not exist
        // in tableNameToEntityMap, create an empty workunit for it, so that its previous watermark is preserved.
        // This is done by overriding the high watermark to be the same as the previous watermark.
        if (!entities.contains(sourceEntity)) {
            combinedState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, previousWatermark);
        }

        workUnits.addAll(generateWorkUnits(sourceEntity, state, previousWatermark));
    }

    log.info("Total number of workunits for the current run: " + workUnits.size());
    List<WorkUnit> previousWorkUnits = this.getPreviousWorkUnitsForRetry(state);
    log.info("Total number of incomplete tasks from the previous run: " + previousWorkUnits.size());
    workUnits.addAll(previousWorkUnits);

    int numOfMultiWorkunits = state.getPropAsInt(ConfigurationKeys.MR_JOB_MAX_MAPPERS_KEY,
            ConfigurationKeys.DEFAULT_MR_JOB_MAX_MAPPERS);

    return pack(workUnits, numOfMultiWorkunits);
}

From source file:com.facebook.buck.android.relinker.RelinkerRule.java

@Override
public ImmutableList<Step> getBuildSteps(BuildContext context, final BuildableContext buildableContext) {

    final ImmutableList.Builder<Step> relinkerSteps = ImmutableList.builder();
    if (isRelinkable) {
        ImmutableList<Arg> args = ImmutableList.<Arg>builder().addAll(linkerArgs)
                .add(new StringArg("-Wl,--version-script=" + getRelativeVersionFilePath().toString())).build();

        relinkerSteps.addAll(new CxxLink(
                buildRuleParams.withFlavor(ImmutableFlavor.of("cxx-link")).withoutFlavor(
                        LinkerMapMode.NO_LINKER_MAP.getFlavor()),
                context.getSourcePathResolver(), linker, getLibFilePath(), args,
                cxxBuckConfig.getLinkScheduleInfo(), cxxBuckConfig.shouldCacheLinks()).getBuildSteps(context,
                        buildableContext));
        buildableContext.recordArtifact(getRelativeVersionFilePath());
    }

    buildableContext.recordArtifact(getSymbolsNeededOutPath());

    return ImmutableList.of(new MakeCleanDirectoryStep(getProjectFilesystem(), getScratchDirPath()),
            new AbstractExecutionStep("xdso-dce relinker") {
                @Override
                public StepExecutionResult execute(ExecutionContext context)
                        throws IOException, InterruptedException {
                    ImmutableSet<String> symbolsNeeded = readSymbolsNeeded();
                    if (!isRelinkable) {
                        getProjectFilesystem().copyFile(getBaseLibPath(), getLibFilePath());
                        buildableContext.recordArtifact(getLibFilePath());
                    } else {
                        writeVersionScript(context.getProcessExecutor(), symbolsNeeded);
                        for (Step s : relinkerSteps.build()) {
                            StepExecutionResult executionResult = s.execute(context);
                            if (!executionResult.isSuccess()) {
                                return StepExecutionResult.ERROR;
                            }
                        }
                    }
                    writeSymbols(getSymbolsNeededOutPath(), Sets.union(symbolsNeeded,
                            getSymbols(context.getProcessExecutor(), getLibFilePath()).undefined));
                    return StepExecutionResult.SUCCESS;
                }
            });
}

From source file:com.siemens.sw360.datahandler.db.ComponentDatabaseHandler.java

public List<Component> getMyComponents(String user) {
    //This call could be sped up, because we want the full documents
    Set<String> myComponentIds = componentRepository.getMyComponentIds(user);

    //! but in general this will be the much larger chunk and so we keep it this way
    Set<String> myComponentIdsFromReleases = releaseRepository.getMyComponentIds(user);

    return componentRepository.makeSummary(SummaryType.SHORT,
            Sets.union(myComponentIds, myComponentIdsFromReleases));
}

From source file:org.jclouds.virtualbox.compute.VirtualBoxComputeServiceAdapter.java

@Override
public Iterable<Image> listImages() {
    // the set of image vm names that were (or could be) built from the yaml file
    final Set<String> imagesFromYamlNames = Sets
            .newHashSet(Iterables.transform(imagesToYamlImages.keySet(), new Function<Image, String>() {
                @Override
                public String apply(Image input) {
                    return VIRTUALBOX_IMAGE_PREFIX + input.getId();
                }

            }));

    // IMachines that were not built from the yaml file transformed to Images
    Set<Image> imagesFromCloning = Sets
            .newHashSet(Iterables.transform(Iterables.filter(imageMachines(), new Predicate<IMachine>() {
                @Override
                public boolean apply(IMachine input) {
                    return !imagesFromYamlNames.contains(input.getName());
                }
            }), imachineToImage));

    // final set of images are those from yaml and those from vbox that were not a transformation
    // of the yaml ones
    return Sets.union(imagesToYamlImages.keySet(), imagesFromCloning);
}

From source file:org.apache.cassandra.locator.PropertyFileSnitch.java

/**
 * We cannot update rack or data-center for a live node, see CASSANDRA-10243.
 *
 * @param reloadedMap - the new map of hosts to dc:rack properties
 * @param reloadedDefaultDCRack - the default dc:rack or null if no default
 * @return true if we can continue updating (no live host had dc or rack updated)
 */
private static boolean livenessCheck(HashMap<InetAddress, String[]> reloadedMap,
        String[] reloadedDefaultDCRack) {
    // If the default has changed we must check all live hosts but hopefully we will find a live
    // host quickly and interrupt the loop. Otherwise we only check the live hosts that were either
    // in the old set or in the new set
    Set<InetAddress> hosts = Arrays.equals(defaultDCRack, reloadedDefaultDCRack)
            ? Sets.intersection(StorageService.instance.getLiveRingMembers(), // same default
                    Sets.union(endpointMap.keySet(), reloadedMap.keySet()))
            : StorageService.instance.getLiveRingMembers(); // default updated

    for (InetAddress host : hosts) {
        String[] origValue = endpointMap.containsKey(host) ? endpointMap.get(host) : defaultDCRack;
        String[] updateValue = reloadedMap.containsKey(host) ? reloadedMap.get(host) : reloadedDefaultDCRack;

        if (!Arrays.equals(origValue, updateValue)) {
            logger.error(
                    "Cannot update data center or rack from {} to {} for live host {}, property file NOT RELOADED",
                    origValue, updateValue, host);
            return false;
        }
    }

    return true;
}

From source file:org.cinchapi.concourse.importer.AbstractImporter.java

/**
 * Import a single group of {@code data} (i.e. a line in a csv file) into
 * {@code concourse}.
 * <p>
 * If {@code resolveKey} is specified, it is possible that the {@code data}
 * will be added to more than one existing record. It is guaranteed that an
 * attempt will be made to add the data to at least one (possibly) new
 * record.
 * </p>
 * 
 * @param data
 * @param resolveKey
 * @return an {@link ImportResult} object that describes the records
 *         created/affected from the import and whether any errors occurred.
 */
protected final ImportResult importGroup(Multimap<String, String> data, @Nullable String resolveKey) {
    // Determine import record(s)
    Set<Long> records = Sets.newHashSet();
    for (String resolveValue : data.get(resolveKey)) {
        records = Sets.union(records,
                concourse.find(resolveKey, Operator.EQUALS, Convert.stringToJava(resolveValue)));
        records = Sets.newHashSet(records); // must make copy because
                                            // previous method returns
                                            // immutable view
    }
    if (records.isEmpty()) {
        records.add(concourse.create());
    }
    // Iterate through the data and add it to Concourse
    ImportResult result = ImportResult.newImportResult(data, records);
    for (String key : data.keySet()) {
        for (String rawValue : data.get(key)) {
            if (!Strings.isNullOrEmpty(rawValue)) { // do not waste time
                                                    // sending empty
                                                    // values
                                                    // over the wire
                Object convertedValue = Convert.stringToJava(rawValue);
                List<Object> values = Lists.newArrayList();
                if (convertedValue instanceof ResolvableLink) {
                    // Find all the records that resolve and create a
                    // Link to those records.
                    for (long record : concourse.find(((ResolvableLink) convertedValue).getKey(),
                            Operator.EQUALS, ((ResolvableLink) convertedValue).getValue())) {
                        values.add(Link.to(record));
                    }
                } else {
                    values.add(convertedValue);
                }
                for (long record : records) {
                    for (Object value : values) {
                        if (!concourse.add(key, value, record)) {
                            result.addError(MessageFormat.format("Could not import {0} AS {1} IN {2}", key,
                                    value, record));
                        }
                    }
                }
            }
        }
    }
    return result;
}

From source file:org.sosy_lab.cpachecker.cfa.ast.c.FileLocationCollectingVisitor.java

@Override
public Set<FileLocation> visit(CArrayDesignator pNode) throws RuntimeException {
    Set<FileLocation> result = Collections.singleton(pNode.getFileLocation());
    if (pNode.getSubscriptExpression() != null) {
        result = Sets.union(result, pNode.getSubscriptExpression().accept(this));
    }
    return result;
}