Example usage for com.google.common.collect ImmutableSet size

List of usage examples for com.google.common.collect ImmutableSet size

Introduction

This page lists example usages of com.google.common.collect.ImmutableSet.size().

Prototype

int size();

Document

Returns the number of elements in this set (its cardinality).
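
As a minimal sketch (the class name and elements below are purely illustrative), size() reports the cardinality of an ImmutableSet; duplicates passed to the factory method are collapsed and counted once:

import com.google.common.collect.ImmutableSet;

public class ImmutableSetSizeExample {
    public static void main(String[] args) {
        // Duplicates passed to of() are collapsed, so "b" contributes a single element.
        ImmutableSet<String> letters = ImmutableSet.of("a", "b", "b", "c");

        // size() returns the number of distinct elements in the set (its cardinality).
        System.out.println(letters.size()); // prints 3
    }
}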

Usage

From source file:com.facebook.buck.android.APKModuleGraph.java

/**
 * Loop through each of the targets we visited while generating seed modules:
 * if they are exclusive to that module, add them to that module; if they are not exclusive to
 * that module, find or create an appropriate shared module and fill out its dependencies.
 *
 * @param apkModuleGraph the current graph we're building
 * @param targetToContainingApkModulesMap the targets mapped to the seed targets they are
 *                                        reachable from
 */
private void generateSharedModules(MutableDirectedGraph<APKModule> apkModuleGraph,
        HashMultimap<BuildTarget, String> targetToContainingApkModulesMap) {

    // Sort the targets into APKModuleBuilders based on their seed dependencies
    final Map<ImmutableSet<String>, APKModule.Builder> combinedModuleHashToModuleMap = new HashMap<>();
    for (Map.Entry<BuildTarget, Collection<String>> entry : targetToContainingApkModulesMap.asMap()
            .entrySet()) {
        ImmutableSet<String> containingModuleSet = ImmutableSet.copyOf(entry.getValue());
        boolean exists = false;
        for (Map.Entry<ImmutableSet<String>, APKModule.Builder> existingEntry : combinedModuleHashToModuleMap
                .entrySet()) {
            if (existingEntry.getKey().equals(containingModuleSet)) {
                existingEntry.getValue().addBuildTargets(entry.getKey());
                exists = true;
                break;
            }
        }

        if (!exists) {
            String name = containingModuleSet.size() == 1 ? containingModuleSet.iterator().next()
                    : generateNameFromTarget(entry.getKey());
            combinedModuleHashToModuleMap.put(containingModuleSet,
                    APKModule.builder().setName(name).addBuildTargets(entry.getKey()));
        }
    }

    // Find the seed modules and add them to the graph
    Map<String, APKModule> seedModules = new HashMap<>();
    for (Map.Entry<ImmutableSet<String>, APKModule.Builder> entry : combinedModuleHashToModuleMap.entrySet()) {
        if (entry.getKey().size() == 1) {
            APKModule seed = entry.getValue().build();
            apkModuleGraph.addNode(seed);
            seedModules.put(entry.getKey().iterator().next(), seed);
            apkModuleGraph.addEdge(seed, rootAPKModuleSupplier.get());
        }
    }

    // Find the shared modules and add them to the graph
    for (Map.Entry<ImmutableSet<String>, APKModule.Builder> entry : combinedModuleHashToModuleMap.entrySet()) {
        if (entry.getKey().size() > 1) {
            APKModule shared = entry.getValue().build();
            apkModuleGraph.addNode(shared);
            apkModuleGraph.addEdge(shared, rootAPKModuleSupplier.get());
            for (String seedName : entry.getKey()) {
                apkModuleGraph.addEdge(seedModules.get(seedName), shared);
            }
        }
    }
}
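
In the example above, containingModuleSet.size() == 1 marks a target as reachable from a single seed (a seed module), while larger sets end up in shared modules. The standalone sketch below illustrates the same grouping idea with made-up target and module names; it relies on the fact that ImmutableSet implements equals() and hashCode() by element content, so it can key a HashMap directly:

import com.google.common.collect.ImmutableSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SetKeyedGrouping {
    public static void main(String[] args) {
        // Group target names by the set of seed modules they are reachable from.
        // ImmutableSet compares by content, so equal sets map to the same entry.
        Map<ImmutableSet<String>, List<String>> modulesToTargets = new HashMap<>();

        ImmutableSet<String> seeds = ImmutableSet.of("moduleA", "moduleB");
        modulesToTargets.computeIfAbsent(seeds, k -> new ArrayList<>()).add("//app:target1");
        modulesToTargets.computeIfAbsent(ImmutableSet.of("moduleA", "moduleB"), k -> new ArrayList<>())
                .add("//app:target2");

        // Both targets land under the same key; a size() of 1 would mark a seed-only module.
        System.out.println(modulesToTargets.size());                // prints 1
        System.out.println(seeds.size() == 1 ? "seed" : "shared");  // prints "shared"
    }
}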

From source file:org.diqube.thrift.base.services.DiqubeThriftServiceInfoManager.java

@PostConstruct
public void initialize() {
    annotationByServiceInterface = new HashMap<>();

    ImmutableSet<ClassInfo> classInfos;
    try {
        classInfos = ClassPath.from(DiqubeThriftServiceInfoManager.class.getClassLoader())
                .getTopLevelClassesRecursive(BASE_PKG);
    } catch (IOException e) {
        throw new RuntimeException("Could not parse ClassPath.", e);
    }

    for (ClassInfo classInfo : classInfos) {
        Class<?> clazz = classInfo.load();

        DiqubeThriftService annotation = clazz.getAnnotation(DiqubeThriftService.class);
        if (annotation != null)
            annotationByServiceInterface.put(annotation.serviceInterface(),
                    new DiqubeThriftServiceInfo<>(annotation));
    }
    logger.info("Found {} diqube services in {} scanned classes.", annotationByServiceInterface.size(),
            classInfos.size());
}

From source file:com.linecorp.armeria.common.MediaType.java

/**
 * Returns an optional charset for the value of the charset parameter if it is specified.
 *
 * @throws IllegalStateException if multiple charset values have been set for this media type
 * @throws IllegalCharsetNameException if a charset value is present, but illegal
 * @throws UnsupportedCharsetException if a charset value is present, but no support is available
 *     in this instance of the Java virtual machine
 */
public Optional<Charset> charset() {
    ImmutableSet<String> charsetValues = ImmutableSet.copyOf(parameters.get(CHARSET_ATTRIBUTE));
    switch (charsetValues.size()) {
    case 0:
        return Optional.empty();
    case 1:
        return Optional.of(Charset.forName(Iterables.getOnlyElement(charsetValues)));
    default:
        throw new IllegalStateException("Multiple charset values defined: " + charsetValues);
    }
}
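
The copyOf-then-switch-on-size() idiom above generalizes to any "at most one value expected" lookup. Below is a standalone sketch using plain Guava collections with made-up parameter names (not Armeria's MediaType API):

import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.util.Optional;

public class SingleValuedParameterExample {
    // Returns the sole value of the given parameter, empty if it is absent,
    // and fails loudly if conflicting values were supplied.
    static Optional<String> uniqueValue(ImmutableListMultimap<String, String> parameters, String key) {
        ImmutableSet<String> values = ImmutableSet.copyOf(parameters.get(key));
        switch (values.size()) {
        case 0:
            return Optional.empty();
        case 1:
            return Optional.of(Iterables.getOnlyElement(values));
        default:
            throw new IllegalStateException("Multiple values defined for " + key + ": " + values);
        }
    }

    public static void main(String[] args) {
        ImmutableListMultimap<String, String> params =
                ImmutableListMultimap.of("charset", "utf-8", "charset", "utf-8", "q", "0.9");
        // The duplicate but identical "utf-8" entries collapse to one element, so this succeeds.
        System.out.println(uniqueValue(params, "charset")); // prints Optional[utf-8]
    }
}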

From source file:com.vmware.photon.controller.rootscheduler.service.InMemoryConstraintChecker.java

@Override
public Map<String, ServerAddress> getCandidates(List<ResourceConstraint> constraints, int numCandidates) {
    // Find all the hosts that satisfy the resource constraints.
    ImmutableSet<String> matches;
    if (constraints.isEmpty()) {
        matches = getHosts();
    } else {
        Iterator<ResourceConstraint> iterator = constraints.iterator();
        matches = checkConstraint(iterator.next());
        while (iterator.hasNext()) {
            matches = Sets.intersection(matches, checkConstraint(iterator.next())).immutableCopy();
        }
    }

    // Randomly pick candidates. Pretty sure there is a better way to do this...
    Map<String, ServerAddress> result = new HashMap<>();
    Map<String, ServerAddress> hostMap = getHostMap();
    while (result.size() < numCandidates && result.size() < matches.size()) {
        String pick = matches.asList().get(RANDOM.nextInt(matches.size()));
        result.put(pick, hostMap.get(pick));
    }
    return ImmutableMap.copyOf(result);
}

From source file:dagger2.internal.codegen.BindingGraphValidator.java

/**
 * Validates that scopes do not participate in a scoping cycle - that is to say, scoped
 * components are in a hierarchical relationship terminating with Singleton.
 *
 * <p>As a side-effect, this means scoped components cannot have a dependency cycle between
 * themselves, since a component's presence within its own dependency path implies a cyclical
 * relationship between scopes.
 */
private void validateScopeHierarchy(TypeElement rootComponent, TypeElement componentType,
        Builder<BindingGraph> reportBuilder, Deque<Equivalence.Wrapper<AnnotationMirror>> scopeStack,
        Deque<TypeElement> scopedDependencyStack) {
    Optional<AnnotationMirror> scope = getScopeAnnotation(componentType);
    if (scope.isPresent()) {
        Equivalence.Wrapper<AnnotationMirror> wrappedScope = AnnotationMirrors.equivalence().wrap(scope.get());
        if (scopeStack.contains(wrappedScope)) {
            scopedDependencyStack.push(componentType);
            // Current scope has already appeared in the component chain.
            StringBuilder message = new StringBuilder();
            message.append(rootComponent.getQualifiedName());
            message.append(" depends on scoped components in a non-hierarchical scope ordering:\n");
            appendIndentedComponentsList(message, scopedDependencyStack);
            if (scopeCycleValidationType.diagnosticKind().isPresent()) {
                reportBuilder.addItem(message.toString(), scopeCycleValidationType.diagnosticKind().get(),
                        rootComponent, getAnnotationMirror(rootComponent, Component.class).get());
            }
            scopedDependencyStack.pop();
        } else {
            Optional<AnnotationMirror> componentAnnotation = getAnnotationMirror(componentType,
                    Component.class);
            if (componentAnnotation.isPresent()) {
                ImmutableSet<TypeElement> scopedDependencies = scopedTypesIn(
                        MoreTypes.asTypeElements(getComponentDependencies(componentAnnotation.get())));
                if (scopedDependencies.size() == 1) {
                    // empty can be ignored (base-case), and > 1 is a different error reported separately.
                    scopeStack.push(wrappedScope);
                    scopedDependencyStack.push(componentType);
                    validateScopeHierarchy(rootComponent, getOnlyElement(scopedDependencies), reportBuilder,
                            scopeStack, scopedDependencyStack);
                    scopedDependencyStack.pop();
                    scopeStack.pop();
                }
            } // else: we skip component dependencies which are not components
        }
    }
}

From source file:org.linagora.linshare.core.notifications.service.impl.MailBuildingServiceImpl.java

private String getFileNames(UploadRequestUrl requestUrl) {
    ImmutableSet<FileRepresentation> files = FluentIterable.from(requestUrl.getUploadRequestEntries())
            .transform(new Function<UploadRequestEntry, FileRepresentation>() {
                @Override
                public FileRepresentation apply(UploadRequestEntry ure) {
                    return new FileRepresentation(ure);
                }
            }).toSet();
    if (files.size() > 0) {
        return files.toString();
    }
    return " - ";
}

From source file:com.google.javascript.jscomp.newtypes.JSType.java

public boolean isLoose() {
    ImmutableSet<ObjectType> objs = getObjs();
    return objs.size() == 1 && Iterables.getOnlyElement(objs).isLoose();
}

From source file:google.registry.tools.server.GenerateZoneFilesAction.java

@Override
public Map<String, Object> handleJsonRequest(Map<String, ?> json) {
    @SuppressWarnings("unchecked")
    ImmutableSet<String> tlds = ImmutableSet.copyOf((List<String>) json.get("tlds"));
    final DateTime exportTime = DateTime.parse(json.get("exportTime").toString());
    // We disallow exporting within the past 2 minutes because there might be outstanding writes.
    // We can only reliably call loadAtPointInTime at times that are UTC midnight and >
    // datastoreRetention ago in the past.
    DateTime now = clock.nowUtc();
    if (exportTime.isAfter(now.minusMinutes(2))) {
        throw new BadRequestException("Invalid export time: must be > 2 minutes ago");
    }
    if (exportTime.isBefore(now.minus(datastoreRetention))) {
        throw new BadRequestException(String.format("Invalid export time: must be < %d days ago",
                datastoreRetention.getStandardDays()));
    }
    if (!exportTime.equals(exportTime.toDateTime(UTC).withTimeAtStartOfDay())) {
        throw new BadRequestException("Invalid export time: must be midnight UTC");
    }
    String jobId = mrRunner.setJobName("Generate bind file stanzas").setModuleName("tools")
            .setDefaultReduceShards(tlds.size()).runMapreduce(new GenerateBindFileMapper(tlds, exportTime),
                    new GenerateBindFileReducer(bucket, exportTime, gcsBufferSize),
                    ImmutableList.of(new NullInput<EppResource>(),
                            createEntityInput(DomainResource.class, HostResource.class)));
    ImmutableList<String> filenames = FluentIterable.from(tlds).transform(new Function<String, String>() {
        @Override
        public String apply(String tld) {
            return String.format(GCS_PATH_FORMAT, bucket, String.format(FILENAME_FORMAT, tld, exportTime));
        }
    }).toList();
    return ImmutableMap.<String, Object>of("jobPath", createJobPath(jobId), "filenames", filenames);
}

From source file:org.linagora.linshare.core.service.impl.MailBuildingServiceImpl.java

private String getFileNames(UploadRequest request) {
    ImmutableSet<FileRepresentation> files = FluentIterable.from(request.getUploadRequestEntries())
            .transform(new Function<UploadRequestEntry, FileRepresentation>() {
                @Override
                public FileRepresentation apply(UploadRequestEntry ure) {
                    return new FileRepresentation(ure);
                }
            }).toSet();
    if (files.size() > 0) {
        return files.toString();
    }
    return " - ";
}

From source file:dk.ilios.spanner.internal.ExperimentingSpannerRun.java

@Override
public void run() throws InvalidBenchmarkException {

    ImmutableSet<Experiment> allExperiments = selector.selectExperiments(baselineData);

    // TODO(lukes): move this standard-out handling into the ConsoleOutput class?
    stdout.println("Experiment selection: ");
    stdout.println("  Benchmark Methods:   "
            + FluentIterable.from(allExperiments).transform(new Function<Experiment, String>() {
                @Override
                public String apply(Experiment experiment) {
                    return experiment.instrumentation().benchmarkMethod().getName();
                }
            }).toSet());
    stdout.println("  Instruments:   "
            + FluentIterable.from(selector.instruments()).transform(new Function<Instrument, String>() {
                @Override
                public String apply(Instrument instrument) {
                    return instrument.name();
                }
            }));
    stdout.println("  User parameters:   " + selector.userParameters());
    stdout.println("  Selection type:    " + selector.selectionType());
    stdout.println();

    stdout.format("This selection yields %s experiments.%n", allExperiments.size());
    stdout.flush();

    // always dry run first.
    ImmutableSet<Experiment> experimentsToRun = dryRun(allExperiments);
    //        if (experimentsToRun.size() != allExperiments.size()) {
    //            stdout.format("%d experiments were skipped.%n", allExperiments.size() - experimentsToRun.size());
    //        }

    //        if (experimentsToRun.isEmpty()) {
    //            throw new InvalidBenchmarkException("All experiments were skipped.");
    //        }
    //
    //        if (options.dryRun()) {
    //            return;
    //        }

    stdout.flush();

    int totalTrials = experimentsToRun.size() * options.getTrialsPrExperiment();
    Stopwatch stopwatch = Stopwatch.createStarted();
    List<ScheduledTrial> trials = createScheduledTrials(experimentsToRun, totalTrials);

    List<ListenableFuture<Trial.Result>> pendingTrials = scheduleTrials(trials, executorProvider);
    ConsoleOutput output = new ConsoleOutput(stdout, totalTrials, stopwatch);
    try {
        // Process results as they complete.
        for (ListenableFuture<Trial.Result> trialFuture : inCompletionOrder(pendingTrials)) {
            try {
                Trial.Result result = trialFuture.get();
                output.processTrial(result);
                for (ResultProcessor resultProcessor : resultProcessors) {
                    resultProcessor.processTrial(result.getTrial());
                }
            } catch (ExecutionException e) {
                if (e.getCause() instanceof TrialFailureException) {
                    output.processFailedTrial((TrialFailureException) e.getCause());
                } else {
                    for (ListenableFuture<?> toCancel : pendingTrials) {
                        toCancel.cancel(true);
                    }
                    throw Throwables.propagate(e.getCause());
                }
            } catch (InterruptedException e) {
                // be responsive to interruption, cancel outstanding work and exit
                for (ListenableFuture<?> toCancel : pendingTrials) {
                    // N.B. TrialRunLoop is responsive to interruption.
                    toCancel.cancel(true);
                }
                throw new RuntimeException(e);
            }
        }
    } finally {
        executorProvider.shutdown();
        output.close();
    }

    for (ResultProcessor resultProcessor : resultProcessors) {
        try {
            resultProcessor.close();
        } catch (IOException e) {
            logger.log(Level.WARNING, "Could not close a result processor: " + resultProcessor, e);
        }
    }
}