Example usage for com.google.common.collect ImmutableList stream

Introduction

This page collects usage examples for the com.google.common.collect.ImmutableList stream() method, taken from real source files.

Prototype

default Stream<E> stream() 

Document

Returns a sequential Stream with this collection as its source.
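A minimal standalone sketch of the typical pattern (the class name and values below are illustrative, and ImmutableList.toImmutableList() assumes Guava 21 or later): call stream() on the list, transform the elements, and collect the result into a new ImmutableList.

import com.google.common.collect.ImmutableList;

public class ImmutableListStreamExample {
    public static void main(String[] args) {
        ImmutableList<String> names = ImmutableList.of("alpha", "beta", "gamma");

        // stream() is inherited from java.util.Collection; this pipeline
        // upper-cases each element and collects into a new ImmutableList.
        ImmutableList<String> upperCased = names.stream()
                .map(String::toUpperCase)
                .collect(ImmutableList.toImmutableList());

        System.out.println(upperCased); // prints [ALPHA, BETA, GAMMA]
    }
}

Several of the examples below use the same ImmutableList.toImmutableList() collector, while others reach the same result through project-specific helpers such as GuavaCollectors.immutableList().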

Usage

From source file:com.google.devtools.build.lib.skyframe.ActionFileSystem.java

ActionFileSystem(FileSystem delegate, Path execRoot, ImmutableList<Root> sourceRoots,
        InputArtifactData inputArtifactData, Iterable<Artifact> allowedInputs,
        Iterable<Artifact> outputArtifacts) {
    try {
        Profiler.instance().startTask(ProfilerTask.ACTION_FS_STAGING, "staging");
        this.delegate = delegate;

        this.execRootFragment = execRoot.asFragment();
        this.execRootPath = getPath(execRootFragment);
        this.sourceRoots = sourceRoots.stream().map(root -> root.asPath().asFragment())
                .collect(ImmutableList.toImmutableList());

        validateRoots();

        this.inputArtifactData = inputArtifactData;

        this.optionalInputs = new HashMap<>();
        for (Artifact input : allowedInputs) {
            // Skips staging source artifacts as a performance optimization. We may want to stage them
            // if we want stricter enforcement of source sandboxing.
            //
            // TODO(shahan): there are no currently known cases where metadata is requested for an
            // optional source input. If there are any, we may want to stage those.
            if (input.isSourceArtifact() || inputArtifactData.contains(input)) {
                continue;
            }
            optionalInputs.computeIfAbsent(input.getExecPath(), unused -> new OptionalInputMetadata(input));
        }

        this.optionalInputsByDigest = new ConcurrentHashMap<>();

        this.outputs = Streams.stream(outputArtifacts)
                .collect(ImmutableMap.toImmutableMap(a -> a.getExecPath(), a -> new OutputMetadata(a)));
    } finally {
        Profiler.instance().completeTask(ProfilerTask.ACTION_FS_STAGING);
    }
}

From source file:com.facebook.buck.parser.ParserWithConfigurableAttributes.java

@Override
protected ImmutableSet<BuildTarget> collectBuildTargetsFromTargetNodeSpecs(ParsingContext parsingContext,
        PerBuildState state, Iterable<? extends TargetNodeSpec> targetNodeSpecs,
        TargetConfiguration targetConfiguration, boolean excludeConfigurationTargets)
        throws InterruptedException {
    PerBuildStateWithConfigurableAttributes stateWithConfigurableAttributes = (PerBuildStateWithConfigurableAttributes) state;

    TargetNodeProviderForSpecResolver<TargetNode<?>> targetNodeProvider = DefaultParser
            .createTargetNodeProviderForSpecResolver(state);

    TargetNodeFilterForSpecResolver<TargetNode<?>> targetNodeFilter = (spec, nodes) -> spec.filter(nodes);

    if (excludeConfigurationTargets) {
        targetNodeFilter = new TargetNodeFilterForSpecResolverWithNodeFiltering<>(targetNodeFilter,
                ParserWithConfigurableAttributes::filterOutNonBuildTargets);
    }

    ImmutableList<ImmutableSet<BuildTarget>> buildTargets = targetSpecResolver.resolveTargetSpecs(
            parsingContext.getCell(), targetNodeSpecs, targetConfiguration,
            (buildTarget, targetNode, targetType) -> DefaultParser.applyDefaultFlavors(buildTarget, targetNode,
                    targetType, parsingContext.getApplyDefaultFlavorsMode()),
            targetNodeProvider, targetNodeFilter);

    if (!stateWithConfigurableAttributes.getParsingContext().excludeUnsupportedTargets()) {
        return ImmutableSet.copyOf(Iterables.concat(buildTargets));
    }
    return filterIncompatibleTargetNodes(stateWithConfigurableAttributes,
            buildTargets.stream().flatMap(ImmutableSet::stream).map(state::getTargetNode))
                    .map(TargetNode::getBuildTarget).collect(ImmutableSet.toImmutableSet());
}

From source file:org.fcrepo.http.api.FedoraLdp.java

/**
 * Retrieve the node profile
 *
 * @param rangeValue the range value
 * @return a binary or the triples for the specified node
 * @throws IOException if IO exception occurred
 */
@GET
@Produces({ TURTLE + ";qs=10", JSON_LD + ";qs=8", N3, N3_ALT2, RDF_XML, NTRIPLES, APPLICATION_XML, TEXT_PLAIN,
        TURTLE_X, TEXT_HTML, APPLICATION_XHTML_XML })
public Response getResource(@HeaderParam("Range") final String rangeValue) throws IOException {
    checkCacheControlHeaders(request, servletResponse, resource(), session);

    LOGGER.info("GET resource '{}'", externalPath);

    final RdfStream rdfStream = new DefaultRdfStream(asNode(resource()));

    // If requesting a binary, check the mime-type if "Accept:" header is present.
    // (This needs to be done before setting up response headers, as getContent
    // returns a response - so changing headers after that won't work so nicely.)
    final ImmutableList<MediaType> acceptableMediaTypes = ImmutableList
            .copyOf(headers.getAcceptableMediaTypes());

    if (resource() instanceof FedoraBinary && acceptableMediaTypes.size() > 0) {
        final MediaType mediaType = MediaType.valueOf(((FedoraBinary) resource()).getMimeType());

        if (!acceptableMediaTypes.stream().anyMatch(t -> t.isCompatible(mediaType))) {
            return notAcceptable(Variant.VariantListBuilder.newInstance().mediaTypes(mediaType).build())
                    .build();
        }
    }

    addResourceHttpHeaders(resource());
    return getContent(rangeValue, getChildrenLimit(), rdfStream);
}

From source file:com.opengamma.strata.measure.fxopt.BlackFxOptionSmileVolatilitiesSpecification.java

@ImmutableConstructor
private BlackFxOptionSmileVolatilitiesSpecification(FxOptionVolatilitiesName name, CurrencyPair currencyPair,
        DayCount dayCount, List<FxOptionVolatilitiesNode> nodes, CurveInterpolator timeInterpolator,
        CurveExtrapolator timeExtrapolatorLeft, CurveExtrapolator timeExtrapolatorRight,
        CurveInterpolator strikeInterpolator, CurveExtrapolator strikeExtrapolatorLeft,
        CurveExtrapolator strikeExtrapolatorRight) {
    JodaBeanUtils.notNull(name, "name");
    JodaBeanUtils.notNull(currencyPair, "currencyPair");
    JodaBeanUtils.notNull(dayCount, "dayCount");
    JodaBeanUtils.notNull(nodes, "nodes");
    JodaBeanUtils.notNull(timeInterpolator, "timeInterpolator");
    JodaBeanUtils.notNull(timeExtrapolatorLeft, "timeExtrapolatorLeft");
    JodaBeanUtils.notNull(timeExtrapolatorRight, "timeExtrapolatorRight");
    JodaBeanUtils.notNull(strikeInterpolator, "strikeInterpolator");
    JodaBeanUtils.notNull(strikeExtrapolatorLeft, "strikeExtrapolatorLeft");
    JodaBeanUtils.notNull(strikeExtrapolatorRight, "strikeExtrapolatorRight");
    this.name = name;
    this.currencyPair = currencyPair;
    this.dayCount = dayCount;
    this.nodes = ImmutableList.copyOf(nodes);
    this.timeInterpolator = timeInterpolator;
    this.timeExtrapolatorLeft = timeExtrapolatorLeft;
    this.timeExtrapolatorRight = timeExtrapolatorRight;
    this.strikeInterpolator = strikeInterpolator;
    this.strikeExtrapolatorLeft = strikeExtrapolatorLeft;
    this.strikeExtrapolatorRight = strikeExtrapolatorRight;
    this.nodesByTenor = nodes.stream()
            .collect(Guavate.toImmutableListMultimap(FxOptionVolatilitiesNode::getTenor));
    ImmutableList<Double> fullDeltas = nodes.stream().map(FxOptionVolatilitiesNode::getStrike).distinct()
            .map(Strike::getValue).sorted().collect(toImmutableList());

    int nDeltas = fullDeltas.size() - 1;
    ArgChecker.isTrue(fullDeltas.get(nDeltas) == 0.5, "0 < delta <= 0.5");
    this.deltas = fullDeltas.subList(0, nDeltas); // ATM removed
    int nParams = nodes.size();
    for (int i = 0; i < nParams; ++i) {
        ArgChecker.isTrue(nodes.get(i).getCurrencyPair().equals(currencyPair),
                "currency pair must be the same");
        ArgChecker.isTrue(nodes.get(i).getStrike() instanceof DeltaStrike, "Strike must be DeltaStrike");
    }
    for (Tenor tenor : nodesByTenor.keys()) {
        ImmutableList<FxOptionVolatilitiesNode> nodesForTenor = nodesByTenor.get(tenor);
        // value type, delta, size
        List<Double> atmDelta = nodesForTenor.stream()
                .filter(node -> node.getQuoteValueType().equals(ValueType.BLACK_VOLATILITY))
                .map(node -> node.getStrike().getValue()).sorted().collect(toList());
        ArgChecker.isTrue(atmDelta.equals(fullDeltas.subList(nDeltas, nDeltas + 1)),
                "The ATM delta set must be " + fullDeltas.subList(nDeltas, nDeltas + 1) + ", but found "
                        + atmDelta + ", for " + tenor);
        List<Double> rrDelta = nodesForTenor.stream()
                .filter(node -> node.getQuoteValueType().equals(ValueType.RISK_REVERSAL))
                .map(node -> node.getStrike().getValue()).sorted().collect(toList());
        ArgChecker.isTrue(rrDelta.equals(deltas), "The delta set for risk reversal must be " + deltas
                + ", but found " + rrDelta + ", for " + tenor);
        List<Double> strDelta = nodesForTenor.stream()
                .filter(node -> node.getQuoteValueType().equals(ValueType.STRANGLE))
                .map(node -> node.getStrike().getValue()).sorted().collect(toList());
        ArgChecker.isTrue(strDelta.equals(deltas),
                "The delta set for strangle must be " + deltas + ", but found " + strDelta + ", for " + tenor);
        // convention
        Set<BusinessDayAdjustment> busAdj = nodesForTenor.stream()
                .map(FxOptionVolatilitiesNode::getBusinessDayAdjustment).collect(toSet());
        ArgChecker.isTrue(busAdj.size() == 1, "BusinessDayAdjustment must be common to all the nodes");
        Set<DaysAdjustment> offset = nodesForTenor.stream().map(FxOptionVolatilitiesNode::getSpotDateOffset)
                .collect(toSet());
        ArgChecker.isTrue(offset.size() == 1, "DaysAdjustment must be common to all the nodes");
    }
}

From source file:com.spectralogic.dsbrowser.gui.components.ds3panel.Ds3PanelPresenter.java

public void calculateFiles(final TreeTableView<Ds3TreeTableValue> ds3TreeTableView) {
    // if a task for calculating the number of items is already running, cancel it
    if (itemsTask != null) {
        itemsTask.cancel(true);
    }
    try {
        ObservableList<TreeItem<Ds3TreeTableValue>> selectedItems = ds3TreeTableView.getSelectionModel()
                .getSelectedItems();
        final TreeItem<Ds3TreeTableValue> root = ds3TreeTableView.getRoot();
        if (Guard.isNullOrEmpty(selectedItems) && root != null && root.getValue() != null) {
            selectedItems = FXCollections.observableArrayList();
            selectedItems.add(root);
        }
        //start a new task for calculating
        itemsTask = new GetNumberOfItemsTask(ds3Common.getCurrentSession().getClient(), selectedItems);

        itemsTask.setOnSucceeded(SafeHandler.logHandle(event -> Platform.runLater(() -> {
            final ImmutableList<TreeItem<Ds3TreeTableValue>> values = ds3TreeTableView.getSelectionModel()
                    .getSelectedItems().stream().filter(Objects::nonNull)
                    .collect(GuavaCollectors.immutableList());
            TreeItem<Ds3TreeTableValue> selectedRoot = ds3TreeTableView.getRoot();
            if (!Guard.isNullOrEmpty(values)) {
                final Optional<TreeItem<Ds3TreeTableValue>> first = values.stream().findFirst();
                if (first.isPresent()) {
                    selectedRoot = first.get();
                }
            }
            // show the file and folder counts for the selected item
            final FilesCountModel filesCountModel = itemsTask.getValue();
            if (selectedRoot == null || selectedRoot.getValue() == null || getSession() == null
                    || null == filesCountModel) {
                setVisibilityOfItemsInfo(false);
            } else {
                setVisibilityOfItemsInfo(true);
                setItemCountPanelInfo(filesCountModel, selectedRoot);
            }

        })));
        workers.execute(itemsTask);

    } catch (final Exception e) {
        LOG.error("Unable to calculate no. of items and capacity", e);
    }
}

From source file:com.spectralogic.dsbrowser.gui.components.ds3panel.Ds3PanelPresenter.java

public void ds3DeleteObject() {
    LOG.info("Got delete object event");
    final TreeTableView<Ds3TreeTableValue> ds3TreeTable = ds3Common.getDs3TreeTableView();
    final ImmutableList<TreeItem<Ds3TreeTableValue>> values = ds3TreeTable.getSelectionModel()
            .getSelectedItems().stream().filter(Objects::nonNull).collect(GuavaCollectors.immutableList());
    final TreeItem<Ds3TreeTableValue> root = ds3TreeTable.getRoot();
    if (Guard.isNullOrEmpty(values)) {
        if (root.getValue() == null) {
            LOG.info(resourceBundle.getString("noFiles"));
            alert.info("noFiles");
        }
    } else if (values.stream().map(TreeItem::getValue)
            .anyMatch(value -> value.getType() == Ds3TreeTableValue.Type.Directory)) {
        values.stream().map(TreeItem::toString).forEach(itemString -> LOG.info("Delete folder {}", itemString));
        DeleteService.deleteFolders(ds3Common, values);
    } else if (values.stream().map(TreeItem::getValue)
            .anyMatch(value -> value.getType() == Ds3TreeTableValue.Type.Bucket)) {
        LOG.info("Going to delete the bucket");
        DeleteService.deleteBucket(ds3Common, values, workers, loggingService, dateTimeUtils, resourceBundle);
    } else if (values.stream().map(TreeItem::getValue)
            .anyMatch(value -> value.getType() == Ds3TreeTableValue.Type.File)) {
        LOG.info("Going to delete the file(s)");
        DeleteService.deleteFiles(ds3Common, values);
    }
}

From source file:com.spectralogic.dsbrowser.gui.services.ds3Panel.SortPolicyCallback.java

private void sortChild(final TreeItem<BaseTreeModel> o1, final Comparator<TreeItem<BaseTreeModel>> comparator,
        final String type) {
    try {
        if (comparator != null) {
            final ImmutableList<TreeItem<BaseTreeModel>> loaderList = o1.getChildren().stream().filter(
                    i -> (i.getValue().getType().toString().equals(BaseTreeModel.Type.Loader.toString())))
                    .collect(GuavaCollectors.immutableList());
            final ImmutableList<TreeItem<BaseTreeModel>> collect = o1.getChildren().stream().filter(
                    i -> !(i.getValue().getType().toString().equals(BaseTreeModel.Type.Loader.toString())))
                    .collect(GuavaCollectors.immutableList());
            final ObservableList<TreeItem<BaseTreeModel>> treeItems = FXCollections
                    .observableArrayList(collect);
            treeItems.forEach(i -> {
                if (i.isExpanded())
                    sortChild(i, comparator, type);
            });
            FXCollections.sort(treeItems, comparator);
            o1.getChildren().removeAll(o1.getChildren());
            o1.getChildren().addAll(treeItems);
            final Optional<TreeItem<BaseTreeModel>> first = loaderList.stream().findFirst();

            first.ifPresent(baseTreeModelTreeItem -> o1.getChildren().add(baseTreeModelTreeItem));

            if (!type.equals(StringConstants.TYPE)) {
                FXCollections.sort(o1.getChildren(),
                        Comparator.comparing(t -> t.getValue().getType().toString()));
            }
        }
    } catch (final Exception e) {
        LOG.error("Unable to sort", e);
    }
}

From source file:com.spectralogic.dsbrowser.gui.services.ds3Panel.SortPolicyCallback.java

@Override
public Object call(final Object param) {
    try {
        if (param instanceof TreeTableView) {
            final TreeTableView<BaseTreeModel> param1 = (TreeTableView<BaseTreeModel>) param;
            final Comparator<TreeItem<BaseTreeModel>> comparator = (o1, o2) -> {
                if (param1.getComparator() == null) {
                    return 0;
                } else {
                    return param1.getComparator().compare(o1, o2);
                }
            };
            if (ds3TreeTable.getRoot() != null) {
                final ImmutableList<TreeItem<BaseTreeModel>> loaderList = ds3TreeTable.getRoot().getChildren()
                        .stream()
                        .filter(i -> (i.getValue().getType().toString()
                                .equals(BaseTreeModel.Type.Loader.toString())))
                        .collect(GuavaCollectors.immutableList());
                final ImmutableList<TreeItem<BaseTreeModel>> collect = ds3TreeTable.getRoot().getChildren()
                        .stream()
                        .filter(i -> !(i.getValue().getType().toString()
                                .equals(BaseTreeModel.Type.Loader.toString())))
                        .collect(GuavaCollectors.immutableList());
                final ObservableList<TreeItem<BaseTreeModel>> treeItems = FXCollections
                        .observableArrayList(collect);
                FXCollections.sort(treeItems, comparator);

                ds3TreeTable.getRoot().getChildren().removeAll(ds3TreeTable.getRoot().getChildren());
                ds3TreeTable.getRoot().getChildren().addAll(treeItems);
                final Optional<TreeItem<BaseTreeModel>> first = loaderList.stream().findFirst();
                first.ifPresent(baseTreeModelTreeItem -> ds3TreeTable.getRoot().getChildren()
                        .add(baseTreeModelTreeItem));

                treeItems.forEach(i -> {
                    if (i.isExpanded()) {
                        final Optional<TreeTableColumn<BaseTreeModel, ?>> firstElement = param1.getSortOrder()
                                .stream().findFirst();
                        if (firstElement.isPresent())
                            sortChild(i, comparator, firstElement.get().getText());
                        else
                            sortChild(i, comparator, StringConstants.EMPTY_STRING);
                    }
                });
                if (param1.getSortOrder().stream().findFirst().isPresent()) {
                    if (!param1.getSortOrder().stream().findFirst().get().getText()
                            .equals(StringConstants.TYPE)) {
                        FXCollections.sort(ds3TreeTable.getRoot().getChildren(),
                                Comparator.comparing(t -> t.getValue().getType().toString()));
                    }

                }
            }

        }
    } catch (final Exception e) {
        LOG.error("Unable to sort tree", e);
    }
    return true;
}

From source file:com.spectralogic.dsbrowser.gui.services.tasks.GetServiceTask.java

@Override
protected ObservableList<TreeItem<Ds3TreeTableValue>> call() throws Exception {
    final GetServiceResponse response = session.getClient().getService(new GetServiceRequest());
    if (null != response && null != response.getListAllMyBucketsResult()
            && !Guard.isNullOrEmpty(response.getListAllMyBucketsResult().getBuckets())) {
        final ImmutableList<Ds3TreeTableValue> buckets = response.getListAllMyBucketsResult().getBuckets()
                .stream().map(bucket -> {
                    final HBox hbox = new HBox();
                    hbox.getChildren().add(new Label(StringConstants.FOUR_DASH));
                    hbox.setAlignment(Pos.CENTER);
                    return new Ds3TreeTableValue(bucket.getName(), bucket.getName(),
                            Ds3TreeTableValue.Type.Bucket, 0, dateTimeUtils.format(bucket.getCreationDate()),
                            StringConstants.TWO_DASH, false, hbox);
                }).sorted(Comparator.comparing(b -> b.getName().toLowerCase()))
                .collect(GuavaCollectors.immutableList());

        loggingService.logMessage(resourceBundle.getString("receivedBucketList"), LogType.SUCCESS);
        Platform.runLater(() -> {
            if (null != ds3Common) {
                if (null != ds3Common.getDeepStorageBrowserPresenter()
                        && null != ds3Common.getDs3PanelPresenter()) {
                    ds3Common.getDs3PanelPresenter().disableSearch(false);
                }
                final ImmutableList<Ds3TreeTableItem> treeItems = buckets
                        .stream().map(value -> new Ds3TreeTableItem(value.getName(), session, value, workers,
                                ds3Common, dateTimeUtils, loggingService))
                        .collect(GuavaCollectors.immutableList());
                if (!Guard.isNullOrEmpty(treeItems)) {
                    partialResults.get().addAll(treeItems);
                    ds3Common.getDs3PanelPresenter().disableSearch(false);
                } else {
                    ds3Common.getDs3PanelPresenter().disableSearch(true);
                    LOG.info("No buckets found");
                }
            } else {
                LOG.info("Ds3Common is null");
            }
        });
    } else {
        LOG.info("No buckets found");
        ds3Common.getDs3PanelPresenter().disableSearch(true);
    }
    return this.partialResults.get();
}

From source file:com.google.devtools.build.lib.skyframe.PrepareAnalysisPhaseFunction.java

@Override
public PrepareAnalysisPhaseValue compute(SkyKey key, Environment env)
        throws InterruptedException, PrepareAnalysisPhaseFunctionException {
    PrepareAnalysisPhaseKey options = (PrepareAnalysisPhaseKey) key.argument();

    BuildOptions targetOptions = defaultBuildOptions.applyDiff(options.getOptionsDiff());
    BuildOptions hostOptions = targetOptions.get(BuildConfiguration.Options.class).useDistinctHostConfiguration
            ? HostTransition.INSTANCE.patch(targetOptions)
            : targetOptions;

    ImmutableSortedSet<Class<? extends BuildConfiguration.Fragment>> allFragments = options.getFragments()
            .fragmentClasses();
    BuildConfigurationValue.Key hostConfigurationKey = BuildConfigurationValue.key(allFragments,
            BuildOptions.diffForReconstruction(defaultBuildOptions, hostOptions));
    ImmutableList<BuildConfigurationValue.Key> targetConfigurationKeys = getTopLevelBuildOptions(targetOptions,
            options.getMultiCpu())
                    .stream()
                    .map(elem -> BuildConfigurationValue.key(allFragments,
                            BuildOptions.diffForReconstruction(defaultBuildOptions, elem)))
                    .collect(ImmutableList.toImmutableList());

    // We don't need the host configuration below, but we call this to get the error, if any.
    try {
        env.getValueOrThrow(hostConfigurationKey, InvalidConfigurationException.class);
    } catch (InvalidConfigurationException e) {
        throw new PrepareAnalysisPhaseFunctionException(e);
    }

    Map<SkyKey, SkyValue> configs = env.getValues(targetConfigurationKeys);

    // We only report invalid options for the target configurations, and abort if there's an error.
    ErrorSensingEventHandler nosyEventHandler = new ErrorSensingEventHandler(env.getListener());
    targetConfigurationKeys.stream().map(k -> configs.get(k)).filter(Predicates.notNull())
            .map(v -> ((BuildConfigurationValue) v).getConfiguration())
            .forEach(config -> config.reportInvalidOptions(nosyEventHandler));
    if (nosyEventHandler.hasErrors()) {
        throw new PrepareAnalysisPhaseFunctionException(
                new InvalidConfigurationException("Build options are invalid"));
    }

    // We get the list of labels from the TargetPatternPhaseValue, so we are reasonably certain that
    // there will not be an error loading these again.
    ResolvedTargets<Target> resolvedTargets = TestSuiteExpansionFunction.labelsToTargets(env,
            options.getLabels(), false);
    if (resolvedTargets == null) {
        return null;
    }
    ImmutableSet<Target> targets = resolvedTargets.getTargets();

    // We use a hash set here to remove duplicate nodes; this can happen for input files and package
    // groups.
    LinkedHashSet<TargetAndConfiguration> nodes = new LinkedHashSet<>(targets.size());
    for (Target target : targets) {
        if (target.isConfigurable()) {
            for (BuildConfigurationValue.Key configKey : targetConfigurationKeys) {
                BuildConfiguration config = ((BuildConfigurationValue) configs.get(configKey))
                        .getConfiguration();
                nodes.add(new TargetAndConfiguration(target, config));
            }
        } else {
            nodes.add(new TargetAndConfiguration(target, null));
        }
    }

    // We'll get the configs from #resolveConfigurations below, which started out as a copy of the
    // same code in SkyframeExecutor, which gets configurations for deps including transitions. So,
    // for now, to satisfy its API we resolve transitions and repackage each target as a Dependency
    // (with a NONE transition if necessary).
    // Keep this in sync with AnalysisUtils#getTargetsWithConfigs.
    Multimap<BuildConfiguration, Dependency> asDeps = AnalysisUtils.targetsToDeps(nodes, ruleClassProvider);
    LinkedHashSet<TargetAndConfiguration> topLevelTargetsWithConfigs = resolveConfigurations(env, nodes,
            asDeps);
    if (env.valuesMissing()) {
        return null;
    }
    ImmutableList<ConfiguredTargetKey> topLevelCtKeys = topLevelTargetsWithConfigs.stream()
            .map(node -> ConfiguredTargetKey.of(node.getLabel(), node.getConfiguration()))
            .collect(ImmutableList.toImmutableList());
    return new PrepareAnalysisPhaseValue(hostConfigurationKey, targetConfigurationKeys, topLevelCtKeys);
}