Example usage for com.google.common.util.concurrent Futures allAsList

Introduction

On this page you can find example usage of com.google.common.util.concurrent.Futures.allAsList.

Prototype

@Beta
@CheckReturnValue
public static <V> ListenableFuture<List<V>> allAsList(
        Iterable<? extends ListenableFuture<? extends V>> futures) 

Document

Creates a new ListenableFuture whose value is a list containing the values of all its input futures, if all succeed.
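
Before the project examples below, here is a minimal, self-contained sketch of the call itself. The class name, executor setup, and task values are illustrative assumptions rather than code from any project on this page; it shows that the combined future preserves input order and that a single failed input fails the whole result.

import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;

public class AllAsListSketch {
    public static void main(String[] args) throws Exception {
        ListeningExecutorService pool = MoreExecutors
                .listeningDecorator(Executors.newFixedThreadPool(2));

        ListenableFuture<String> first = pool.submit(() -> "first");
        ListenableFuture<String> second = pool.submit(() -> "second");

        // The combined future succeeds only if every input succeeds;
        // the resulting list preserves the order of the input futures.
        ListenableFuture<List<String>> all = Futures.allAsList(first, second);
        System.out.println(all.get()); // [first, second]

        // If any input fails, the combined future fails with that input's exception.
        ListenableFuture<String> failing = Futures
                .immediateFailedFuture(new IllegalStateException("boom"));
        ListenableFuture<List<String>> combined = Futures.allAsList(second, failing);
        try {
            combined.get();
        } catch (ExecutionException expected) {
            System.out.println("combined failed: " + expected.getCause());
        }

        pool.shutdown();
    }
}

When partial results are acceptable, Futures.successfulAsList is the usual alternative: it substitutes null for inputs that fail or are cancelled instead of failing the combined future.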

Usage

From source file:com.spotify.folsom.ketama.KetamaMemcacheClient.java

private <T> ListenableFuture<List<T>> sendSplitRequest(final MultiRequest<T> multiRequest) {
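    // Group keys by the client chosen on the ketama ring, send one sub-request per client in
    // parallel, and reassemble the responses in the original key order via the Assembler.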
    final List<byte[]> keys = multiRequest.getKeys();

    final Map<RawMemcacheClient, List<byte[]>> routing = Maps.newIdentityHashMap();
    final List<RawMemcacheClient> routing2 = Lists.newArrayListWithCapacity(keys.size());
    for (final byte[] key : keys) {
        final RawMemcacheClient client = getClient(key);
        List<byte[]> subKeys = routing.get(client);
        if (subKeys == null) {
            subKeys = Lists.newArrayList();
            routing.put(client, subKeys);
        }
        subKeys.add(key);
        routing2.add(client);
    }

    final Map<RawMemcacheClient, ListenableFuture<List<T>>> futures = Maps.newIdentityHashMap();

    for (final Map.Entry<RawMemcacheClient, List<byte[]>> entry : routing.entrySet()) {
        final List<byte[]> subKeys = entry.getValue();
        final Request<List<T>> subRequest = multiRequest.create(subKeys);
        final RawMemcacheClient client = entry.getKey();
        ListenableFuture<List<T>> send = client.send(subRequest);
        futures.put(client, send);
    }
    final ListenableFuture<List<List<T>>> allFutures = Futures.allAsList(futures.values());
    return Utils.transform(allFutures, new Assembler<>(futures, routing2));
}

From source file:com.facebook.buck.core.rulekey.calculator.ParallelRuleKeyCalculator.java

/**
 * @return a {@link ListenableFuture} wrapping the result of calculating the {@link RuleKey} of
 *     the given {@link BuildRule}.
 */
public synchronized ListenableFuture<T> calculate(BuckEventBus buckEventBus, BuildRule rule) {
    ListenableFuture<T> fromOurCache = ruleKeys.get(rule.getBuildTarget());
    if (fromOurCache != null) {
        return fromOurCache;
    }

    T fromInternalCache = ruleKeyFactory.getFromCache(rule);
    if (fromInternalCache != null) {
        ListenableFuture<T> future = Futures.immediateFuture(fromInternalCache);
        // Record the rule key future.
        ruleKeys.put(rule.getBuildTarget(), future);
        // Because a rule key will be invalidated from the internal cache any time one of its
        // dependents is invalidated, we know that all of our transitive deps are also in cache.
        return future;
    }

    // Grab all the dependency rule key futures.  Since our rule key calculation depends on this
    // one, we need to wait for them to complete.
    ListenableFuture<List<T>> depKeys = Futures.transformAsync(Futures.immediateFuture(ruleDepsCache.get(rule)),
            (@Nonnull SortedSet<BuildRule> deps) -> {
                List<ListenableFuture<T>> depKeys1 = new ArrayList<>(
                        SortedSets.sizeEstimate(rule.getBuildDeps()));
                for (BuildRule dep : deps) {
                    depKeys1.add(calculate(buckEventBus, dep));
                }
                return Futures.allAsList(depKeys1);
            }, service);

    // Setup a future to calculate this rule key once the dependencies have been calculated.
    ListenableFuture<T> calculated = Futures.transform(depKeys, (List<T> input) -> {
        try (Scope scope = ruleKeyCalculationScope.apply(buckEventBus, rule)) {
            return ruleKeyFactory.build(rule);
        } catch (Exception e) {
            throw new BuckUncheckedExecutionException(e, String.format("When computing rulekey for %s.", rule));
        }
    }, service);

    // Record the rule key future.
    ruleKeys.put(rule.getBuildTarget(), calculated);
    return calculated;
}

From source file:io.crate.action.sql.TransportSQLBulkAction.java

@Override
void executePlan(Executor executor, Analysis analysis, Plan plan,
        final ActionListener<SQLBulkResponse> listener, final SQLBulkRequest request, final long startTime) {
    if (!analysis.expectsAffectedRows()) {
        listener.onFailure(new UnsupportedOperationException(
                "Bulk operations for statements that return result sets is not supported"));
        return;
    }

    ListenableFuture<List<TaskResult>> future = Futures.allAsList(executor.executeBulk(plan));
    Futures.addCallback(future, new FutureCallback<List<TaskResult>>() {
        @Override
        public void onSuccess(@Nullable List<TaskResult> result) {
            listener.onResponse(createResponse(result, startTime));
        }

        @Override
        public void onFailure(@Nonnull Throwable t) {
            listener.onFailure(t);
        }
    });
}

From source file:org.apache.twill.internal.ServiceMain.java

protected final void doMain(final Service mainService, Service... prerequisites)
        throws ExecutionException, InterruptedException {
    if (Boolean.parseBoolean(System.getProperty("twill.disable.kafka"))) {
        LOG.info("Log collection through kafka disabled");
    } else {
        configureLogger();
    }

    Service requiredServices = new CompositeService(prerequisites);
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            mainService.stopAndWait();
        }
    });

    // Listener for state changes of the service
    ListenableFuture<Service.State> completion = Services.getCompletionFuture(mainService);
    Throwable initFailure = null;

    try {
        try {
            // Starts the service
            LOG.info("Starting service {}.", mainService);
            Futures.allAsList(Services.chainStart(requiredServices, mainService).get()).get();
            LOG.info("Service {} started.", mainService);
        } catch (Throwable t) {
            LOG.error("Exception when starting service {}.", mainService, t);
            initFailure = t;
        }

        try {
            if (initFailure == null) {
                completion.get();
                LOG.info("Service {} completed.", mainService);
            }
        } catch (Throwable t) {
            LOG.error("Exception thrown from service {}.", mainService, t);
            throw Throwables.propagate(t);
        }
    } finally {
        requiredServices.stopAndWait();

        ILoggerFactory loggerFactory = LoggerFactory.getILoggerFactory();
        if (loggerFactory instanceof LoggerContext) {
            ((LoggerContext) loggerFactory).stop();
        }

        if (initFailure != null) {
            // Exit with the init fail exit code.
            System.exit(ContainerExitCodes.INIT_FAILED);
        }
    }
}

From source file:org.thingsboard.server.dao.timeseries.BaseTimeseriesDao.java

@Override
public ListenableFuture<List<TsKvEntry>> findAllAsync(String entityType, UUID entityId,
        List<TsKvQuery> queries) {
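    // Run each query as its own async lookup, then flatten the per-query result lists into one list.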
    List<ListenableFuture<List<TsKvEntry>>> futures = queries.stream()
            .map(query -> findAllAsync(entityType, entityId, query)).collect(Collectors.toList());
    return Futures.transform(Futures.allAsList(futures),
            new Function<List<List<TsKvEntry>>, List<TsKvEntry>>() {
                @Nullable
                @Override
                public List<TsKvEntry> apply(@Nullable List<List<TsKvEntry>> results) {
                    List<TsKvEntry> result = new ArrayList<TsKvEntry>();
                    results.forEach(r -> result.addAll(r));
                    return result;
                }
            }, readResultsProcessingExecutor);
}

From source file:io.druid.query.ChainedExecutionQueryRunner.java

@Override
public Sequence<T> run(final Query<T> query, final Map<String, Object> responseContext) {
    final int priority = query.getContextPriority(0);

    return new BaseSequence<T, Iterator<T>>(new BaseSequence.IteratorMaker<T, Iterator<T>>() {
        @Override
        public Iterator<T> make() {
            // Make it a List<> to materialize all of the values (so that it will submit everything to the executor)
            ListenableFuture<List<Iterable<T>>> futures = Futures.allAsList(Lists.newArrayList(Iterables
                    .transform(queryables, new Function<QueryRunner<T>, ListenableFuture<Iterable<T>>>() {
                        @Override
                        public ListenableFuture<Iterable<T>> apply(final QueryRunner<T> input) {
                            if (input == null) {
                                throw new ISE(
                                        "Null queryRunner! Looks to be some segment unmapping action happening");
                            }

                            return exec.submit(new AbstractPrioritizedCallable<Iterable<T>>(priority) {
                                @Override
                                public Iterable<T> call() throws Exception {
                                    try {
                                        Sequence<T> result = input.run(query, responseContext);
                                        if (result == null) {
                                            throw new ISE("Got a null result! Segments are missing!");
                                        }

                                        List<T> retVal = Sequences.toList(result, Lists.<T>newArrayList());
                                        if (retVal == null) {
                                            throw new ISE("Got a null list of results! WTF?!");
                                        }

                                        return retVal;
                                    } catch (QueryInterruptedException e) {
                                        throw Throwables.propagate(e);
                                    } catch (Exception e) {
                                        log.error(e, "Exception with one of the sequences!");
                                        throw Throwables.propagate(e);
                                    }
                                }
                            });
                        }
                    })));

            queryWatcher.registerQuery(query, futures);

            try {
                final Number timeout = query.getContextValue(QueryContextKeys.TIMEOUT, (Number) null);
                return new MergeIterable<>(ordering.nullsFirst(), timeout == null ? futures.get()
                        : futures.get(timeout.longValue(), TimeUnit.MILLISECONDS)).iterator();
            } catch (InterruptedException e) {
                log.warn(e, "Query interrupted, cancelling pending results, query id [%s]", query.getId());
                futures.cancel(true);
                throw new QueryInterruptedException("Query interrupted");
            } catch (CancellationException e) {
                throw new QueryInterruptedException("Query cancelled");
            } catch (TimeoutException e) {
                log.info("Query timeout, cancelling pending results for query id [%s]", query.getId());
                futures.cancel(true);
                throw new QueryInterruptedException("Query timeout");
            } catch (ExecutionException e) {
                throw Throwables.propagate(e.getCause());
            }
        }

        @Override
        public void cleanup(Iterator<T> tIterator) {

        }
    });
}

From source file:com.flipkart.hydra.task.entities.WrapperCallable.java

protected Object getResponsesForList() throws Exception {
    List list = (List) loopVar;

    Map<Integer, ListenableFuture<Object>> futureMap = new HashMap<>();
    for (int i = 0; i < list.size(); i++) {
        ListenableFuture<Object> future = getFuture(i, list.get(i));
        futureMap.put(i, future);
    }

    ListenableFuture<List<Object>> compositeFuture = Futures.allAsList(futureMap.values());
    compositeFuture.get();

    List<Object> responsesList = new ArrayList<>();
    for (int i = 0; i < list.size(); i++) {
        responsesList.add(futureMap.get(i).get()); // futures are already complete after compositeFuture.get()
    }

    return responsesList;
}

From source file:io.druid.query.GroupByMergedQueryRunner.java

@Override
public Sequence<T> run(final Query<T> queryParam, final Map<String, Object> responseContext) {
    final GroupByQuery query = (GroupByQuery) queryParam;
    final GroupByQueryConfig querySpecificConfig = configSupplier.get().withOverrides(query);
    final boolean isSingleThreaded = querySpecificConfig.isSingleThreaded();
    final Pair<IncrementalIndex, Accumulator<IncrementalIndex, T>> indexAccumulatorPair = GroupByQueryHelper
            .createIndexAccumulatorPair(query, querySpecificConfig, bufferPool);
    final Pair<Queue, Accumulator<Queue, T>> bySegmentAccumulatorPair = GroupByQueryHelper
            .createBySegmentAccumulatorPair();
    final boolean bySegment = BaseQuery.getContextBySegment(query, false);
    final int priority = BaseQuery.getContextPriority(query, 0);

    ListenableFuture<List<Void>> futures = Futures.allAsList(Lists.newArrayList(
            Iterables.transform(queryables, new Function<QueryRunner<T>, ListenableFuture<Void>>() {
                @Override
                public ListenableFuture<Void> apply(final QueryRunner<T> input) {
                    if (input == null) {
                        throw new ISE("Null queryRunner! Looks to be some segment unmapping action happening");
                    }

                    ListenableFuture<Void> future = exec
                            .submit(new AbstractPrioritizedCallable<Void>(priority) {
                                @Override
                                public Void call() throws Exception {
                                    try {
                                        if (bySegment) {
                                            input.run(queryParam, responseContext).accumulate(
                                                    bySegmentAccumulatorPair.lhs, bySegmentAccumulatorPair.rhs);
                                        } else {
                                            input.run(queryParam, responseContext).accumulate(
                                                    indexAccumulatorPair.lhs, indexAccumulatorPair.rhs);
                                        }

                                        return null;
                                    } catch (QueryInterruptedException e) {
                                        throw Throwables.propagate(e);
                                    } catch (Exception e) {
                                        log.error(e, "Exception with one of the sequences!");
                                        throw Throwables.propagate(e);
                                    }
                                }
                            });

                    if (isSingleThreaded) {
                        waitForFutureCompletion(query, future, indexAccumulatorPair.lhs);
                    }

                    return future;
                }
            })));

    if (!isSingleThreaded) {
        waitForFutureCompletion(query, futures, indexAccumulatorPair.lhs);
    }

    if (bySegment) {
        return Sequences.simple(bySegmentAccumulatorPair.lhs);
    }

    return new ResourceClosingSequence<T>(Sequences.simple(Iterables.transform(
            indexAccumulatorPair.lhs.iterableWithPostAggregations(null, query.isDescending()),
            new Function<Row, T>() {
                @Override
                public T apply(Row input) {
                    return (T) input;
                }
            })), indexAccumulatorPair.lhs);
}

From source file:io.crate.action.job.TransportJobAction.java

@Override
public void nodeOperation(final JobRequest request, final ActionListener<JobResponse> actionListener) {
    JobExecutionContext.Builder contextBuilder = jobContextService.newBuilder(request.jobId(),
            request.coordinatorNodeId());

    SharedShardContexts sharedShardContexts = new SharedShardContexts(indicesService);
    List<ListenableFuture<Bucket>> directResponseFutures = contextPreparer
            .prepareOnRemote(request.nodeOperations(), contextBuilder, sharedShardContexts);

    try {
        JobExecutionContext context = jobContextService.createContext(contextBuilder);
        context.start();
    } catch (Throwable t) {
        actionListener.onFailure(t);
        return;
    }

    if (directResponseFutures.size() == 0) {
        actionListener.onResponse(new JobResponse());
    } else {
        Futures.addCallback(Futures.allAsList(directResponseFutures), new FutureCallback<List<Bucket>>() {
            @Override
            public void onSuccess(List<Bucket> buckets) {
                actionListener.onResponse(new JobResponse(buckets));
            }

            @Override
            public void onFailure(@Nonnull Throwable t) {
                actionListener.onFailure(t);
            }
        });
    }
}

From source file:org.thingsboard.server.dao.attributes.CassandraBaseAttributesDao.java

@Override
public ListenableFuture<List<AttributeKvEntry>> find(TenantId tenantId, EntityId entityId, String attributeType,
        Collection<String> attributeKeys) {
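    // Look up every requested attribute key individually and keep only the entries that exist.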
    List<ListenableFuture<Optional<AttributeKvEntry>>> entries = new ArrayList<>();
    attributeKeys.forEach(attributeKey -> entries.add(find(tenantId, entityId, attributeType, attributeKey)));
    return Futures.transform(Futures.allAsList(entries),
            (Function<List<Optional<AttributeKvEntry>>, ? extends List<AttributeKvEntry>>) input -> {
                List<AttributeKvEntry> result = new ArrayList<>();
                input.stream().filter(opt -> opt.isPresent()).forEach(opt -> result.add(opt.get()));
                return result;
            }, readResultsProcessingExecutor);
}