Example usage for java.util.concurrent CompletableFuture thenCompose

Introduction

This page presents usage examples for java.util.concurrent CompletableFuture.thenCompose, collected from the source files listed below.

Prototype

public <U> CompletableFuture<U> thenCompose(Function<? super T, ? extends CompletionStage<U>> fn) 
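
thenCompose chains a dependent asynchronous step: the function receives the result of this stage and returns a new CompletionStage, which is flattened into the returned future rather than nested (the async analogue of flatMap). A minimal sketch, using hypothetical lookup methods for illustration:

import java.util.concurrent.CompletableFuture;

public class ThenComposeSketch {

    // Hypothetical async steps, for illustration only
    static CompletableFuture<String> findUserId(String name) {
        return CompletableFuture.supplyAsync(() -> "user-42");
    }

    static CompletableFuture<String> findEmail(String userId) {
        return CompletableFuture.supplyAsync(() -> userId + "@example.com");
    }

    public static void main(String[] args) {
        // thenCompose flattens the result to CompletableFuture<String>,
        // where thenApply would yield CompletableFuture<CompletableFuture<String>>
        CompletableFuture<String> email = findUserId("alice").thenCompose(id -> findEmail(id));
        System.out.println(email.join()); // prints "user-42@example.com"
    }
}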

Usage

From source file:com.connio.sdk.example.ConnioAsyncClientExample.java

public static void main(String[] args) {
    try {

        // Initialise the context with API key credentials
        Connio.init("_key_671901158138828071", "31acec81b2414b03acf3d8c37ebdf305");

        // Create device profile
        final CompletableFuture<DeviceProfile> deviceProfile = DeviceProfile.create("device_profile_sdk")
                .executeAsync();

        // Once the device profile is created, compose the property creation on top of it
        final CompletableFuture<Property> property = deviceProfile.thenCompose((dp) -> {
            return dp.addProperty("numericProperty1", Property.Type.Number).setAccess(Property.Access.Public)
                    .executeAsync();
        });

        // Once the device profile and property exist, compose the method creation using both
        final CompletableFuture<Method> method = deviceProfile
                .thenCombine(property, (dp, p) -> new ImmutablePair<>(dp, p))
                .thenCompose((deviceProfileAndProperty) -> {
                    final DeviceProfile dp = deviceProfileAndProperty.getLeft();
                    final Property prop = deviceProfileAndProperty.getRight();

                    final MethodImpl implementation = new MethodImpl.Builder("return value;",
                            MethodImpl.ExecType.Javascript).build();
                    return dp.addMethod("getter", Method.Access.Public, implementation).setInputId(prop.getId())
                            .executeAsync();
                });

        // Once the device profile and method exist, create the device
        final CompletableFuture<Device> device = deviceProfile.thenCombine(method, (dp, m) -> dp)
                .thenCompose((dp) -> dp.addDevice().setStatus(Device.Status.Debug).executeAsync());

        // Write three data points
        device.thenCombine(property,
                (d, p) -> d.writeData(p, new DataFeed(new DataPoint.Builder(16.0).build())).executeAsync());
        device.thenCombine(property,
                (d, p) -> d.writeData(p, new DataFeed(new DataPoint.Builder(17.0).build())).executeAsync());
        device.thenCombine(property,
                (d, p) -> d.writeData(p, new DataFeed(new DataPoint.Builder(18.0).build())).executeAsync());

        // Retrieve getter method value
        CompletableFuture<Object> methodValue = device.thenCombine(method, (d, m) -> new ImmutablePair<>(d, m))
                .thenCompose((deviceAndMethod) -> {
                    final Device d = deviceAndMethod.getLeft();
                    final Method m = deviceAndMethod.getRight();

                    return d.readMethod(m).executeAsync();
                });

        // Get device state
        CompletableFuture<Optional<DeviceState>> deviceState = device.thenCompose(d -> d.state().fetchAsync());

        // Nothing has blocked up to this point. Now block on the futures to print
        // some information
        System.out.println("Device profile id: " + deviceProfile.get().getId());
        System.out.println("Property id: " + property.get().getId());
        System.out.println("Method id: " + method.get().getId());
        System.out.println("Device id: " + device.get().getId());
        System.out.println("Getter method value: " + methodValue.get());
        System.out.println("Device state: " + deviceState.get().get());

        Connio.terminate();

    } catch (Exception e) {
        e.printStackTrace();
    }
}
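
The pattern above, pairing two independent futures with thenCombine and then launching a dependent call with thenCompose, can be distilled as follows (a minimal sketch with hypothetical placeholder computations):

import java.util.AbstractMap.SimpleEntry;
import java.util.concurrent.CompletableFuture;

public class CombineThenComposeSketch {
    public static void main(String[] args) {
        CompletableFuture<String> left = CompletableFuture.supplyAsync(() -> "device-profile");
        CompletableFuture<Integer> right = CompletableFuture.supplyAsync(() -> 42);

        CompletableFuture<String> result = left
                .thenCombine(right, (l, r) -> new SimpleEntry<>(l, r)) // pair the two results
                .thenCompose(pair -> CompletableFuture.supplyAsync(
                        () -> pair.getKey() + ":" + pair.getValue())); // async step needing both

        System.out.println(result.join()); // prints "device-profile:42"
    }
}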

From source file:com.ikanow.aleph2.management_db.services.DataBucketStatusCrudService.java

/** Tries to distribute a request to listening data import managers to notify their harvesters that the bucket state has been updated
 * @param update_reply - the future reply to the find-and-update
 * @param suspended_predicate - takes the status bean (must exist at this point) and checks whether the bucket should be suspended
 * @param underlying_data_bucket_db - the data bucket bean db store
 * @param actor_context - actor context for distributing out requests
 * @param retry_store - the retry store for handling data import manager connectivity problems
 * @return a collection of success/error messages from either this function or the 
 */
private static <T> CompletableFuture<Collection<BasicMessageBean>> getOperationFuture(
        final CompletableFuture<Optional<DataBucketStatusBean>> update_reply,
        final Predicate<DataBucketStatusBean> suspended_predicate,
        final ICrudService<DataBucketBean> underlying_data_bucket_db,
        final ICrudService<DataBucketStatusBean> underlying_data_bucket_status_db,
        final ManagementDbActorContext actor_context,
        final ICrudService<BucketActionRetryMessage> retry_store) {
    return update_reply.thenCompose(sb -> {
        return sb.isPresent() ? underlying_data_bucket_db.getObjectById(sb.get()._id())
                : CompletableFuture.completedFuture(Optional.empty());
    }).thenCompose(bucket -> {
        if (!bucket.isPresent()) {
            return CompletableFuture.completedFuture(Arrays.asList(new BasicMessageBean(new Date(), // date
                    false, // success
                    IManagementDbService.CORE_MANAGEMENT_DB.get(),
                    BucketActionMessage.UpdateBucketActionMessage.class.getSimpleName(), null, // message code
                    ErrorUtils.get(ManagementDbErrorUtils.MISSING_STATUS_BEAN_OR_BUCKET,
                            update_reply.join().map(s -> s._id()).orElse("(unknown)")),
                    null) // details                  
            ));
        } else { // If we're here we've retrieved both the bucket and bucket status, so we're good to go
            final DataBucketStatusBean status_bean = update_reply.join().get();
            // (as above, if we're here then must exist)

            // Once we have the bucket, issue the update command
            final BucketActionMessage.UpdateBucketActionMessage update_message = new BucketActionMessage.UpdateBucketActionMessage(
                    bucket.get(), suspended_predicate.test(status_bean), // (ie user picks whether to suspend or unsuspend here)
                    bucket.get(), new HashSet<String>(
                            Optional.ofNullable(status_bean.node_affinity()).orElse(Collections.emptyList())));

            // Collect message and handle retries

            final CompletableFuture<Collection<BasicMessageBean>> management_results = MgmtCrudUtils
                    .applyRetriableManagementOperation(bucket.get(), actor_context, retry_store, update_message,
                            source -> {
                                return new BucketActionMessage.UpdateBucketActionMessage(
                                        update_message.bucket(), status_bean.suspended(),
                                        update_message.bucket(), new HashSet<String>(Arrays.asList(source)));
                            });

            return MgmtCrudUtils.handleUpdatingStatus(bucket.get(), status_bean,
                    suspended_predicate.test(status_bean), management_results,
                    underlying_data_bucket_status_db);
        }
    });
}
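
A recurring shape in the method above is branching inside thenCompose: one branch issues a further asynchronous call, while the other short-circuits with completedFuture so both branches return the same CompletionStage type. A minimal sketch of that idiom, with a hypothetical lookup:

import java.util.Optional;
import java.util.concurrent.CompletableFuture;

public class BranchingComposeSketch {

    // Hypothetical async lookup, for illustration only
    static CompletableFuture<Optional<String>> findRecord(String id) {
        return CompletableFuture.supplyAsync(() -> Optional.of("record-" + id));
    }

    public static void main(String[] args) {
        CompletableFuture<String> result = findRecord("123").thenCompose(maybe ->
                maybe.isPresent()
                        ? CompletableFuture.supplyAsync(() -> maybe.get().toUpperCase()) // follow-up call
                        : CompletableFuture.completedFuture("(not found)")); // short-circuit branch
        System.out.println(result.join());
    }
}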

From source file:com.redhat.coolstore.api_gateway.ApiGatewayController.java

/**
 * This /api REST endpoint uses Java 8 concurrency to call two backend services to construct the result
 *
 * @return the list
 */
@CrossOrigin(maxAge = 3600)
@RequestMapping(method = RequestMethod.GET, value = "/products", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiOperation("Get a list of products")
@ResponseBody
public List<Product> list() throws ExecutionException, InterruptedException {

    final CompletableFuture<List<Product>> productList = CompletableFuture
            .supplyAsync(() -> feignClientFactory.getPricingClient().getService().getProducts(), es);

    return productList.thenCompose((List<Product> products) -> {

        List<CompletableFuture<Product>> all = products.stream()
                .map(p -> productList.thenCombine(_getInventory(p.itemId), (pl, a) -> {
                    p.availability = a;
                    return p;
                })).collect(Collectors.toList());

        return CompletableFuture.allOf(all.toArray(new CompletableFuture[all.size()]))
                .thenApply(v -> all.stream().map(CompletableFuture::join).collect(Collectors.toList()));
    }).get();

}
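
The allOf/join idiom in the example above is the standard way to turn a List<CompletableFuture<T>> into a CompletableFuture<List<T>>: wait for all of them, then collect the now-complete results. A minimal generic sketch:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public class SequenceSketch {

    static <T> CompletableFuture<List<T>> sequence(List<CompletableFuture<T>> futures) {
        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                // join() cannot block here: allOf guarantees every future has completed
                .thenApply(v -> futures.stream().map(CompletableFuture::join).collect(Collectors.toList()));
    }

    public static void main(String[] args) {
        List<CompletableFuture<Integer>> futures = Arrays.asList(
                CompletableFuture.supplyAsync(() -> 1),
                CompletableFuture.supplyAsync(() -> 2));
        System.out.println(sequence(futures).join()); // prints [1, 2]
    }
}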

From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_PurgeBuckets.java

/** Top level logic for source synchronization
 * @param bucket_mgmt
 * @param source_db
 */
protected CompletableFuture<Void> synchronizePurgeSources(
        final IManagementCrudService<DataBucketBean> bucket_mgmt,
        final IManagementCrudService<DataBucketStatusBean> underlying_bucket_status_mgmt,
        final ICrudService<PurgeQueueBean> source_purge_db) {
    //_logger.debug("Starting a sync purge sources cycle");
    final List<CompletableFuture<?>> purge_results = new ArrayList<CompletableFuture<?>>();

    final CompletableFuture<List<PurgeQueueBean>> future_purge_sources = getAllPurgeSources(source_purge_db);

    //check for entries in the purge db
    return future_purge_sources.thenCompose(purge_sources -> {
        //_logger.debug("Got purge sources successfully, looping over any results");
        purge_sources.forEach(Lambdas.wrap_consumer_u(purge_source -> {
            _logger.debug("Looking at purge source: " + purge_source._id());

            final DataBucketBean to_purge = Lambdas
                    .wrap_u(() -> IkanowV1SyncService_Buckets.getBucketFromV1Source(purge_source.source()))
                    .get();
            //always try to purge the source we pulled
            purge_results.add(handlePurgeSource(to_purge, purge_source));
        }));
        //_logger.debug("done looping over purge sources");
        // wait for all of the purge operations to complete, then return
        return CompletableFuture.allOf(purge_results.toArray(new CompletableFuture[0]));
    });
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_PurgeBuckets.java

/**
 * Returns all purge sources that aren't marked as complete or errored,
 * then deletes everything that was pulled back.
 *
 * @param source_test_db
 * @return
 */
protected CompletableFuture<List<PurgeQueueBean>> getAllPurgeSources(
        final ICrudService<PurgeQueueBean> source_test_db) {
    final QueryComponent<PurgeQueueBean> get_query = CrudUtils.allOf(PurgeQueueBean.class)
            .whenNot(PurgeQueueBean::status, PurgeStatus.complete)
            .whenNot(PurgeQueueBean::status, PurgeStatus.error); //can be complete | error | in_progress | submitted | {unset/anything else}

    final CompletableFuture<List<PurgeQueueBean>> get_command = source_test_db.getObjectsBySpec(get_query)
            .thenApply(c -> StreamSupport.stream(c.spliterator(), false).collect(Collectors.toList()));

    return get_command.thenCompose(__ -> {
        return source_test_db.deleteObjectBySpec(get_query);
    }).thenApply(__ -> get_command.join()); // (ie return the original command but only once the update has completed)
}
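
The tail of getAllPurgeSources illustrates a handy shape: chain a side-effecting operation (here a delete) with thenCompose, then hand back the earlier result via thenApply and join, which is safe because the first stage is guaranteed complete by that point. A minimal sketch with hypothetical read/delete steps:

import java.util.concurrent.CompletableFuture;

public class ReadThenDeleteSketch {

    // Hypothetical async store operations, for illustration only
    static CompletableFuture<String> readAll() {
        return CompletableFuture.supplyAsync(() -> "pending rows");
    }

    static CompletableFuture<Void> deleteAll() {
        return CompletableFuture.runAsync(() -> { /* delete */ });
    }

    public static void main(String[] args) {
        CompletableFuture<String> read = readAll();
        CompletableFuture<String> result = read
                .thenCompose(rows -> deleteAll())   // delete runs only after the read completes
                .thenApply(ignored -> read.join()); // safe: read is already complete here
        System.out.println(result.join());
    }
}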

From source file:io.pravega.controller.store.stream.AbstractStreamMetadataStore.java

@Override
public CompletableFuture<TxnStatus> commitTransaction(final String scope, final String streamName,
        final int epoch, final UUID txId, final OperationContext context, final Executor executor) {
    Stream stream = getStream(scope, streamName, context);
    CompletableFuture<TxnStatus> future = withCompletion(stream.commitTransaction(epoch, txId), executor);

    future.thenCompose(result -> {
        return stream.getNumberOfOngoingTransactions().thenAccept(count -> {
            DYNAMIC_LOGGER.incCounterValue(nameFromStream(COMMIT_TRANSACTION, scope, streamName), 1);
            DYNAMIC_LOGGER.reportGaugeValue(nameFromStream(OPEN_TRANSACTIONS, scope, streamName), count);
        });
    });

    return future;
}
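
Note the shape here: the stage created by future.thenCompose(...) exists purely for its metrics side effect, and its handle is discarded, so callers get back the original future without waiting on the reporting step. A minimal sketch of that fire-and-forget composition, with a hypothetical counter:

import java.util.concurrent.CompletableFuture;

public class SideEffectComposeSketch {

    // Hypothetical async counter, for illustration only
    static CompletableFuture<Integer> countOpenTransactions() {
        return CompletableFuture.supplyAsync(() -> 3);
    }

    public static void main(String[] args) {
        CompletableFuture<String> future = CompletableFuture.supplyAsync(() -> "COMMITTED");

        // Derived stage used only for its side effect; its handle is deliberately dropped,
        // so callers observe `future` without waiting for the reporting step
        future.thenCompose(status -> countOpenTransactions()
                .thenAccept(count -> System.out.println(status + ", open=" + count)));

        System.out.println(future.join());
    }
}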

From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_TestBuckets.java

/** Top level logic for source synchronization
 * @param bucket_mgmt
 * @param source_db
 */
protected CompletableFuture<Void> synchronizeTestSources(
        final IManagementCrudService<DataBucketBean> bucket_mgmt,
        final IManagementCrudService<DataBucketStatusBean> underlying_bucket_status_mgmt,
        final ICrudService<TestQueueBean> source_test_db, final BucketTestService bucket_test_service) {
    //_logger.debug("Starting a sync test sources cycle");
    final List<CompletableFuture<?>> new_results = new ArrayList<CompletableFuture<?>>(); // (not used for synchronization - in a single)
    final List<CompletableFuture<?>> existing_results = new ArrayList<CompletableFuture<?>>();

    final CompletableFuture<List<TestQueueBean>> future_test_sources = getAllTestSources(source_test_db);

    //check for entries in test db
    return future_test_sources.thenCompose(test_sources -> {
        //_logger.debug("Got test sources successfully, looping over any results");
        test_sources.forEach(Lambdas.wrap_consumer_u(test_source -> {
            _logger.debug("Looking at test source: " + test_source._id());
            try {
                final DataBucketBean to_test = Lambdas.wrap_u(() -> getBucketFromV1Source(test_source.source()))
                        .get();
                if (test_source.status() != null && (test_source.status() == TestStatus.in_progress
                        || test_source.status() == TestStatus.completed
                        || test_source.status() == TestStatus.error)) {
                    existing_results.add(handleExistingTestSource(to_test, test_source, source_test_db));
                } else { // new entry (submitted or unset status)

                    _logger.debug("Found a new entry, setting up test");
                    new_results.add(
                            handleNewTestSource(to_test, test_source, bucket_test_service, source_test_db));
                }
            } catch (Exception ex) {
                final String error = ErrorUtils.getLongForm("error: {0}", ex);
                _logger.error("Error when checking test source: " + error);
                //turn off this test source
                updateTestSourceStatus(test_source._id(), TestStatus.error, source_test_db, Optional.empty(),
                        Optional.empty(), Optional.of(error)).join();
            }
        }));
        if (existing_results.isEmpty()) { // Make sure at least that we don't start a new thread until we've got all the tests from the previous sources
            existing_results.add(future_test_sources);
        }
        //_logger.debug("done looping over test sources");
        // block on the existing results (but not the new tests, since those can take ages)
        return CompletableFuture.allOf(existing_results.toArray(new CompletableFuture[0]));
    });
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_TestBuckets.java

/**
 * Returns all test sources that aren't marked as complete or errored.
 *
 * @param source_test_db
 * @return
 */
protected CompletableFuture<List<TestQueueBean>> getAllTestSources(
        final ICrudService<TestQueueBean> source_test_db) {
    final QueryComponent<TestQueueBean> get_query = CrudUtils.allOf(TestQueueBean.class)
            .whenNot(TestQueueBean::status, TestStatus.completed)
            .whenNot(TestQueueBean::status, TestStatus.error); //can be complete | error | in_progress | submitted | {unset/anything else}

    final QueryComponent<TestQueueBean> update_query = CrudUtils.allOf(TestQueueBean.class)
            .whenNot(TestQueueBean::status, TestStatus.in_progress)
            .whenNot(TestQueueBean::status, TestStatus.completed)
            .whenNot(TestQueueBean::status, TestStatus.error); //can be complete | error | in_progress | submitted | {unset/anything else}

    final UpdateComponent<TestQueueBean> update_command = CrudUtils.update(TestQueueBean.class)
            .set(TestQueueBean::status, TestStatus.in_progress);
    // (don't set started_processing_on - only set that once the job has been launched)

    final CompletableFuture<List<TestQueueBean>> get_command = source_test_db.getObjectsBySpec(get_query)
            .thenApply(c -> StreamSupport.stream(c.spliterator(), false).collect(Collectors.toList()));

    return get_command.thenCompose(__ -> {
        return source_test_db.updateObjectsBySpec(update_query, Optional.of(false), update_command);
    }).thenApply(__ -> get_command.join()); // (ie return the original command but only once the update has completed)
}

From source file:io.pravega.controller.store.stream.AbstractStreamMetadataStore.java

@Override
public CompletableFuture<Void> scaleSegmentsSealed(final String scope, final String name,
        final List<Integer> sealedSegments, final List<Segment> newSegments, final int activeEpoch,
        final long scaleTimestamp, final OperationContext context, final Executor executor) {
    List<Integer> newSegmentNumbers = newSegments.stream().map(Segment::getNumber).collect(Collectors.toList());
    CompletableFuture<Void> future = withCompletion(getStream(scope, name, context)
            .scaleOldSegmentsSealed(sealedSegments, newSegmentNumbers, activeEpoch, scaleTimestamp), executor);
    final List<AbstractMap.SimpleEntry<Double, Double>> newRanges = newSegments.stream()
            .map(x -> new AbstractMap.SimpleEntry<>(x.getKeyStart(), x.getKeyEnd()))
            .collect(Collectors.toList());

    future.thenCompose(result -> CompletableFuture.allOf(
            getActiveSegments(scope, name, System.currentTimeMillis(), null, executor)
                    .thenAccept(list -> DYNAMIC_LOGGER
                            .reportGaugeValue(nameFromStream(SEGMENTS_COUNT, scope, name), list.size())),
            findNumSplitsMerges(scope, name, executor).thenAccept(simpleEntry -> {
                DYNAMIC_LOGGER.updateCounterValue(nameFromStream(SEGMENTS_SPLITS, scope, name),
                        simpleEntry.getKey());
                DYNAMIC_LOGGER.updateCounterValue(nameFromStream(SEGMENTS_MERGES, scope, name),
                        simpleEntry.getValue());
            })));

    return future;
}

From source file:io.pravega.controller.store.stream.PersistentStreamBase.java

private CompletableFuture<List<Segment>> getSuccessorsForSegment(final int number) {
    val segmentFuture = getSegment(number);
    val indexTableFuture = getIndexTable();
    val historyTableFuture = getHistoryTable();
    CompletableFuture<Void> all = CompletableFuture.allOf(segmentFuture, indexTableFuture, historyTableFuture);

    return all.thenCompose(x -> {
        final Segment segment = segmentFuture.getNow(null);
        List<Integer> candidates = TableHelper.findSegmentSuccessorCandidates(segment,
                indexTableFuture.getNow(null).getData(), historyTableFuture.getNow(null).getData());
        return findOverlapping(segment, candidates);
    });
}
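
The example above leans on getNow(null) being safe inside an allOf continuation: once allOf completes, every constituent future is guaranteed done, so getNow returns the real value rather than the fallback. A minimal sketch of the same shape:

import java.util.concurrent.CompletableFuture;

public class AllOfGetNowSketch {
    public static void main(String[] args) {
        CompletableFuture<String> left = CompletableFuture.supplyAsync(() -> "seg");
        CompletableFuture<Integer> right = CompletableFuture.supplyAsync(() -> 7);
        CompletableFuture<Void> all = CompletableFuture.allOf(left, right);

        CompletableFuture<String> combined = all.thenCompose(x -> {
            // getNow is safe here: allOf has completed, so both futures are done
            String l = left.getNow(null);
            Integer r = right.getNow(null);
            return CompletableFuture.completedFuture(l + "-" + r);
        });
        System.out.println(combined.join()); // prints "seg-7"
    }
}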