Example usage for java.util.concurrent CompletableFuture completedFuture

List of usage examples for java.util.concurrent CompletableFuture completedFuture

Introduction

On this page you can find usage examples for java.util.concurrent CompletableFuture completedFuture.

Prototype

public static <U> CompletableFuture<U> completedFuture(U value) 

Document

Returns a new CompletableFuture that is already completed with the given value.
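
Before the source-file examples below, here is a minimal, self-contained sketch of the method in isolation; it is not taken from the listed sources, and the PriceCache class and fetchRemote method are hypothetical. It shows the common case: satisfying an asynchronous return type when the result is already available, so no extra thread is involved.

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;

public class PriceCache {
    private final Map<String, Double> cache = new ConcurrentHashMap<>();

    // Hypothetical example: callers get a CompletableFuture either way, but a
    // cache hit is wrapped with completedFuture and completes immediately.
    public CompletableFuture<Double> getPrice(String symbol) {
        final Double cached = cache.get(symbol);
        if (cached != null) {
            return CompletableFuture.completedFuture(cached);
        }
        return CompletableFuture.supplyAsync(() -> fetchRemote(symbol))
                .whenComplete((price, ex) -> {
                    if (ex == null) {
                        cache.put(symbol, price);
                    }
                });
    }

    private double fetchRemote(String symbol) {
        return 42.0; // stand-in for a real remote call
    }
}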

Usage

From source file:com.ikanow.aleph2.harvest.logstash.services.LogstashHarvestService.java

@Override
public CompletableFuture<BasicMessageBean> onUpdatedSource(DataBucketBean old_bucket, DataBucketBean new_bucket,
        boolean is_enabled, Optional<BucketDiffBean> diff, IHarvestContext context) {

    final LogstashBucketConfigBean config = Optionals.ofNullable(new_bucket.harvest_configs()).stream()
            .findFirst().map(cfg -> BeanTemplateUtils.from(cfg.config(), LogstashBucketConfigBean.class).get())
            .orElse(BeanTemplateUtils.build(LogstashBucketConfigBean.class).done().get());

    // Handle test case - use process utils to delete
    if (BucketUtils.isTestBucket(new_bucket)) {
        resetFilePointer(new_bucket, config, _globals.get());

        //kill/log
        final Tuple2<String, Boolean> kill_result = ProcessUtils.stopProcess(this.getClass().getSimpleName(),
                new_bucket, _global_propertes.get().local_root_dir() + LOCAL_RUN_DIR_SUFFIX, Optional.of(2));

        //log any output (don't crash if something goes wrong, this is just icing)
        try {
            final String log_file = System.getProperty("java.io.tmpdir") + File.separator
                    + BucketUtils.getUniqueSignature(new_bucket.full_name(), Optional.empty());
            final File log_file_handle = new File(log_file);
            LogstashUtils.sendOutputToLogger(context.getLogger(Optional.empty()), Level.INFO, log_file_handle,
                    Optional.empty());
            log_file_handle.delete();
        } catch (Exception ex) {
            context.getLogger(Optional.empty()).log(Level.ERROR,
                    ErrorUtils.lazyBuildMessage(false, () -> this.getClass().getSimpleName(),
                            () -> "onUpdatedSource", () -> null,
                            () -> ErrorUtils.getLongForm("Error getting logstash test output: {0}", ex),
                            () -> Collections.emptyMap()));
        }

        return CompletableFuture.completedFuture(ErrorUtils.buildMessage(true, this.getClass().getSimpleName(),
                "Bucket suspended: {0}", kill_result._1()));
    } else {
        if (diff.map(bdb -> bdb.diffs().isEmpty()).orElse(false)) { // if nothing's changed then do nothing
            //TODO: longer term could do better here, eg we don't care unless data_schema or harvest_configs have changed, right?            
            return CompletableFuture.completedFuture(ErrorUtils.buildSuccessMessage(
                    this.getClass().getSimpleName(), "onUpdatedSource", "No change to bucket"));
        }
        if (is_enabled) {
            return CompletableFuture
                    .completedFuture(startOrUpdateLogstash(new_bucket, config, _globals.get(), context));
        } else { // Just stop
            //(this does nothing if the bucket isn't actually running)
            return CompletableFuture.completedFuture(stopLogstash(new_bucket, config, _globals.get()));
        }
    }
}

From source file:com.ikanow.aleph2.data_model.interfaces.shared_services.MockManagementCrudService.java

@Override
public ManagementFuture<com.ikanow.aleph2.data_model.interfaces.shared_services.ICrudService.Cursor<T>> getObjectsBySpec(
        QueryComponent<T> spec) {
    return FutureUtils.createManagementFuture(CompletableFuture.completedFuture(new MyCursor()));
}

From source file:io.pravega.controller.store.stream.InMemoryStream.java

@Override
CompletableFuture<Data<Integer>> getStateData() {
    synchronized (lock) {
        if (this.state == null) {
            return FutureHelpers
                    .failedFuture(StoreException.create(StoreException.Type.DATA_NOT_FOUND, getName()));
        }

        return CompletableFuture.completedFuture(copy(state));
    }
}
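
The example above pairs completedFuture on the success path with a pre-failed future on the missing-data path, so the synchronized lookup never forces callers to block or catch synchronously. A minimal sketch of the same pattern using only JDK calls (the Registry class and its names are hypothetical; note that CompletableFuture.failedFuture requires Java 9+, whereas the Pravega code uses its own FutureHelpers):

import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;

public class Registry {
    private final Map<String, String> entries = new ConcurrentHashMap<>();

    // Hypothetical example: a synchronous lookup exposed through an
    // asynchronous signature; both branches return futures that are
    // already complete when constructed.
    public CompletableFuture<String> lookup(String key) {
        final String value = entries.get(key);
        if (value == null) {
            return CompletableFuture.failedFuture(new NoSuchElementException(key)); // Java 9+
        }
        return CompletableFuture.completedFuture(value);
    }
}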

From source file:io.pravega.controller.task.Stream.StreamMetadataTasks.java

/**
 * Update stream's configuration.
 *
 * @param scope      scope.
 * @param stream     stream name.
 * @param newConfig     modified stream configuration.
 * @param contextOpt optional context
 * @return update status.
 */
public CompletableFuture<UpdateStreamStatus.Status> updateStream(String scope, String stream,
        StreamConfiguration newConfig, OperationContext contextOpt) {
    final OperationContext context = contextOpt == null ? streamMetadataStore.createContext(scope, stream)
            : contextOpt;

    // 1. get configuration
    return streamMetadataStore.getConfigurationProperty(scope, stream, true, context, executor)
            .thenCompose(configProperty -> {
                // 2. post event to start update workflow
                if (!configProperty.isUpdating()) {
                    return writeEvent(new UpdateStreamEvent(scope, stream))
                            // 3. update new configuration in the store with updating flag = true
                            // if attempt to update fails, we bail out with no harm done
                            .thenCompose(x -> streamMetadataStore.startUpdateConfiguration(scope, stream,
                                    newConfig, context, executor))
                            // 4. wait for update to complete
                            .thenCompose(x -> checkDone(() -> isUpdated(scope, stream, newConfig, context))
                                    .thenApply(y -> UpdateStreamStatus.Status.SUCCESS));
                } else {
                    log.warn("Another update in progress for {}/{}", scope, stream);
                    return CompletableFuture.completedFuture(UpdateStreamStatus.Status.FAILURE);
                }
            }).exceptionally(ex -> {
                log.warn("Exception thrown in trying to update stream configuration {}", ex.getMessage());
                return handleUpdateStreamError(ex);
            });
}

From source file:io.pravega.controller.store.stream.ZKStreamMetadataStore.java

@Override
public CompletableFuture<Boolean> takeBucketOwnership(int bucket, String processId, Executor executor) {
    Preconditions.checkArgument(bucket < bucketCount);

    // try creating an ephemeral node
    String bucketPath = ZKPaths.makePath(ZKStoreHelper.BUCKET_OWNERSHIP_PATH, String.valueOf(bucket));

    return storeHelper.createEphemeralZNode(bucketPath, SerializationUtils.serialize(processId))
            .thenCompose(created -> {
                if (!created) {
                    // Note: data may disappear by the time we do a getData. Let exception be thrown from here
                    // so that caller may retry.
                    return storeHelper.getData(bucketPath).thenApply(
                            data -> (SerializationUtils.deserialize(data.getData())).equals(processId));
                } else {
                    return CompletableFuture.completedFuture(true);
                }
            });
}

From source file:com.ikanow.aleph2.data_model.interfaces.shared_services.MockManagementCrudService.java

@Override
public ManagementFuture<com.ikanow.aleph2.data_model.interfaces.shared_services.ICrudService.Cursor<T>> getObjectsBySpec(
        QueryComponent<T> spec, List<String> field_list, boolean include) {
    return FutureUtils.createManagementFuture(CompletableFuture.completedFuture(new MyCursor()));
}

From source file:io.pravega.controller.store.stream.InMemoryStream.java

@Override
CompletableFuture<Void> createSegmentTableIfAbsent(final Data<Integer> data) {
    synchronized (lock) {
        if (segmentTable == null) {
            segmentTable = new Data<>(data.getData(), 0);
        }
    }

    return CompletableFuture.completedFuture(null);
}
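
When the declared type is CompletableFuture<Void>, as above, there is no value to report, so completedFuture(null) is the usual way to signal that the work finished synchronously. A minimal sketch of that idiom (the Initializer class is hypothetical):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReference;

public class Initializer {
    private final AtomicReference<byte[]> table = new AtomicReference<>();

    // Hypothetical example: idempotent initialization that finishes inline;
    // the caller only needs completion, so the future's value is null.
    public CompletableFuture<Void> createIfAbsent(byte[] initial) {
        table.compareAndSet(null, initial);
        return CompletableFuture.completedFuture(null);
    }
}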

From source file:io.atomix.cluster.messaging.impl.NettyMessagingService.java

@Override
public CompletableFuture<MessagingService> start() {
    getTlsParameters();
    if (started.get()) {
        log.warn("Already running at local address: {}", localAddress);
        return CompletableFuture.completedFuture(this);
    }

    initEventLoopGroup();
    return startAcceptingConnections().thenRun(() -> {
        timeoutExecutor = Executors
                .newSingleThreadScheduledExecutor(namedThreads("netty-messaging-timeout-%d", log));
        timeoutFuture = timeoutExecutor.scheduleAtFixedRate(this::timeoutAllCallbacks, TIMEOUT_INTERVAL,
                TIMEOUT_INTERVAL, TimeUnit.MILLISECONDS);
        started.set(true);
        log.info("Started");
    }).thenApply(v -> this);
}

From source file:com.ikanow.aleph2.data_model.interfaces.shared_services.MockManagementCrudService.java

@Override
public ManagementFuture<Long> countObjectsBySpec(QueryComponent<T> spec) {
    return FutureUtils.createManagementFuture(CompletableFuture.completedFuture((long) _mutable_values.size()));
}

From source file:io.pravega.controller.store.stream.InMemoryStream.java

@Override
CompletableFuture<Data<Integer>> getSegmentTable() {
    synchronized (lock) {
        if (this.segmentTable == null) {
            return FutureHelpers
                    .failedFuture(StoreException.create(StoreException.Type.DATA_NOT_FOUND, getName()));
        }

        return CompletableFuture.completedFuture(copy(this.segmentTable));
    }
}