Example usage for java.util.concurrent CompletableFuture completedFuture

List of usage examples for java.util.concurrent CompletableFuture completedFuture

Introduction

On this page you can find example usages of java.util.concurrent.CompletableFuture#completedFuture.

Prototype

public static <U> CompletableFuture<U> completedFuture(U value) 

Source Link

Document

Returns a new CompletableFuture that is already completed with the given value.

Usage

From source file:com.ikanow.aleph2.example.flume_harvester.services.FlumeHarvestTechnology.java

/** Worker for starting a flume job or test: kills/cleans up any existing agents and processes
 *  for the bucket, then (if enabled) writes fresh agent configs and launches one process per config.
 * @param new_bucket - the bucket to start/test
 * @param context - the harvest context
 * @param enabled - whether the job is enabled
 * @param test_mode - whether the job is being run as a test
 * @return a completed future holding a success/error message bean (failures are returned as beans
 *         or as an errored future, never thrown)
 */
public CompletableFuture<BasicMessageBean> onNewSource(DataBucketBean new_bucket, IHarvestContext context,
        boolean enabled, boolean test_mode) {
    //TODO (ALEPH-10): unit test for this 

    try {
        // Create an agent per config element:

        if (enabled) {
            final List<FlumeBucketConfigBean> agents = FlumeUtils.getAgents(new_bucket);

            // Clear out stale agent configs from any previous run (count deliberately ignored)
            @SuppressWarnings("unused")
            final int stopped = removeAgentConfigs(new_bucket, 1);

            final Tuple2<String, Boolean> delete_result = FlumeLaunchUtils
                    .killProcess(FlumeLaunchUtils.getPid(new_bucket));
            //(safe side, always kill - should fail harmlessly if px already dead....)

            // Write one agent config file per config bean, pairing each bean with a 1-based index
            final List<Tuple2<String, File>> agent_paths = StreamUtils
                    .zip(agents.stream(), Stream.iterate(1, i -> i + 1), (a, b) -> Tuples._2T(a, b))
                    .map(Lambdas.wrap_u(agent_index -> updateAgentConfig(agent_index._2(), new_bucket,
                            agent_index._1(), context, test_mode)))
                    .collect(Collectors.toList());

            // Launch one flume process per generated config; _1() carries an error, _2() the pid
            final List<Tuple2<String, String>> err_pids = agent_paths.stream().map(
                    agent_path -> FlumeLaunchUtils.launchProcess(new_bucket, _globals, agent_path, context))
                    .collect(Collectors.toList());

            if (err_pids.isEmpty()) {
                // Nothing was launchable - treat as an error
                return CompletableFuture
                        .completedFuture(ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(),
                                "onNewSource", "Found no valid Flume configs " + delete_result._1()));
            } else {
                // NOTE(review): only the FIRST launch result is inspected; if multiple agents are
                // expected per bucket, confirm this single-result reporting is intentional
                final Tuple2<String, String> err_pid = err_pids.get(0);
                if (null != err_pid._1()) {
                    return CompletableFuture
                            .completedFuture(ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(),
                                    "onNewSource", "Bucket error: " + err_pid._1() + " " + delete_result._1()));
                } else {
                    return CompletableFuture.completedFuture(
                            ErrorUtils.buildSuccessMessage(this.getClass().getSimpleName(), "onNewSource",
                                    "Bucket launched: " + err_pid._2() + " " + delete_result._1()));

                }
            }
        } else {
            // Disabled bucket: nothing to launch, report success in suspended mode
            return CompletableFuture.completedFuture(ErrorUtils.buildSuccessMessage(
                    this.getClass().getSimpleName(), "onNewSource", "Created in suspended mode"));
        }
    } catch (Exception e) {
        _logger.error(ErrorUtils.getLongForm("onNewSource: Unknown error {0}", e));
        return FutureUtils.returnError(e);
    }
}

From source file:fi.hsl.parkandride.itest.RequestLogITest.java

/** Fires a batch of HTTP GET requests and request-log flushes concurrently (via parallel
 *  streams) at a fixed fake wall-clock time, then blocks until all work is done.
 *  Note: streams are lazy - the requests/updates actually execute when toArray() drains the
 *  pipelines below; the futures themselves are already completed, so allOf().get() is in
 *  effect just a join point.
 * @param numberOfRequests - number of capacity-type GET requests to issue
 * @param numberOfUpdates - number of batched request-log flushes to trigger
 */
private void concurrentlyGenerateLogs(int numberOfRequests, int numberOfUpdates) {
    withDate(DateTime.now().withTime(12, 2, 0, 0), () -> {
        final Stream<CompletableFuture<Integer>> statusCodes = range(0, numberOfRequests).parallel()
                .mapToObj(i -> {
                    final Response response = given().header(SOURCE_HEADER, WEB_UI_SOURCE).when()
                            .get(UrlSchema.CAPACITY_TYPES).thenReturn();
                    return CompletableFuture.completedFuture(response.statusCode());
                });

        final Stream<CompletableFuture<Integer>> updates = range(0, numberOfUpdates).parallel().mapToObj(i -> {
            batchingRequestLogService.updateRequestLogs();
            return CompletableFuture.completedFuture(0);
        });

        try {
            // toArray() is what actually evaluates (and hence runs) both parallel pipelines
            CompletableFuture.allOf(Stream.concat(statusCodes, updates).toArray(i -> new CompletableFuture[i]))
                    .get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
            throw new AssertionFailedError(e.getMessage());
        }
    });
}

From source file:com.ikanow.aleph2.management_db.controllers.actors.BucketDeletionActor.java

/** Notifies the external harvester of the purge operation by sending a purge message to the
 *  nodes with affinity to the bucket, with retry support for failed deliveries.
 * @param to_purge - the bucket being purged
 * @param status_store - CRUD store used to look up the bucket's status (for node affinity)
 * @param retry_store - CRUD store into which failed notifications are queued for retry
 * @return a future collection of per-node result messages, or a single error bean if no
 *         status record exists for the bucket
 */
public static CompletableFuture<Collection<BasicMessageBean>> notifyHarvesterOfPurge(
        final DataBucketBean to_purge, final ICrudService<DataBucketStatusBean> status_store,
        final ICrudService<BucketActionRetryMessage> retry_store) {
    return status_store.getObjectBySpec(CrudUtils.allOf(DataBucketStatusBean.class)
            .when(DataBucketStatusBean::bucket_path, to_purge.full_name())).thenCompose(status -> {
                if (status.isPresent()) {
                    // Address the purge message to the nodes currently hosting this bucket
                    final BucketActionMessage.PurgeBucketActionMessage purge_msg = new BucketActionMessage.PurgeBucketActionMessage(
                            to_purge, new HashSet<String>(status.get().node_affinity()));
                    // The last arg rebuilds a single-node message when retrying a specific source
                    final CompletableFuture<Collection<BasicMessageBean>> management_results = MgmtCrudUtils
                            .applyRetriableManagementOperation(to_purge, ManagementDbActorContext.get(),
                                    retry_store, purge_msg,
                                    source -> new BucketActionMessage.PurgeBucketActionMessage(to_purge,
                                            new HashSet<String>(Arrays.asList(source))));
                    return management_results;
                } else {
                    // No status record for this bucket - report the failure as a message bean
                    return CompletableFuture.completedFuture(Arrays.asList(
                            new BasicMessageBean(new Date(), false, "CoreManagementDbService", "purgeBucket",
                                    null, "No bucket status for " + to_purge.full_name(), null)));
                }
            });
}

From source file:com.ikanow.aleph2.management_db.services.TestDataBucketCrudService_Create.java

/** Hand-wires the mock service context that Guice would normally inject, plus the bucket /
 *  bucket-status / shared-library CRUD services under test. Order matters: the core DB
 *  service is registered only after the CRUD services it wraps are constructed.
 * @throws Exception if any service construction/initialization fails
 */
@SuppressWarnings({ "deprecation", "unchecked" })
@Before
public void setup() throws Exception {
    ModuleUtils.disableTestInjection();

    // Here's the setup that Guice normally gives you....
    final String tmpdir = System.getProperty("java.io.tmpdir") + File.separator;
    _globals = new GlobalPropertiesBean(tmpdir, tmpdir, tmpdir, tmpdir);
    _storage_service = new MockHdfsStorageService(_globals);
    _mock_service_context = new MockServiceContext();
    _crud_factory = new MockMongoDbCrudServiceFactory();
    _underlying_db_service = new MockMongoDbManagementDbService(_crud_factory,
            new MongoDbManagementDbConfigBean(false), null, null, null, null);
    _core_distributed_services = new MockCoreDistributedServices();
    _mock_service_context.addGlobals(new GlobalPropertiesBean(null, null, null, null));
    _mock_service_context.addService(IManagementDbService.class, Optional.empty(), _underlying_db_service);
    _mock_service_context.addService(ICoreDistributedServices.class, Optional.empty(),
            _core_distributed_services);
    _mock_service_context.addService(IStorageService.class, Optional.empty(), _storage_service);
    _mock_service_context.addService(ISecurityService.class, Optional.empty(), new MockSecurityService());

    // Add a data warehouse service that returns a message in the onPublish call
    // (validateSchema passes; onPublishOrUpdate always returns one error bean)
    final IDataWarehouseService mock_data_warehouse_service = Mockito.mock(IDataWarehouseService.class);
    Mockito.when(mock_data_warehouse_service.validateSchema(Mockito.any(), Mockito.any()))
            .thenReturn(Tuples._2T("test", Collections.emptyList()));
    Mockito.when(mock_data_warehouse_service.onPublishOrUpdate(Mockito.any(), Mockito.any(),
            Mockito.anyBoolean(), Mockito.anySet(), Mockito.anySet()))
            .thenReturn(CompletableFuture
                    .completedFuture(Arrays.asList(ErrorUtils.buildErrorMessage("TEST", "TEST", "TEST"))));
    _mock_service_context.addService(IDataWarehouseService.class, Optional.empty(),
            mock_data_warehouse_service);

    _db_actor_context = new ManagementDbActorContext(_mock_service_context, true);

    // Build the CRUD services under test, then register the core DB service that wraps them
    _bucket_crud = new DataBucketCrudService(_mock_service_context, _db_actor_context);
    _bucket_status_crud = new DataBucketStatusCrudService(_mock_service_context, _db_actor_context);
    _shared_library_crud = new SharedLibraryCrudService(_mock_service_context);
    _core_db_service = new CoreManagementDbService(_mock_service_context, _bucket_crud, _bucket_status_crud,
            _shared_library_crud, _db_actor_context);

    _mock_service_context.addService(IManagementDbService.class, IManagementDbService.CORE_MANAGEMENT_DB,
            _core_db_service);

    // Initialize and grab handles to the underlying stores for direct assertions in tests
    _bucket_crud.initialize();
    _bucket_status_crud.initialize();
    _underlying_bucket_crud = _bucket_crud._underlying_data_bucket_db.get();
    _underlying_bucket_status_crud = _bucket_crud._underlying_data_bucket_status_db.get();
    _bucket_action_retry_store = _bucket_crud._bucket_action_retry_store.get();
}

From source file:com.ikanow.aleph2.graph.titan.services.TitanGraphService.java

/** Called when a bucket is published or updated: builds the graph indices if (and only if)
 *  the bucket requests the graph data service; any failure is converted into an error bean
 *  rather than propagating.
 * @param bucket - the bucket being published/updated
 * @param old_bucket - the previous version of the bucket, if any
 * @param suspended - whether the bucket is suspended
 * @param data_services - the data services enabled for the bucket
 * @param previous_data_services - the data services previously enabled
 * @return a future collection of result messages (empty if the graph service is not requested)
 */
@Override
public CompletableFuture<Collection<BasicMessageBean>> onPublishOrUpdate(DataBucketBean bucket,
        Optional<DataBucketBean> old_bucket, boolean suspended, Set<String> data_services,
        Set<String> previous_data_services) {
    try {
        // Guard: no graph schema requested for this bucket - nothing to do
        if (!data_services.contains(DataSchemaBean.GraphSchemaBean.name)) {
            return CompletableFuture.completedFuture(Collections.emptyList());
        }
        return createIndices(bucket, _titan, _USE_ES_FOR_DEDUP_INDEXES);
    } catch (Throwable t) {
        // Never let index creation errors escape - report them as a single error bean
        final BasicMessageBean error = ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(),
                "onPublishOrUpdate", ErrorUtils.getLongForm("{0}", t));
        return CompletableFuture.completedFuture(Arrays.asList(error));
    }
}

From source file:io.pravega.controller.server.ControllerService.java

/** Resolves the host URI that owns the given segment.
 * @param segment - the segment to look up (must not be null)
 * @return an already-completed future holding the segment's node URI
 */
public CompletableFuture<NodeUri> getURI(final SegmentId segment) {
    Preconditions.checkNotNull(segment, "segment");
    final String scope = segment.getStreamInfo().getScope();
    final String stream = segment.getStreamInfo().getStream();
    // The lookup itself is synchronous; wrap the result for the async API surface
    final NodeUri uri = segmentHelper.getSegmentUri(scope, stream, segment.getSegmentNumber(), hostStore);
    return CompletableFuture.completedFuture(uri);
}

From source file:com.ikanow.aleph2.data_model.interfaces.shared_services.MockManagementCrudService.java

/** Mock implementation: reports unconditional success without touching any store.
 * @param unique_spec - ignored
 * @param upsert - ignored
 * @param update - ignored
 * @return a management future already completed with {@code true}
 */
@Override
public ManagementFuture<Boolean> updateObjectBySpec(QueryComponent<T> unique_spec, Optional<Boolean> upsert,
        UpdateComponent<T> update) {
    final CompletableFuture<Boolean> always_succeeds = CompletableFuture.completedFuture(true);
    return FutureUtils.createManagementFuture(always_succeeds);
}

From source file:com.ikanow.aleph2.shared.crud.mongodb.services.MongoDbCrudService.java

public CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>> storeObjects(final List<O> new_objects,
        final boolean replace_if_present) {
    try {//w  w  w  .  ja va2  s  . c o  m
        //TODO (ALEPH-22): find a bulk store-and-overwrite
        final List<DBObject> l = new_objects.stream().map(o -> convertToBson(o)).collect(Collectors.toList());

        if (replace_if_present) {
            l.stream().forEach(o -> _state.orig_coll.save(o));

            return CompletableFuture.completedFuture(
                    Tuples._2T(() -> l.stream().map(o -> (Object) _state.coll.convertFromDbId(o.get(_ID)))
                            .collect(Collectors.toList()), () -> {
                                return Patterns.match(_state.orig_coll).<Long>andReturn()
                                        .otherwise(__ -> (long) l.size());
                            }));
        } else {
            final com.mongodb.WriteResult orig_result = _state.orig_coll.insert(l,
                    Patterns.match(_state.orig_coll).<InsertOptions>andReturn().when(FongoDBCollection.class,
                            () -> new InsertOptions().continueOnError(true).writeConcern(new WriteConcern()))
                            .otherwise(() -> new InsertOptions().continueOnError(true)));

            return CompletableFuture.completedFuture(
                    Tuples._2T(() -> l.stream().map(o -> (Object) _state.coll.convertFromDbId(o.get(_ID)))
                            .collect(Collectors.toList()), () -> {
                                return Patterns.match(_state.orig_coll).<Long>andReturn()
                                        .when(FongoDBCollection.class, () -> (Long) (long) orig_result.getN())
                                        .otherwise(__ -> (long) l.size());
                            }));
        }
    } catch (Exception e) {
        return FutureUtils.<Tuple2<Supplier<List<Object>>, Supplier<Long>>>returnError(e);
    }
}

From source file:io.pravega.controller.store.stream.InMemoryStream.java

/** Initializes the index table from the given data if it has not been created yet; a
 *  subsequent call leaves the existing table untouched (first writer wins).
 * @param data - the initial index-table payload (non-null, with non-null contents)
 * @return an already-completed void future
 */
@Override
CompletableFuture<Void> createIndexTableIfAbsent(Data<Integer> data) {
    // Validate inputs before taking the lock
    Preconditions.checkNotNull(data);
    Preconditions.checkNotNull(data.getData());

    synchronized (lock) {
        if (indexTable != null) {
            // Already initialized by an earlier call - nothing to do
            return CompletableFuture.completedFuture(null);
        }
        // Store a defensive copy of the payload at version 0
        indexTable = new Data<>(Arrays.copyOf(data.getData(), data.getData().length), 0);
    }
    return CompletableFuture.completedFuture(null);
}

From source file:com.ikanow.aleph2.aleph2_rest_utils.DataStoreCrudService.java

/** Returns a cursor over the files in the configured output directory.
 *  NOTE(review): the spec argument is not consulted - every filename in output_directory is
 *  returned regardless of the query; confirm this is the intended contract.
 * @param spec - the query (currently ignored)
 * @return a completed future holding the cursor, or an exceptionally-completed future if the
 *         directory listing fails
 */
@Override
public CompletableFuture<Cursor<FileDescriptor>> getObjectsBySpec(QueryComponent<FileDescriptor> spec) {
    try {
        final DataStoreCursor all_files = new DataStoreCursor(getFolderFilenames(output_directory, fileContext));
        return CompletableFuture.completedFuture(all_files);
    } catch (IllegalArgumentException | IOException e) {
        // Surface listing failures through the future rather than throwing to the caller
        final CompletableFuture<Cursor<FileDescriptor>> failed = new CompletableFuture<Cursor<FileDescriptor>>();
        failed.completeExceptionally(e);
        return failed;
    }
}