Example usage for java.util.concurrent CompletableFuture get

Introduction

This page collects example usages of the java.util.concurrent.CompletableFuture.get() method, drawn from open-source projects.

Prototype

@SuppressWarnings("unchecked")
public T get() throws InterruptedException, ExecutionException 

Document

Waits if necessary for this future to complete, and then returns its result.
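
For reference, here is a minimal, self-contained sketch of a typical get() call. It is illustrative only and not taken from the projects below; the class name GetExample and the "hello" payload are invented for the demonstration.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

public class GetExample {

    public static void main(String[] args) throws InterruptedException {
        // Complete the future asynchronously on the common ForkJoinPool.
        CompletableFuture<String> future = CompletableFuture.supplyAsync(() -> "hello");

        try {
            // get() blocks the calling thread until the future completes,
            // then returns its result.
            String result = future.get();
            System.out.println(result); // prints "hello"
        } catch (ExecutionException e) {
            // If the computation threw, get() wraps the failure; the
            // original exception is available as the cause.
            e.getCause().printStackTrace();
        }
    }
}

Note that get() wraps any failure of the computation in an ExecutionException; several of the examples below unwrap it again, for instance via Flink's ExceptionUtils.stripExecutionException, before inspecting the underlying failure.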

Usage

From source file:org.apache.flink.runtime.rest.RestServerEndpointITCase.java

/**
 * Tests that a bad handler request (HandlerRequest cannot be created) is reported as a BAD_REQUEST
 * and not an internal server error.
 *
 * <p>See FLINK-7663
 */
@Test
public void testBadHandlerRequest() throws Exception {
    final FaultyTestParameters parameters = new FaultyTestParameters();

    parameters.faultyJobIDPathParameter.resolve(PATH_JOB_ID);
    ((TestParameters) parameters).jobIDQueryParameter.resolve(Collections.singletonList(QUERY_JOB_ID));

    CompletableFuture<TestResponse> response = restClient.sendRequest(serverAddress.getHostName(),
            serverAddress.getPort(), new TestHeaders(), parameters, new TestRequest(2));

    try {
        response.get();

        fail("The request should fail with a bad request return code.");
    } catch (ExecutionException ee) {
        Throwable t = ExceptionUtils.stripExecutionException(ee);

        assertTrue(t instanceof RestClientException);

        RestClientException rce = (RestClientException) t;

        assertEquals(HttpResponseStatus.BAD_REQUEST, rce.getHttpResponseStatus());
    }
}

From source file:io.pravega.client.stream.impl.ControllerImplTest.java

@Test
public void testParallelCreateStream() throws Exception {
    final ExecutorService executorService = Executors.newFixedThreadPool(10);
    Semaphore createCount = new Semaphore(-19);
    AtomicBoolean success = new AtomicBoolean(true);
    for (int i = 0; i < 10; i++) {
        executorService.submit(() -> {
            for (int j = 0; j < 2; j++) {
                try {
                    CompletableFuture<Boolean> createStreamStatus;
                    createStreamStatus = controllerClient
                            .createStream(StreamConfiguration.builder().streamName("streamparallel")
                                    .scope("scope1").scalingPolicy(ScalingPolicy.fixed(1)).build());
                    log.info("{}", createStreamStatus.get());
                    assertTrue(createStreamStatus.get());
                    createCount.release();
                } catch (Exception e) {
                    log.error("Exception when creating stream: {}", e);

                    // Don't wait for other threads to complete.
                    success.set(false);
                    createCount.release(20);
                }
            }
        });
    }
    createCount.acquire();
    executorService.shutdownNow();
    assertTrue(success.get());
}

From source file:org.apache.flink.client.cli.CliFrontend.java

protected void modify(String[] args) throws CliArgsException, FlinkException {
    LOG.info("Running 'modify' command.");

    final Options commandOptions = CliFrontendParser.getModifyOptions();

    final Options commandLineOptions = CliFrontendParser.mergeOptions(commandOptions, customCommandLineOptions);

    final CommandLine commandLine = CliFrontendParser.parse(commandLineOptions, args, false);

    if (commandLine.hasOption(HELP_OPTION.getOpt())) {
        CliFrontendParser.printHelpForModify(customCommandLines);
    }

    final JobID jobId;
    final String[] modifyArgs = commandLine.getArgs();

    if (modifyArgs.length > 0) {
        jobId = parseJobId(modifyArgs[0]);
    } else {
        throw new CliArgsException("Missing JobId");
    }

    final int newParallelism;
    if (commandLine.hasOption(MODIFY_PARALLELISM_OPTION.getOpt())) {
        try {
            newParallelism = Integer.parseInt(commandLine.getOptionValue(MODIFY_PARALLELISM_OPTION.getOpt()));
        } catch (NumberFormatException e) {
            throw new CliArgsException("Could not parse the parallelism which is supposed to be an integer.",
                    e);
        }
    } else {
        throw new CliArgsException("Missing new parallelism.");
    }

    final CustomCommandLine<?> activeCommandLine = getActiveCustomCommandLine(commandLine);

    logAndSysout("Modify job " + jobId + '.');
    runClusterAction(activeCommandLine, commandLine, clusterClient -> {
        CompletableFuture<Acknowledge> rescaleFuture = clusterClient.rescaleJob(jobId, newParallelism);

        try {
            rescaleFuture.get();
        } catch (Exception e) {
            throw new FlinkException("Could not rescale job " + jobId + '.',
                    ExceptionUtils.stripExecutionException(e));
        }
        logAndSysout("Rescaled job " + jobId + ". Its new parallelism is " + newParallelism + '.');
    });
}

From source file:org.apache.flink.client.cli.CliFrontend.java

private <T> void listJobs(ClusterClient<T> clusterClient, boolean showRunning, boolean showScheduled,
        boolean showAll) throws FlinkException {
    Collection<JobStatusMessage> jobDetails;
    try {
        CompletableFuture<Collection<JobStatusMessage>> jobDetailsFuture = clusterClient.listJobs();

        logAndSysout("Waiting for response...");
        jobDetails = jobDetailsFuture.get();

    } catch (Exception e) {
        Throwable cause = ExceptionUtils.stripExecutionException(e);
        throw new FlinkException("Failed to retrieve job list.", cause);
    }

    LOG.info("Successfully retrieved list of jobs");

    final List<JobStatusMessage> runningJobs = new ArrayList<>();
    final List<JobStatusMessage> scheduledJobs = new ArrayList<>();
    final List<JobStatusMessage> terminatedJobs = new ArrayList<>();
    jobDetails.forEach(details -> {
        if (details.getJobState() == JobStatus.CREATED) {
            scheduledJobs.add(details);
        } else if (!details.getJobState().isGloballyTerminalState()) {
            runningJobs.add(details);
        } else {
            terminatedJobs.add(details);
        }
    });

    if (showRunning || showAll) {
        if (runningJobs.size() == 0) {
            System.out.println("No running jobs.");
        } else {
            System.out.println("------------------ Running/Restarting Jobs -------------------");
            printJobStatusMessages(runningJobs);
            System.out.println("--------------------------------------------------------------");
        }
    }
    if (showScheduled || showAll) {
        if (scheduledJobs.size() == 0) {
            System.out.println("No scheduled jobs.");
        } else {
            System.out.println("----------------------- Scheduled Jobs -----------------------");
            printJobStatusMessages(scheduledJobs);
            System.out.println("--------------------------------------------------------------");
        }
    }
    if (showAll) {
        if (terminatedJobs.size() != 0) {
            System.out.println("---------------------- Terminated Jobs -----------------------");
            printJobStatusMessages(terminatedJobs);
            System.out.println("--------------------------------------------------------------");
        }
    }
}

From source file:org.apache.flink.client.cli.CliFrontend.java

/**
 * Sends a {@link org.apache.flink.runtime.messages.JobManagerMessages.TriggerSavepoint}
 * message to the job manager.
 */
private String triggerSavepoint(ClusterClient<?> clusterClient, JobID jobId, String savepointDirectory)
        throws FlinkException {
    logAndSysout("Triggering savepoint for job " + jobId + '.');
    CompletableFuture<String> savepointPathFuture = clusterClient.triggerSavepoint(jobId, savepointDirectory);

    logAndSysout("Waiting for response...");

    final String savepointPath;

    try {
        savepointPath = savepointPathFuture.get();
    } catch (Exception e) {
        Throwable cause = ExceptionUtils.stripExecutionException(e);
        throw new FlinkException("Triggering a savepoint for the job " + jobId + " failed.", cause);
    }

    logAndSysout("Savepoint completed. Path: " + savepointPath);
    logAndSysout("You can resume your program from this savepoint with the run command.");

    return savepointPath;
}

From source file:com.ikanow.aleph2.shared.crud.mongodb.services.TestMongoDbCrudService.java

@Test
public void testUpdateDocs() throws InterruptedException, ExecutionException {

    final MongoDbCrudService<UpdateTestBean, String> service = getTestService("testUpdateDocs",
            UpdateTestBean.class, String.class);

    // Build an object to modify
    final UpdateTestBean.NestedNestedTestBean to_update_nested_nested = new UpdateTestBean.NestedNestedTestBean();
    to_update_nested_nested.nested_nested_string_field = "nested_nested_string_field";
    final UpdateTestBean.NestedTestBean to_update_nested = new UpdateTestBean.NestedTestBean();
    to_update_nested.nested_string_list = Arrays.asList("nested_string_list1", "nested_string_list2");
    to_update_nested.nested_string_field = "nested_string_field";
    to_update_nested.nested_object = to_update_nested_nested;
    final UpdateTestBean.NestedTestBean to_update_nested2 = BeanTemplateUtils.clone(to_update_nested)
            .with("nested_string_field", "nested_string_field2").done();
    final UpdateTestBean to_update = new UpdateTestBean();
    to_update.string_field = "string_field";
    to_update.string_fields = Arrays.asList("string_fields1", "string_fields2");
    to_update.string_fields2 = Arrays.asList("string_fields2_1", "string_fields2_2");
    to_update.bool_field = true;
    to_update.long_field = 1L;
    to_update.nested_list = Arrays.asList(to_update_nested, to_update_nested2);
    to_update.map = ImmutableMap.<String, String>builder().put("mapkey", "mapval").build();
    to_update.nested_object = to_update_nested;
    to_update._id = "test1";

    final CompletableFuture<Supplier<Object>> ret_val_0 = service.storeObject(to_update);
    ret_val_0.get(); // (just check it succeeded)

    // Update test object:

    // Test 1 - getter fields ... update this will error 

    final BeanTemplate<UpdateTestBean.NestedTestBean> nested1 = BeanTemplateUtils
            .build(UpdateTestBean.NestedTestBean.class).with("nested_string_field", "test1").done();

    // Lots of things break here: attempt to do any operations on nested_list.*, multiple atomic operations
    final UpdateComponent<UpdateTestBean> test1 = CrudUtils.update(UpdateTestBean.class)
            .add(UpdateTestBean::string_fields, "AA", false).increment(UpdateTestBean::long_field, 4)
            .nested(UpdateTestBean::nested_list,
                    CrudUtils.update(nested1).unset(UpdateTestBean.NestedTestBean::nested_string_field)
                            .remove(UpdateTestBean.NestedTestBean::nested_string_list,
                                    Arrays.asList("x", "y", "z"))
                            .add(UpdateTestBean.NestedTestBean::nested_string_list, "A", true))
            .unset(UpdateTestBean::bool_field).unset(UpdateTestBean::nested_object)
            .remove(UpdateTestBean::nested_list,
                    CrudUtils.allOf(UpdateTestBean.NestedTestBean.class).when("nested_string_field", "1")) //6)
    ;

    try {
        CompletableFuture<Boolean> ret_val_1 = service.updateObjectById("test1", test1);
        ret_val_1.get();
        fail("Should have thrown an exception");
    } catch (Exception e) {
    } // (this is just temp until I can get the update working)

    // TEST 2 - Same but will succeed

    final QueryComponent<UpdateTestBean> query2 = CrudUtils.allOf(UpdateTestBean.class).when("_id", "test1");

    final BeanTemplate<UpdateTestBean.NestedTestBean> nested2 = BeanTemplateUtils
            .build(UpdateTestBean.NestedTestBean.class).with("nested_string_field", "test1").done(); //(2)

    // Tested: addToSet (collection) add (single val), set, unset, nested, increment, pull
    //TODO: pullAll
    final UpdateComponent<UpdateTestBean> test2 = CrudUtils.update(UpdateTestBean.class)
            .add(UpdateTestBean::string_fields, Arrays.asList("AA", "string_fields1"), true)
            .increment(UpdateTestBean::long_field, 4)
            .nested(UpdateTestBean::nested_object,
                    CrudUtils.update(nested2)
                            .add(UpdateTestBean.NestedTestBean::nested_string_list, "A", false))
            .unset(UpdateTestBean::bool_field)
            .remove(UpdateTestBean::nested_list,
                    CrudUtils.allOf(UpdateTestBean.NestedTestBean.class).when("nested_string_field",
                            "nested_string_field"))
            .remove("string_fields2", Arrays.asList("XXX", "string_fields2_1"));

    //DEBUG
    //System.out.println(service._state.orig_coll.findOne().toString());
    //System.out.println(MongoDbUtils.createUpdateObject(test2));

    CompletableFuture<Boolean> ret_val_2 = service.updateObjectBySpec(query2, Optional.of(false), test2);
    assertTrue("update succeeded", ret_val_2.get());

    final String expected_2 = "{ \"_id\" : \"test1\" , \"string_field\" : \"string_field\" , \"string_fields\" : [ \"string_fields1\" , \"string_fields2\" , \"AA\"] , \"string_fields2\" : [ \"string_fields2_2\"] , \"long_field\" : 5 , \"nested_list\" : [ { \"nested_string_list\" : [ \"nested_string_list1\" , \"nested_string_list2\"] , \"nested_string_field\" : \"nested_string_field2\" , \"nested_object\" : { \"nested_nested_string_field\" : \"nested_nested_string_field\"}}] , \"map\" : { \"mapkey\" : \"mapval\"} , \"nested_object\" : { \"nested_string_list\" : [ \"nested_string_list1\" , \"nested_string_list2\" , \"A\"] , \"nested_string_field\" : \"test1\" , \"nested_object\" : { \"nested_nested_string_field\" : \"nested_nested_string_field\"}}}";

    assertEquals(1L, (long) service.countObjects().get());
    assertEquals(expected_2, service._state.orig_coll.findOne().toString());

    // Tests where no matching object is found

    // Fail

    final QueryComponent<UpdateTestBean> query3 = CrudUtils.allOf(UpdateTestBean.class).when("_id", "test2");

    CompletableFuture<Boolean> ret_val_3 = service.updateObjectBySpec(query3, Optional.of(false), test2);

    assertEquals(1L, (long) service.countObjects().get());
    assertFalse("update did nothing", ret_val_3.get());

    // Upsert

    CompletableFuture<Boolean> ret_val_4 = service.updateObjectBySpec(query3, Optional.of(true), test2);

    assertEquals(2L, (long) service.countObjects().get());
    assertTrue("update upserted", ret_val_4.get());

    // (clear out this object)
    if (null == this._real_mongodb_connection) { // (upsert doesn't work properly in fongo)
        service.deleteObjectsBySpec(CrudUtils.allOf(UpdateTestBean.class).whenNot("_id", "test1"));
    } else {
        assertTrue("Delete corrupted object I just inserted", service.deleteObjectById("test2").get());
    }
    assertEquals(1L, (long) service.countObjects().get());

    // Multi updates:

    for (int i = 2; i < 10; ++i) {
        UpdateTestBean to_insert = BeanTemplateUtils.clone(to_update).with("_id", "test" + i).done();
        final CompletableFuture<Supplier<Object>> ret_val = service.storeObject(to_insert);
        ret_val.get(); // (just check it succeeded)
    }
    assertEquals(9L, (long) service.countObjects().get());

    final QueryComponent<UpdateTestBean> query5 = CrudUtils.allOf(UpdateTestBean.class).rangeAbove("_id",
            "test4", true);

    CompletableFuture<Long> ret_val_5 = service.updateObjectsBySpec(query5, Optional.of(false), test2);

    assertEquals(5L, (long) ret_val_5.get());

    // check one of the objects we updated was in fact updated
    assertEquals(expected_2.replace("\"_id\" : \"test1\"", "\"_id\" : \"test6\""),
            service._state.orig_coll.findOne(new BasicDBObject("_id", "test6")).toString());

}

From source file:org.apache.tinkerpop.gremlin.server.GremlinDriverIntegrateTest.java

@Test
public void shouldGetSomeThenSomeMore() throws Exception {
    final Cluster cluster = Cluster.open();
    final Client client = cluster.connect();

    final ResultSet results = client.submit("[1,2,3,4,5,6,7,8,9]");
    final CompletableFuture<List<Result>> batch1 = results.some(5);
    final CompletableFuture<List<Result>> batch2 = results.some(5);
    final CompletableFuture<List<Result>> batchNothingLeft = results.some(5);

    assertEquals(5, batch1.get().size());
    assertEquals(1, batch1.get().get(0).getInt());
    assertEquals(2, batch1.get().get(1).getInt());
    assertEquals(3, batch1.get().get(2).getInt());
    assertEquals(4, batch1.get().get(3).getInt());
    assertEquals(5, batch1.get().get(4).getInt());

    assertEquals(4, batch2.get().size());
    assertEquals(6, batch2.get().get(0).getInt());
    assertEquals(7, batch2.get().get(1).getInt());
    assertEquals(8, batch2.get().get(2).getInt());
    assertEquals(9, batch2.get().get(3).getInt());

    assertEquals(0, batchNothingLeft.get().size());

    cluster.close();
}

From source file:org.apache.tinkerpop.gremlin.server.GremlinDriverIntegrateTest.java

@Test
public void shouldGetOneThenSomeThenSomeMore() throws Exception {
    final Cluster cluster = Cluster.open();
    final Client client = cluster.connect();

    final ResultSet results = client.submit("[1,2,3,4,5,6,7,8,9]");
    final Result one = results.one();
    final CompletableFuture<List<Result>> batch1 = results.some(4);
    final CompletableFuture<List<Result>> batch2 = results.some(5);
    final CompletableFuture<List<Result>> batchNothingLeft = results.some(5);

    assertEquals(1, one.getInt());

    assertEquals(4, batch1.get().size());
    assertEquals(2, batch1.get().get(0).getInt());
    assertEquals(3, batch1.get().get(1).getInt());
    assertEquals(4, batch1.get().get(2).getInt());
    assertEquals(5, batch1.get().get(3).getInt());

    assertEquals(4, batch2.get().size());
    assertEquals(6, batch2.get().get(0).getInt());
    assertEquals(7, batch2.get().get(1).getInt());
    assertEquals(8, batch2.get().get(2).getInt());
    assertEquals(9, batch2.get().get(3).getInt());

    assertEquals(0, batchNothingLeft.get().size());

    cluster.close();
}

From source file:org.venice.beachfront.bfapi.services.PiazzaService.java

@Async
public void execute(String serviceId, Algorithm algorithm, String userId, String jobId, Boolean computeMask,
        String jobName, CompletableFuture<Scene> sceneFuture, JobStatusCallback callback) {
    String piazzaJobUrl = String.format("%s/job", PIAZZA_URL);
    piazzaLogger.log(String.format(
            "Preparing to submit Execute Job request to Piazza at %s to Service ID %s by User %s.",
            piazzaJobUrl, serviceId, userId), Severity.INFORMATIONAL);

    // Ensure that the Scene has finished activating before proceeding with the Piazza execution.
    Scene scene = null;
    // capture when activation began
    DateTime activationStart = new DateTime();
    try {
        piazzaLogger.log(String.format("Waiting for Activation for Job %s", jobId), Severity.INFORMATIONAL);
        scene = sceneFuture.get();

        // calculate diff between now and when job started activation
        piazzaLogger.log(String.format(
                "Job %s Scene has been activated for Scene ID %s, Scene platorm: %s, completed activation in %d seconds",
                jobId, Scene.parseExternalId(scene.getSceneId()), Scene.parsePlatform(scene.getSceneId()),
                new Duration(activationStart, new DateTime()).getStandardSeconds()), Severity.INFORMATIONAL);
    } catch (InterruptedException | ExecutionException e) {
        piazzaLogger.log(String.format("Getting Active Scene failed for Job %s : %s", jobId, e.getMessage()),
                Severity.ERROR);
        callback.updateStatus(jobId, Job.STATUS_ERROR, "Activation timeout");

        // calculate diff between now and when job started activation
        piazzaLogger.log(
                String.format("Job %s failed activation in %d seconds.", jobId,
                        new Duration(activationStart, new DateTime()).getStandardSeconds()),
                Severity.INFORMATIONAL);

        return;
    }

    // Generate the Algorithm CLI
    // Formulate the URLs for the Scene
    List<String> fileNames;
    List<String> fileUrls;
    try {
        fileNames = sceneService.getSceneInputFileNames(scene);
        fileUrls = sceneService.getSceneInputURLs(scene);
    } catch (Exception exception) {
        exception.printStackTrace();
        piazzaLogger.log(
                String.format("Could not get Asset Information for Job %s: %s", jobId, exception.getMessage()),
                Severity.ERROR);
        callback.updateStatus(jobId, Job.STATUS_ERROR, "Scene metadata error");
        return;
    }

    // Prepare Job Request
    String algorithmCli = getAlgorithmCli(algorithm.getName(), fileNames,
            Scene.parsePlatform(scene.getSceneId()), computeMask);
    piazzaLogger.log(String.format("Generated CLI Command for Job %s (Scene %s) for User %s : %s", jobName,
            scene.getSceneId(), userId, algorithmCli), Severity.INFORMATIONAL);

    // Generate the Headers for Execution, including the API Key
    HttpHeaders headers = createPiazzaHeaders(PIAZZA_API_KEY);
    // Structure the Job Request
    String requestJson = null;
    try {
        // Add quotations to each element in the files lists, to ensure that JSON has the quotes after the
        // string-replace.
        List<String> quotedFileNames = new ArrayList<>();
        List<String> quotedFileUrls = new ArrayList<>();
        for (String fileName : fileNames) {
            quotedFileNames.add(String.format("\\\"%s\\\"", fileName));
        }
        for (String fileUrl : fileUrls) {
            quotedFileUrls.add(String.format("\\\"%s\\\"", fileUrl));
        }
        // Replace all user values into the execute request JSON template
        requestJson = String.format(loadJobRequestJson(), jobId, serviceId, algorithmCli,
                String.join(", ", quotedFileUrls), String.join(", ", quotedFileNames), userId);
    } catch (Exception exception) {
        exception.printStackTrace();
        piazzaLogger.log(String.format("Could not load local resource file for Job Request for Job %s", jobId),
                Severity.ERROR);
        callback.updateStatus(jobId, Job.STATUS_ERROR, "Error submitting job");
        return;
    }
    HttpEntity<String> request = new HttpEntity<>(requestJson, headers);

    // Execute the Request
    try {
        restTemplate.exchange(URI.create(piazzaJobUrl), HttpMethod.POST, request, String.class);
    } catch (HttpClientErrorException | HttpServerErrorException exception) {
        piazzaLogger.log(String.format(
                "Piazza Job Request by User %s has failed with Code %s and Error %s. The body of the request was: %s",
                userId, exception.getStatusText(), exception.getResponseBodyAsString(), requestJson),
                Severity.ERROR);
        callback.updateStatus(jobId, Job.STATUS_ERROR, "Error submiting job");
        return;
    }

    // Update the Status of the Job as Submitted
    callback.updateStatus(jobId, Job.STATUS_SUBMITTED, null);
    // Log the Successful execution
    piazzaLogger.log(
            String.format("Received successful response from Piazza for Job %s by User %s.", jobId, userId),
            Severity.INFORMATIONAL);
}