Example usage for java.util.concurrent CompletableFuture cancel

Introduction

This page shows example usages of java.util.concurrent.CompletableFuture.cancel, collected from open source projects.

Prototype

public boolean cancel(boolean mayInterruptIfRunning) 

Document

If not already completed, completes this CompletableFuture with a CancellationException. Note that for CompletableFuture the mayInterruptIfRunning argument has no effect, because interrupts are not used to control processing.
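
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the sources below) showing what cancel does on a CompletableFuture: the future itself completes with a CancellationException, no thread is interrupted, and dependent stages complete exceptionally rather than being cancelled.

import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;

public class CancelDemo {
    public static void main(String[] args) {
        // A future that nobody ever completes, standing in for a slow async operation
        CompletableFuture<String> future = new CompletableFuture<>();

        // A dependent stage; it completes exceptionally once the parent is cancelled
        CompletableFuture<Integer> dependent = future.thenApply(String::length);

        // The mayInterruptIfRunning flag has no effect here; no thread is interrupted
        boolean cancelled = future.cancel(true);

        System.out.println(cancelled);                            // true
        System.out.println(future.isCancelled());                 // true

        try {
            future.join();
        } catch (CancellationException e) {
            System.out.println("join() threw CancellationException");
        }

        // The dependent stage is completed exceptionally (a CompletionException
        // caused by the CancellationException), but is not itself "cancelled"
        System.out.println(dependent.isCompletedExceptionally()); // true
        System.out.println(dependent.isCancelled());              // false
    }
}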

Usage

From source file:opensnap.repository.MongoRepository.java

public CompletableFuture<T> getOne(String key, Object value) {
    CompletableFuture<T> future = new CompletableFuture<>();

    collection.find(new Document(key, value)).one().register((document, e) -> {
        try {
            if (document != null) {
                future.complete(mapper.readValue(toJson(document), clazz));
            } else {
                logger.error("No document with attribute " + key + "=" + value + " found", e);
                future.cancel(true);
            }
        } catch (IOException ex) {
            logger.error("Error while parsing document in getOne() : " + document.toString(), ex);
            future.cancel(true);
        }
    });
    return future;
}

From source file:opensnap.repository.MongoRepository.java

public CompletableFuture<T> insert(T elem) {
    CompletableFuture<T> future = new CompletableFuture<>();
    try {
        Document doc = Document.valueOf(mapper.writeValueAsString(elem));
        collection.insert(doc).register((result, e) -> {
            if (result != null && result.wasAcknowledged()) {
                elem.setId(doc.getObjectId("_id"));
                future.complete(elem);
            } else {
                logger.error("Error while creating a new document in insert() : " + doc.toString(), e);
                future.cancel(true);
            }
        });
    } catch (JsonProcessingException e) {
        logger.error("Error while creating element " + elem.toString() + " in insert()", e);
        future.cancel(true);
    }
    return future;
}

From source file:com.microsoft.azure.servicebus.samples.receiveloop.ReceiveLoop.java

public void run(String connectionString) throws Exception {

    QueueClient sendClient;
    IMessageReceiver receiver;
    CompletableFuture receiveTask;

    // Create a QueueClient instance using the connection string builder
    // We set the receive mode to "PeekLock", meaning the message is delivered
    // under a lock and must be acknowledged ("completed") to be removed from the queue

    sendClient = new QueueClient(new ConnectionStringBuilder(connectionString, "BasicQueue"),
            ReceiveMode.PEEKLOCK);
    this.sendMessagesAsync(sendClient).thenRunAsync(() -> sendClient.closeAsync());

    receiver = ClientFactory.createMessageReceiverFromConnectionStringBuilder(
            new ConnectionStringBuilder(connectionString, "BasicQueue"), ReceiveMode.PEEKLOCK);
    receiveTask = this.receiveMessagesAsync(receiver);

    waitForEnter(10);

    receiveTask.cancel(true);
    receiver.close();

    CompletableFuture.allOf(receiveTask.exceptionally(t -> {
        if (t instanceof CancellationException) {
            return null;
        }
        throw new RuntimeException((Throwable) t);
    })).join();

}
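
The Service Bus samples in this section all shut down the same way: cancel the long-running receive future, then map the resulting CancellationException back to normal completion before joining, so that an orderly shutdown is not reported as a failure. A condensed sketch of just that pattern (the ShutdownHelper and awaitCancelled names are invented for illustration):

import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;

final class ShutdownHelper {
    // Hypothetical helper illustrating the shutdown pattern used in these samples:
    // cancel the task, then treat only CancellationException as a normal outcome.
    static void awaitCancelled(CompletableFuture<Void> task) {
        task.cancel(true);
        task.exceptionally(t -> {
            if (t instanceof CancellationException) {
                return null;                  // expected: we cancelled the task ourselves
            }
            throw new RuntimeException(t);    // any other failure is a real error
        }).join();
    }
}

In the samples, the equivalent logic is inlined into CompletableFuture.allOf(...) together with the futures that close the senders and receivers.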

From source file:com.microsoft.azure.servicebus.samples.messagebrowse.MessageBrowse.java

public void run(String connectionString) throws Exception {

    QueueClient sendClient;
    IMessageReceiver receiver;
    CompletableFuture receiveTask;

    // Create a QueueClient instance using the connection string builder
    // We set the receive mode to "PeekLock", meaning the message is delivered
    // under a lock and must be acknowledged ("completed") to be removed from the queue

    sendClient = new QueueClient(new ConnectionStringBuilder(connectionString, "BasicQueue"),
            ReceiveMode.PEEKLOCK);
    this.sendMessagesAsync(sendClient).thenRunAsync(() -> sendClient.closeAsync());

    receiver = ClientFactory.createMessageReceiverFromConnectionStringBuilder(
            new ConnectionStringBuilder(connectionString, "BasicQueue"), ReceiveMode.PEEKLOCK);
    receiveTask = this.peekMessagesAsync(receiver);

    // wait for ENTER or 10 seconds elapsing
    waitForEnter(10);

    receiveTask.cancel(true);

    CompletableFuture.allOf(receiveTask.exceptionally(t -> {
        if (t instanceof CancellationException) {
            return null;
        }
        throw new RuntimeException((Throwable) t);
    }), receiver.closeAsync()).join();
}

From source file:com.microsoft.azure.servicebus.samples.timetolive.TimeToLive.java

public void run(String connectionString) throws Exception {

    IMessageSender sendClient;
    CompletableFuture<Void> receiveTask;
    CompletableFuture<Void> fixUpTask;

    // send messages
    sendClient = ClientFactory.createMessageSenderFromConnectionStringBuilder(
            new ConnectionStringBuilder(connectionString, "BasicQueue"));
    this.sendMessagesAsync(sendClient);

    // wait for all messages to expire
    Thread.sleep(15 * 1000);

    ExecutorService executorService = Executors.newCachedThreadPool();
    // start the receiver tasks and the fixup tasks
    receiveTask = this.receiveMessagesAsync(connectionString, "BasicQueue", executorService);
    fixUpTask = this.pickUpAndFixDeadLetters(connectionString, "BasicQueue", sendClient, executorService);

    // wait for ENTER or 10 seconds elapsing
    waitForEnter(10);

    // cancel the running tasks
    receiveTask.cancel(true);
    fixUpTask.cancel(true);

    // wait for the tasks to complete
    CompletableFuture.allOf(sendClient.closeAsync(), receiveTask.exceptionally(t -> {
        if (t instanceof CancellationException) {
            return null;
        }
        throw new RuntimeException(t);
    }), fixUpTask.exceptionally(t -> {
        if (t instanceof CancellationException) {
            return null;
        }
        throw new RuntimeException(t);
    })).join();

    executorService.shutdown();
}

From source file:com.microsoft.azure.servicebus.samples.deadletterqueue.DeadletterQueue.java

public void run(String connectionString) throws Exception {

    CompletableFuture<Void> receiveTask;
    CompletableFuture<Void> fixUpTask;
    IMessageSender sendClient;

    sendClient = ClientFactory.createMessageSenderFromConnectionStringBuilder(
            new ConnectionStringBuilder(connectionString, "BasicQueue"));

    // max delivery-count scenario
    this.sendMessagesAsync(sendClient, 1).join();
    this.exceedMaxDelivery(connectionString, "BasicQueue").join();

    // fix-up scenario
    this.sendMessagesAsync(sendClient, Integer.MAX_VALUE);
    ExecutorService executorService = Executors.newCachedThreadPool();
    receiveTask = this.receiveMessagesAsync(connectionString, "BasicQueue", executorService);
    fixUpTask = this.PickUpAndFixDeadletters(connectionString, "BasicQueue", sendClient, executorService);

    // wait for ENTER or 10 seconds elapsing
    waitForEnter(10);

    receiveTask.cancel(true);
    fixUpTask.cancel(true);

    CompletableFuture.allOf(sendClient.closeAsync(), receiveTask.exceptionally(t -> {
        if (t instanceof CancellationException) {
            return null;
        }
        throw new RuntimeException((Throwable) t);
    }), fixUpTask.exceptionally(t -> {
        if (t instanceof CancellationException) {
            return null;
        }
        throw new RuntimeException((Throwable) t);
    })).join();

    executorService.shutdown();
}

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java

@Override
public Future<Model> train(Dataset dataset, Algorithm algorithm, Map<String, Object> parameters,
        String predictionFeature, MetaInfo modelMeta, String taskId) {

    CompletableFuture<Model> futureModel = new CompletableFuture<>();

    TrainingRequest trainingRequest = new TrainingRequest();
    trainingRequest.setDataset(dataset);
    trainingRequest.setParameters(parameters);
    trainingRequest.setPredictionFeature(predictionFeature);
    //        String trainingRequestString = serializer.write(trainingRequest);

    final HttpPost request = new HttpPost(algorithm.getTrainingService());

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureModel.completeExceptionally(ex);
        return futureModel;
    }
    InputStreamEntity entity = new InputStreamEntity(in, ContentType.APPLICATION_JSON);
    entity.setChunked(true);

    request.setEntity(entity);
    request.addHeader("Accept", "application/json");

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    TrainingResponse trainingResponse = serializer.parse(responseStream,
                            TrainingResponse.class);
                    Model model = new Model();
                    model.setId(randomStringGenerator.nextString(20));
                    model.setActualModel(trainingResponse.getRawModel());
                    model.setPmmlModel(trainingResponse.getPmmlModel());
                    model.setAdditionalInfo(trainingResponse.getAdditionalInfo());
                    model.setAlgorithm(algorithm);
                    model.setParameters(parameters);
                    model.setDatasetUri(dataset != null ? dataset.getDatasetURI() : null);

                    // Check if independentFeatures of the model exist in the dataset
                    List<String> filteredIndependedFeatures = new ArrayList<String>();

                    if (dataset != null && dataset.getFeatures() != null
                            && trainingResponse.getIndependentFeatures() != null)
                        for (String feature : trainingResponse.getIndependentFeatures()) {
                            for (FeatureInfo featureInfo : dataset.getFeatures()) {
                                if (feature.equals(featureInfo.getURI()))
                                    filteredIndependedFeatures.add(feature);
                            }
                        }

                    model.setIndependentFeatures(filteredIndependedFeatures);
                    model.setDependentFeatures(Arrays.asList(predictionFeature));
                    model.setMeta(modelMeta);

                    List<String> predictedFeatures = new ArrayList<>();
                    for (String featureTitle : trainingResponse.getPredictedFeatures()) {
                        Feature predictionFeatureResource = featureHandler.findByTitleAndSource(featureTitle,
                                "algorithm/" + algorithm.getId());
                        if (predictionFeatureResource == null) {
                            // Create the prediction features (POST /feature)
                            String predFeatID = randomStringGenerator.nextString(12);
                            predictionFeatureResource = new Feature();
                            predictionFeatureResource.setId(predFeatID);
                            predictionFeatureResource.setPredictorFor(predictionFeature);
                            predictionFeatureResource.setMeta(MetaInfoBuilder.builder()
                                    .addSources(
                                            /*messageBody.get("base_uri") + */"algorithm/" + algorithm.getId())
                                    .addComments("Feature created to hold predictions by algorithm with ID "
                                            + algorithm.getId())
                                    .addTitles(featureTitle).addSeeAlso(predictionFeature)
                                    .addCreators(algorithm.getMeta().getCreators()).build());
                            /* Create feature */
                            featureHandler.create(predictionFeatureResource);
                        }
                        predictedFeatures.add(baseURI + "feature/" + predictionFeatureResource.getId());
                    }
                    model.setPredictedFeatures(predictedFeatures);
                    futureModel.complete(model);
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new BadRequestException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureModel.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureModel.completeExceptionally(ex);
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureModel.cancel(true);
        }

    });

    serializer.write(trainingRequest, out);
    try {
        out.close();
    } catch (IOException ex) {
        futureModel.completeExceptionally(ex);
    }

    futureMap.put(taskId, futureResponse);
    return futureModel;
}
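
The two JPDI client methods in this file wire the HTTP client's FutureCallback onto a CompletableFuture: completed(...) maps to complete(...), failed(...) to completeExceptionally(...), and cancelled() to cancel(true), so any caller blocked on the future observes a CancellationException. A stripped-down sketch of that bridge, with a hypothetical AsyncCallback interface standing in for the HTTP client's callback type:

import java.util.concurrent.CompletableFuture;

// Hypothetical callback interface standing in for an async client's FutureCallback
interface AsyncCallback<T> {
    void completed(T result);
    void failed(Exception ex);
    void cancelled();
}

final class CallbackBridge {
    // Adapts callback-style completion to a CompletableFuture, propagating
    // cancellation so that waiters on the future see a CancellationException
    static <T> AsyncCallback<T> bridge(CompletableFuture<T> future) {
        return new AsyncCallback<T>() {
            @Override public void completed(T result) { future.complete(result); }
            @Override public void failed(Exception ex) { future.completeExceptionally(ex); }
            @Override public void cancelled() { future.cancel(true); }
        };
    }
}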

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java

@Override
public Future<Dataset> predict(Dataset inputDataset, Model model, MetaInfo datasetMeta, String taskId) {

    CompletableFuture<Dataset> futureDataset = new CompletableFuture<>();

    Dataset dataset = DatasetFactory.copy(inputDataset);
    Dataset tempWithDependentFeatures = DatasetFactory.copy(dataset,
            new HashSet<>(model.getDependentFeatures()));

    dataset.getDataEntry().parallelStream().forEach(dataEntry -> {
        dataEntry.getValues().keySet().retainAll(model.getIndependentFeatures());
    });
    PredictionRequest predictionRequest = new PredictionRequest();
    predictionRequest.setDataset(dataset);
    predictionRequest.setRawModel(model.getActualModel());
    predictionRequest.setAdditionalInfo(model.getAdditionalInfo());

    final HttpPost request = new HttpPost(model.getAlgorithm().getPredictionService());
    request.addHeader("Accept", "application/json");
    request.addHeader("Content-Type", "application/json");

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
        return futureDataset;
    }
    request.setEntity(new InputStreamEntity(in, ContentType.APPLICATION_JSON));

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    try {
                        PredictionResponse predictionResponse = serializer.parse(responseStream,
                                PredictionResponse.class);

                        List<LinkedHashMap<String, Object>> predictions = predictionResponse.getPredictions();
                        if (dataset.getDataEntry().isEmpty()) {
                            DatasetFactory.addEmptyRows(dataset, predictions.size());
                        }
                        List<Feature> features = featureHandler
                                .findBySource("algorithm/" + model.getAlgorithm().getId());
                        IntStream.range(0, dataset.getDataEntry().size())
                                // .parallel()
                                .forEach(i -> {
                                    Map<String, Object> row = predictions.get(i);
                                    DataEntry dataEntry = dataset.getDataEntry().get(i);
                                    if (model.getAlgorithm().getOntologicalClasses().contains("ot:Scaling")
                                            || model.getAlgorithm().getOntologicalClasses()
                                                    .contains("ot:Transformation")) {
                                        dataEntry.getValues().clear();
                                        dataset.getFeatures().clear();
                                    }
                                    row.entrySet().stream().forEach(entry -> {
                                        //                                                    Feature feature = featureHandler.findByTitleAndSource(entry.getKey(), "algorithm/" + model.getAlgorithm().getId());
                                        Feature feature = features.stream()
                                                .filter(f -> f.getMeta().getTitles().contains(entry.getKey()))
                                                .findFirst().orElse(null);
                                        if (feature == null) {
                                            return;
                                        }
                                        dataEntry.getValues().put(baseURI + "feature/" + feature.getId(),
                                                entry.getValue());
                                        FeatureInfo featInfo = new FeatureInfo(
                                                baseURI + "feature/" + feature.getId(),
                                                feature.getMeta().getTitles().stream().findFirst().get());
                                        featInfo.setCategory(Dataset.DescriptorCategory.PREDICTED);
                                        dataset.getFeatures().add(featInfo);
                                    });
                                });
                        dataset.setId(randomStringGenerator.nextString(20));
                        dataset.setTotalRows(dataset.getDataEntry().size());
                        dataset.setMeta(datasetMeta);
                        futureDataset.complete(DatasetFactory.mergeColumns(dataset, tempWithDependentFeatures));
                    } catch (Exception ex) {
                        futureDataset.completeExceptionally(ex);
                    }
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new BadRequestException(message));
                    break;
                case 404:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new NotFoundException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureDataset.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureDataset.completeExceptionally(new InternalServerErrorException(ex));
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureDataset.cancel(true);
        }
    });
    serializer.write(predictionRequest, out);
    try {
        out.close();
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
    }
    futureMap.put(taskId, futureResponse);
    return futureDataset;
}

From source file:org.eclipse.jdt.ls.core.internal.handlers.JDTLanguageServer.java

@Override
public CompletableFuture<Either<List<CompletionItem>, CompletionList>> completion(CompletionParams position) {
    logInfo(">> document/completion");
    CompletionHandler handler = new CompletionHandler();
    final IProgressMonitor[] monitors = new IProgressMonitor[1];
    CompletableFuture<Either<List<CompletionItem>, CompletionList>> result = computeAsync((monitor) -> {
        monitors[0] = monitor;
        if (Boolean.getBoolean(JAVA_LSP_JOIN_ON_COMPLETION)) {
            waitForLifecycleJobs(monitor);
        }
        return handler.completion(position, monitor);
    });
    result.join();
    if (monitors[0].isCanceled()) {
        result.cancel(true);
    }
    return result;
}

From source file:org.eclipse.jdt.ls.core.internal.handlers.JDTLanguageServer.java

@Override
public CompletableFuture<CompletionItem> resolveCompletionItem(CompletionItem unresolved) {
    logInfo(">> document/resolveCompletionItem");
    CompletionResolveHandler handler = new CompletionResolveHandler(preferenceManager);
    final IProgressMonitor[] monitors = new IProgressMonitor[1];
    CompletableFuture<CompletionItem> result = computeAsync((monitor) -> {
        monitors[0] = monitor;
        if ((Boolean.getBoolean(JAVA_LSP_JOIN_ON_COMPLETION))) {
            waitForLifecycleJobs(monitor);
        }
        return handler.resolve(unresolved, monitor);
    });
    result.join();
    if (monitors[0].isCanceled()) {
        result.cancel(true);
    }
    return result;
}