Example usage for java.util.function Consumer accept

List of usage examples for java.util.function Consumer accept

Introduction

On this page you can find example usages of java.util.function Consumer accept.

Prototype

void accept(T t);

Document

Performs this operation on the given argument.
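
A minimal, self-contained sketch (not taken from any of the projects below) showing accept(T) called directly and indirectly through forEach:

import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

public class ConsumerAcceptDemo {
    public static void main(String[] args) {
        // a Consumer that performs a side effect on its argument
        Consumer<String> printer = s -> System.out.println("Got: " + s);

        // accept(T t) performs the operation on the given argument
        printer.accept("hello");

        // Consumers compose with andThen(...)
        Consumer<String> shouter = s -> System.out.println(s.toUpperCase());
        Consumer<String> both = printer.andThen(shouter);

        // Iterable.forEach calls accept once per element
        List<String> values = Arrays.asList("a", "b", "c");
        values.forEach(both);
    }
}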

Usage

From source file:org.jboss.pnc.environment.openshift.OpenshiftStartedEnvironment.java

/**
 * Method to retry creating the whole Openshift environment in case of failure
 *
 * @param e exception thrown
 * @param onComplete consumer to call if successful
 * @param onError consumer to call if no more retries
 * @param retries how many times will we retry starting the build environment
 */
private void retryPod(Exception e, Consumer<RunningEnvironment> onComplete, Consumer<Exception> onError,
        int retries) {

    gaugeMetric.ifPresent(g -> g.incrementMetric(METRICS_POD_STARTED_FAILED_KEY));

    logger.debug("Cancelling existing monitors for this build environment");
    cancelAndClearMonitors();

    // no more retries, execute the onError consumer
    if (retries == 0) {
        onError.accept(e);

    } else {
        logger.error("Creating build environment failed! Retrying...");
        gaugeMetric.ifPresent(g -> g.incrementMetric(METRICS_POD_STARTED_RETRY_KEY));

        // since deletion runs in an executor, it might run *after* createEnvironment() finishes.
        // createEnvironment() will overwrite the OpenShift object fields, so we need to capture the existing
        // OpenShift objects to delete before they get overwritten by createEnvironment()
        Route routeToDestroy = route;
        Service serviceToDestroy = service;
        Service sshServiceToDestroy = sshService;
        Pod podToDestroy = pod;

        executor.submit(() -> {
            try {
                logger.debug("Destroying old build environment");
                destroyEnvironment(routeToDestroy, serviceToDestroy, sshServiceToDestroy, podToDestroy, true);
            } catch (Exception ex) {
                logger.error("Error deleting previous environment", ex);
            }
        });

        logger.debug("Creating new build environment");
        createEnvironment();

        // restart the process again
        monitorInitialization(onComplete, onError, retries - 1);
        // at this point the task running this method is finished; new ones are created to monitor pod/service/route creation
    }

}
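
The Consumer call that matters here is onError.accept(e) once the retry budget reaches zero. A stripped-down, hypothetical sketch of that accept-on-exhaustion pattern (names are made up, not part of the project):

import java.util.function.Consumer;

// Hypothetical helper illustrating the pattern above: hand the failure to the
// caller's error Consumer only when no retries remain, otherwise try again.
final class RetrySketch {
    static void retryOrFail(Exception e, Runnable retryAction, Consumer<Exception> onError, int retries) {
        if (retries == 0) {
            onError.accept(e); // no more retries: surface the failure through the Consumer
        } else {
            retryAction.run(); // try again; the caller decrements retries itself
        }
    }

    public static void main(String[] args) {
        retryOrFail(new IllegalStateException("pod failed to start"),
                () -> System.out.println("retrying..."),
                ex -> System.err.println("giving up: " + ex.getMessage()),
                0);
    }
}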

From source file:org.jboss.pnc.environment.openshift.OpenshiftStartedEnvironment.java

/**
 * retries is decremented in retryPod in case of pod failing to start
 *
 * @param onComplete consumer to call if successful
 * @param onError consumer to call on error
 * @param retries how many times to retry starting the build environment
 */
private void monitorInitialization(Consumer<RunningEnvironment> onComplete, Consumer<Exception> onError,
        int retries) {

    Consumer<RunningEnvironment> onCompleteInternal = (runningEnvironment) -> {
        logger.info("New build environment available on internal url: {}", getInternalEndpointUrl());

        try {
            Runnable onUrlAvailable = () -> onComplete.accept(runningEnvironment);

            URL url = new URL(getInternalEndpointUrl());
            addMonitors(pullingMonitor.monitor(onUrlAvailable, onError, () -> isServletAvailable(url)));
        } catch (IOException e) {
            onError.accept(e);
        }
    };

    Consumer<Exception> onErrorInternal = (exception) -> {
        cancelAndClearMonitors();
        onError.accept(exception);
    };

    cancelHook = () -> onComplete.accept(null);

    creatingPod.ifPresent((f) -> f.thenRunAsync(() -> {
        addMonitors(pullingMonitor.monitor(onEnvironmentInitComplete(onCompleteInternal, Selector.POD),
                (t) -> this.retryPod(t, onComplete, onError, retries), this::isPodRunning));
    }));

    creatingService.ifPresent((f) -> f.thenRunAsync(() -> {
        addMonitors(pullingMonitor.monitor(onEnvironmentInitComplete(onCompleteInternal, Selector.SERVICE),
                onErrorInternal, this::isServiceRunning));
    }));

    logger.info("Waiting to initialize environment. Pod [{}]; Service [{}].", pod.getName(), service.getName());

    creatingRoute.ifPresent((f) -> f.thenRunAsync(() -> {
        addMonitors(pullingMonitor.monitor(onEnvironmentInitComplete(onCompleteInternal, Selector.ROUTE),
                onErrorInternal, this::isRouteRunning));
        logger.info("Route [{}].", route.getName());
    }));

    // monitor creation errors after all other monitors to make sure we cancel all of them on failure
    addMonitors(pullingMonitor.monitor(() -> {
    }, onErrorInternal, this::checkOpenshiftObjectCreation));
}
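
Both onCompleteInternal and onErrorInternal are Consumers that wrap the caller-supplied ones and delegate through accept. A standalone sketch of that wrapping pattern, with hypothetical names:

import java.util.function.Consumer;

// Standalone sketch: decorate a caller-supplied Consumer with extra behaviour,
// then delegate to it via accept, as onErrorInternal does above.
final class ConsumerWrapSketch {
    static <T> Consumer<T> withCleanup(Runnable cleanup, Consumer<T> delegate) {
        return value -> {
            cleanup.run();          // e.g. cancelAndClearMonitors() in the real code
            delegate.accept(value); // then hand the value to the original Consumer
        };
    }

    public static void main(String[] args) {
        Consumer<Exception> onError = ex -> System.err.println("failed: " + ex.getMessage());
        Consumer<Exception> onErrorInternal =
                withCleanup(() -> System.out.println("cancelling monitors"), onError);
        onErrorInternal.accept(new IllegalStateException("service did not start"));
    }
}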

From source file:ch.algotrader.esper.EngineImpl.java

@Override
public void addTimerCallback(Date dateTime, String name, Consumer<Date> consumer) {

    // execute callback immediately if dateTime is in the past
    if (dateTime.compareTo(getCurrentTime()) < 0) {
        consumer.accept(getCurrentTime());
    } else {
        String alias = "ON_TIMER_"
                + DateTimeUtil.formatAsGMT(dateTime.toInstant()).replace(" ", "_").replace(":", "-")
                + (name != null ? "_" + name : "");

        Calendar cal = Calendar.getInstance();
        cal.setTime(dateTime);

        Object[] params = { alias, cal.get(Calendar.MINUTE), cal.get(Calendar.HOUR_OF_DAY),
                cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.MONTH) + 1, cal.get(Calendar.SECOND),
                cal.get(Calendar.YEAR) };

        deployStatement("prepared", "ON_TIMER", alias, params, new TimerCallback(this, alias, consumer), true);
    }
}
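
A standalone sketch of the same "run now if the time has passed, otherwise defer" decision, using java.util.Timer in place of the Esper ON_TIMER statement the real method deploys:

import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;
import java.util.function.Consumer;

// Standalone sketch only: the real implementation defers through an Esper statement;
// here a plain java.util.Timer stands in for it.
final class TimerCallbackSketch {
    static void addTimerCallback(Date dateTime, Consumer<Date> consumer) {
        Date now = new Date();
        if (dateTime.compareTo(now) < 0) {
            consumer.accept(now); // already due: invoke the callback immediately
        } else {
            new Timer(true).schedule(new TimerTask() {
                @Override
                public void run() {
                    consumer.accept(new Date()); // invoke the callback when the timer fires
                }
            }, dateTime);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        addTimerCallback(new Date(System.currentTimeMillis() + 200),
                fired -> System.out.println("fired at " + fired));
        Thread.sleep(500); // give the daemon timer a chance to fire before the JVM exits
    }
}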

From source file:org.ligoj.app.plugin.prov.aws.in.ProvAwsPriceImportResource.java

private <A extends Serializable, N extends AbstractNamedEntity<A>, T extends AbstractPrice<N>> T saveAsNeeded(
        final T entity, final double oldCost, final double newCost, final Consumer<Double> updateCost,
        final Consumer<T> c) {
    if (oldCost != newCost) {
        updateCost.accept(newCost);
        c.accept(entity);
    }
    return entity;
}
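
A standalone sketch of the same "invoke both Consumers only when the value actually changed" idea, with the project's entity and price types replaced by plain placeholders:

import java.util.function.Consumer;

// Standalone sketch with placeholder types; the real method works on JPA price entities.
final class SaveAsNeededSketch {
    static <T> T saveAsNeeded(T entity, double oldCost, double newCost,
            Consumer<Double> updateCost, Consumer<T> persist) {
        if (oldCost != newCost) {
            updateCost.accept(newCost); // push the new cost into the entity
            persist.accept(entity);     // then hand the updated entity to the persister
        }
        return entity;
    }

    public static void main(String[] args) {
        StringBuilder entity = new StringBuilder("price-entity");
        saveAsNeeded(entity, 1.0d, 2.0d,
                cost -> entity.append(" cost=").append(cost),
                e -> System.out.println("persisting: " + e));
        // nothing happens when oldCost == newCost
        saveAsNeeded(entity, 2.0d, 2.0d, cost -> {
        }, e -> System.out.println("not reached"));
    }
}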

From source file:org.haiku.haikudepotserver.pkg.job.PkgScreenshotImportArchiveJobRunner.java

private int consumeScreenshotArchiveEntries(ByteSource byteSource,
        Consumer<ArchiveEntryWithPkgNameAndOrdering> archiveEntryConsumer) throws IOException {
    int counter = 0;

    try (InputStream inputStream = byteSource.openStream();
            GZIPInputStream gzipInputStream = new GZIPInputStream(inputStream);
            ArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(gzipInputStream)) {
        ArchiveEntry archiveEntry;

        while (null != (archiveEntry = tarArchiveInputStream.getNextEntry())) {

            Matcher matcher = PATTERN_PATH.matcher(archiveEntry.getName());

            if (matcher.matches()) {
                archiveEntryConsumer
                        .accept(new ArchiveEntryWithPkgNameAndOrdering(tarArchiveInputStream, archiveEntry,
                                matcher.group(GROUP_PKGNAME), Integer.parseInt(matcher.group(GROUP_LEAFNAME))));

                counter++;
            }
        }
    }

    return counter;
}
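
A simplified, standalone sketch of the same loop: stream a .tar.gz archive with Commons Compress and hand every entry to a Consumer. The real method additionally filters entry names against a pattern and wraps each match in an ArchiveEntryWithPkgNameAndOrdering before calling accept; the archive file name below is made up.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.function.Consumer;
import java.util.zip.GZIPInputStream;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;

final class TarEntryConsumerSketch {
    static int forEachEntry(InputStream in, Consumer<ArchiveEntry> entryConsumer) throws IOException {
        int counter = 0;
        try (GZIPInputStream gzip = new GZIPInputStream(in);
                TarArchiveInputStream tar = new TarArchiveInputStream(gzip)) {
            ArchiveEntry entry;
            while ((entry = tar.getNextEntry()) != null) {
                entryConsumer.accept(entry); // one accept(...) call per archive entry
                counter++;
            }
        }
        return counter;
    }

    public static void main(String[] args) throws IOException {
        try (InputStream in = Files.newInputStream(Paths.get("screenshots.tgz"))) { // hypothetical archive
            int count = forEachEntry(in, entry -> System.out.println(entry.getName()));
            System.out.println(count + " entries consumed");
        }
    }
}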

From source file:eu.fthevenet.binjr.data.codec.CsvDecoder.java

/**
 * Decodes data from the provided stream and invoke the provided {@link Consumer} for each decoded record.
 *
 * @param in          the {@link InputStream} for the CSV file
 * @param headers     a list of the headers to keep from decoded records
 * @param mapToResult the function to invoke for each decoded record
 * @throws IOException                      in the event of an I/O error.
 * @throws DecodingDataFromAdapterException if an error occurred while decoding the CSV file.
 */
public void decode(InputStream in, List<String> headers, Consumer<DataSample<T>> mapToResult)
        throws IOException, DecodingDataFromAdapterException {
    try (Profiler ignored = Profiler.start("Building time series from csv data", logger::trace)) {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, encoding))) {
            CSVFormat csvFormat = CSVFormat.DEFAULT.withAllowMissingColumnNames(false).withFirstRecordAsHeader()
                    .withSkipHeaderRecord().withDelimiter(delimiter);
            Iterable<CSVRecord> records = csvFormat.parse(reader);

            for (CSVRecord csvRecord : records) {
                ZonedDateTime timeStamp = dateParser.apply(csvRecord.get(0));
                DataSample<T> tRecord = new DataSample<>(timeStamp);
                for (String h : headers) {
                    tRecord.getCells().put(h, numberParser.apply(csvRecord.get(h)));
                }
                mapToResult.accept(tRecord);
            }
        }
    }
}
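
A standalone sketch of the same per-record callback, using Commons CSV directly and handing each raw CSVRecord to the Consumer instead of building a DataSample:

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.function.Consumer;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

final class CsvConsumerSketch {
    static void decode(Reader reader, Consumer<CSVRecord> perRecord) throws IOException {
        CSVFormat format = CSVFormat.DEFAULT.withFirstRecordAsHeader();
        for (CSVRecord record : format.parse(reader)) {
            perRecord.accept(record); // one accept(...) call per decoded record
        }
    }

    public static void main(String[] args) throws IOException {
        String csv = "time,cpu\n2024-01-01T00:00:00Z,0.42\n";
        decode(new StringReader(csv), record -> System.out.println("cpu=" + record.get("cpu")));
    }
}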

From source file:org.codice.ddf.parser.xml.XmlParser.java

private void marshal(ParserConfigurator configurator, Consumer<Marshaller> marshallerConsumer)
        throws ParserException {
    JAXBContext jaxbContext = getContext(configurator.getContextPath(), configurator.getClassLoader());

    ClassLoader tccl = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(configurator.getClassLoader());
        Marshaller marshaller = jaxbContext.createMarshaller();
        if (configurator.getAdapter() != null) {
            marshaller.setAdapter(configurator.getAdapter());
        }
        if (configurator.getHandler() != null) {
            marshaller.setEventHandler(configurator.getHandler());
        }
        for (Map.Entry<String, Object> propRow : configurator.getProperties().entrySet()) {
            marshaller.setProperty(propRow.getKey(), propRow.getValue());
        }

        marshallerConsumer.accept(marshaller);
    } catch (RuntimeException e) {
        LOGGER.error("Error marshalling ", e);
        throw new ParserException("Error marshalling ", e);
    } catch (JAXBException e) {
        LOGGER.error("Error marshalling ", e);
        throw new ParserException("Error marshalling", e);
    } finally {
        Thread.currentThread().setContextClassLoader(tccl);
    }
}
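
A standalone sketch of the "hand a configured Marshaller to a Consumer" pattern, assuming the javax.xml.bind API used by the source; the real method additionally swaps the thread context classloader and installs adapters and event handlers:

import java.io.StringWriter;
import java.util.function.Consumer;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlRootElement;

public final class MarshallerConsumerSketch {

    @XmlRootElement
    public static class Note {
        public String text = "hello";
    }

    static void marshal(Class<?> type, Consumer<Marshaller> marshallerConsumer) throws JAXBException {
        Marshaller marshaller = JAXBContext.newInstance(type).createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
        marshallerConsumer.accept(marshaller); // the Consumer decides what to marshal and where
    }

    public static void main(String[] args) throws JAXBException {
        marshal(Note.class, m -> {
            StringWriter out = new StringWriter();
            try {
                m.marshal(new Note(), out);
            } catch (JAXBException e) {
                throw new IllegalStateException(e);
            }
            System.out.println(out);
        });
    }
}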

From source file:org.apache.hadoop.hbase.client.SimpleRequestController.java

@Override
public void waitForMaximumCurrentTasks(long max, long id, int periodToTrigger, Consumer<Long> trigger)
        throws InterruptedIOException {
    assert max >= 0;
    long lastLog = EnvironmentEdgeManager.currentTime();
    long currentInProgress, oldInProgress = Long.MAX_VALUE;
    while ((currentInProgress = tasksInProgress.get()) > max) {
        if (oldInProgress != currentInProgress) { // Wait for in progress to change.
            long now = EnvironmentEdgeManager.currentTime();
            if (now > lastLog + periodToTrigger) {
                lastLog = now;
                if (trigger != null) {
                    trigger.accept(currentInProgress);
                }
                logDetailsOfUndoneTasks(currentInProgress);
            }
        }
        oldInProgress = currentInProgress;
        try {
            synchronized (tasksInProgress) {
                if (tasksInProgress.get() == oldInProgress) {
                    tasksInProgress.wait(10);
                }
            }
        } catch (InterruptedException e) {
            throw new InterruptedIOException(
                    "#" + id + ", interrupted." + " currentNumberOfTask=" + currentInProgress);
        }
    }
}
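
A standalone sketch of the periodic-trigger idea: while a counter stays above the limit, occasionally hand the current value to a Consumer<Long> (for example, for logging), then wait briefly. Names and timings are made up:

import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;

final class WaitWithTriggerSketch {
    static void waitForMaximum(AtomicLong tasksInProgress, long max, long periodMillis, Consumer<Long> trigger)
            throws InterruptedException {
        long lastTrigger = System.currentTimeMillis();
        long current;
        while ((current = tasksInProgress.get()) > max) {
            long now = System.currentTimeMillis();
            if (now > lastTrigger + periodMillis) {
                lastTrigger = now;
                trigger.accept(current); // report how many tasks are still in flight
            }
            synchronized (tasksInProgress) {
                tasksInProgress.wait(10); // re-check shortly, as the original does
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        AtomicLong inFlight = new AtomicLong(3);
        new Thread(() -> {
            try {
                Thread.sleep(300);
                inFlight.set(0); // simulate the outstanding tasks finishing
            } catch (InterruptedException ignored) {
            }
        }).start();
        waitForMaximum(inFlight, 0, 100, n -> System.out.println(n + " tasks still in progress"));
        System.out.println("done waiting");
    }
}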

From source file:com.vsct.dt.hesperides.storage.RedisEventStore.java

@Override
public void withEvents(final String streamName, final long start, final long stop,
        final Consumer<Object> eventConsumer) throws StoreReadingException {
    try (A jedis = dataPool.getResource()) {

        LOGGER.debug("{} events to restore for stream {}", stop - start, streamName);

        final List<String> events = jedis.lrange(streamName, start, stop);

        for (int indexEvent = 0; indexEvent < events.size(); indexEvent++) {

            LOGGER.trace("Processing event {}", indexEvent);

            Event event = MAPPER.readValue(events.get(indexEvent), Event.class);

            Object hesperidesEvent = MAPPER.readValue(event.getData(), Class.forName(event.getEventType()));

            eventConsumer.accept(hesperidesEvent);
        }

        LOGGER.debug("Stream {} complete ({} events processed)", streamName, stop - start);

    } catch (StoreReadingException | ClassNotFoundException | IOException e) {
        e.printStackTrace();
        throw new StoreReadingException(e);
    }
}
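
A standalone sketch of the replay idea: deserialize each stored payload with Jackson and hand the result to a Consumer<Object>. The real store pulls the raw strings from a Redis list and resolves the concrete event class by name; here a fixed Event shape stands in for that:

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

import com.fasterxml.jackson.databind.ObjectMapper;

final class EventReplaySketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static class Event {
        public String eventType;
        public String data;
    }

    static void withEvents(List<String> rawEvents, Consumer<Object> eventConsumer) throws IOException {
        for (String raw : rawEvents) {
            Event event = MAPPER.readValue(raw, Event.class);
            eventConsumer.accept(event); // the real code first deserializes data into the class named by eventType
        }
    }

    public static void main(String[] args) throws IOException {
        List<String> raw = Arrays.asList("{\"eventType\":\"module-created\",\"data\":\"{}\"}");
        withEvents(raw, event -> System.out.println("replayed: " + ((Event) event).eventType));
    }
}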

From source file:org.jsweet.transpiler.util.ProcessUtil.java

/**
 * Runs the given command.
 * 
 * @param command
 *            the command name
 * @param directory
 *            the working directory of the created process
 * @param async
 *            tells if the command should be run asynchronously (in a
 *            separate thread)
 * @param stdoutConsumer
 *            consumes the standard output stream as lines of characters
 * @param endConsumer
 *            called when the process actually ends
 * @param errorHandler
 *            upcalled when the command does not terminate successfully
 * @param args
 *            the command-line arguments
 * @return the process that was created to execute the command (can be still
 *         running at this point if <code>async</code> is <code>true</code>)
 */
public static Process runCommand(String command, File directory, boolean async, Consumer<String> stdoutConsumer,
        Consumer<Process> endConsumer, Runnable errorHandler, String... args) {

    String[] cmd;
    if (System.getProperty("os.name").startsWith("Windows")) {
        if (nodeCommands.contains(command)) {
            cmd = new String[] { getNpmPath(command) };
        } else {
            cmd = new String[] { "cmd", "/c", command };
        }
    } else {
        if (nodeCommands.contains(command)) {
            cmd = new String[] { getNpmPath(command) };
        } else {
            cmd = new String[] { command };
        }
    }
    cmd = ArrayUtils.addAll(cmd, args);

    logger.debug("run command: " + StringUtils.join(cmd, " "));
    Process[] process = { null };
    try {
        ProcessBuilder processBuilder = new ProcessBuilder(cmd);
        processBuilder.redirectErrorStream(true);
        if (directory != null) {
            processBuilder.directory(directory);
        }
        if (!StringUtils.isBlank(EXTRA_PATH)) {
            processBuilder.environment().put("PATH",
                    processBuilder.environment().get("PATH") + File.pathSeparator + EXTRA_PATH);
        }

        process[0] = processBuilder.start();

        Runnable runnable = new Runnable() {

            @Override
            public void run() {
                try {
                    try (BufferedReader in = new BufferedReader(
                            new InputStreamReader(process[0].getInputStream()))) {
                        String line;
                        while ((line = in.readLine()) != null) {
                            if (stdoutConsumer != null) {
                                stdoutConsumer.accept(line);
                            } else {
                                logger.info(command + " - " + line);
                            }
                        }
                    }

                    process[0].waitFor();
                    if (endConsumer != null) {
                        endConsumer.accept(process[0]);
                    }
                    if (process[0].exitValue() != 0) {
                        if (errorHandler != null) {
                            errorHandler.run();
                        }
                    }
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                    if (errorHandler != null) {
                        errorHandler.run();
                    }
                }
            }
        };
        if (async) {
            new Thread(runnable).start();
        } else {
            runnable.run();
        }

    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        if (errorHandler != null) {
            errorHandler.run();
        }
        return null;
    }
    return process[0];
}
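
A standalone sketch of the core stream-handling idea above: start a process, feed each stdout line to a Consumer<String>, then hand the finished Process to a Consumer<Process>. The command in main is assumed to be on the PATH:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.function.Consumer;

final class RunCommandSketch {
    static Process run(Consumer<String> stdoutConsumer, Consumer<Process> endConsumer, String... cmd)
            throws IOException, InterruptedException {
        ProcessBuilder processBuilder = new ProcessBuilder(cmd);
        processBuilder.redirectErrorStream(true); // merge stderr into stdout, as the original does
        Process process = processBuilder.start();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                stdoutConsumer.accept(line); // one accept(...) call per output line
            }
        }
        process.waitFor();
        endConsumer.accept(process); // called once the process has actually ended
        return process;
    }

    public static void main(String[] args) throws Exception {
        run(System.out::println,
                p -> System.out.println("exit value: " + p.exitValue()),
                "java", "-version"); // assumed to be available on the PATH
    }
}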