Example usage for java.util.function Consumer accept

List of usage examples for java.util.function Consumer accept

Introduction

On this page you can find usage examples for java.util.function.Consumer.accept.

Prototype

void accept(T t);

Document

Performs this operation on the given argument.
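
Before the real-world examples below, here is a minimal, self-contained sketch of calling accept directly, using only the JDK:

import java.util.function.Consumer;

public class ConsumerAcceptDemo {
    public static void main(String[] args) {
        // A Consumer written as a lambda...
        Consumer<String> greeter = name -> System.out.println("Hello, " + name);
        greeter.accept("world");

        // ...and one written as a method reference, chained with andThen.
        Consumer<String> printer = System.out::println;
        greeter.andThen(printer).accept("again");
    }
}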

Usage

From source file:com.playonlinux.core.utils.archive.Tar.java

/**
 * Uncompresses a tar archive.
 *
 * @param inputStream
 *            the archive stream to read from
 * @param countingInputStream
 *            used to count the number of bytes extracted
 * @param outputDir
 *            the directory where files should be extracted
 * @param finalSize
 *            the expected total number of bytes, used to compute progress
 * @param stateCallback
 *            consumer notified of extraction progress
 * @return a list of extracted files
 * @throws ArchiveException
 *             if the process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream,
        final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream debInputStream = new ArchiveStreamFactory().createArchiveInputStream("tar",
            inputStream)) {
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.",
                        outputFile.getAbsolutePath()));

                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to create output directory %s.",
                            outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s (%s).", outputFile.getAbsolutePath(),
                        entry.getMode()));

                if (entry.isSymbolicLink()) {
                    Files.createSymbolicLink(Paths.get(outputFile.getAbsolutePath()),
                            Paths.get(entry.getLinkName()));
                } else {
                    try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                        IOUtils.copy(debInputStream, outputFileStream);

                        Files.setPosixFilePermissions(Paths.get(outputFile.getPath()),
                                com.playonlinux.core.utils.Files.octToPosixFilePermission(entry.getMode()));
                    }
                }

            }
            uncompressedFiles.add(outputFile);

            stateCallback.accept(new ProgressEntity.Builder()
                    .withPercent((double) countingInputStream.getCount() / (double) finalSize * (double) 100)
                    .withProgressText("Extracting " + outputFile.getName()).build());

        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
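
A hypothetical caller might supply the progress callback like this; note that the ProgressEntity getters are assumptions, mirroring the builder fields used above:

// Hypothetical usage sketch: getProgressText()/getPercent() are assumed to
// mirror the builder's withProgressText(...)/withPercent(...) fields.
Consumer<ProgressEntity> stateCallback = state ->
        System.out.printf("%s (%.0f%%)%n", state.getProgressText(), state.getPercent());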

From source file:enumj.Enumerator.java

/**
 * Performs an action on each enumerated element.
 *
 * @param consumer action to perform on each enumerated element.
 * @exception IllegalArgumentException if <code>consumer</code> is null.
 */
public default void forEach(Consumer<? super E> consumer) {
    Checks.ensureNotNull(consumer, Messages.NULL_ENUMERATOR_CONSUMER);
    while (hasNext()) {
        consumer.accept(next());
    }
}
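
The same drain-and-accept loop works against a plain java.util.Iterator; a minimal runnable sketch (Objects.requireNonNull stands in for the library's Checks.ensureNotNull):

import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;

public class ForEachSketch {
    // Same shape as Enumerator.forEach: null-check, then hand every element to accept(...).
    static <E> void forEach(Iterator<E> iterator, Consumer<? super E> consumer) {
        Objects.requireNonNull(consumer, "consumer must not be null");
        while (iterator.hasNext()) {
            consumer.accept(iterator.next());
        }
    }

    public static void main(String[] args) {
        forEach(List.of("a", "b", "c").iterator(), System.out::println);
    }
}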

From source file:org.apache.nifi.web.api.ApplicationResource.java

/**
 * Authorizes the specified snippet with the specified request action.
 *
 * @param snippet    the snippet whose components are to be authorized
 * @param authorizer the authorizer
 * @param lookup     the authorizable lookup
 * @param action     the request action
 * @param authorizeReferencedServices whether to authorize services referenced by the selected processors
 * @param authorizeTransitiveServices whether to authorize transitively referenced services
 */
protected void authorizeSnippet(final SnippetAuthorizable snippet, final Authorizer authorizer,
        final AuthorizableLookup lookup, final RequestAction action, final boolean authorizeReferencedServices,
        final boolean authorizeTransitiveServices) {

    final Consumer<Authorizable> authorize = authorizable -> authorizable.authorize(authorizer, action,
            NiFiUserUtils.getNiFiUser());

    // authorize each component in the specified snippet
    snippet.getSelectedProcessGroups().stream().forEach(processGroupAuthorizable -> {
        // note - we are not authorizing templates or controller services as they are not considered when using this snippet;
        // however, referenced services are considered, so those are explicitly authorized when authorizing a processor
        authorizeProcessGroup(processGroupAuthorizable, authorizer, lookup, action, authorizeReferencedServices,
                false, false, authorizeTransitiveServices);
    });
    snippet.getSelectedRemoteProcessGroups().stream().forEach(authorize);
    snippet.getSelectedProcessors().stream().forEach(processorAuthorizable -> {
        // authorize the processor
        authorize.accept(processorAuthorizable.getAuthorizable());

        // authorize any referenced services if necessary
        if (authorizeReferencedServices) {
            AuthorizeControllerServiceReference.authorizeControllerServiceReferences(processorAuthorizable,
                    authorizer, lookup, authorizeTransitiveServices);
        }
    });
    snippet.getSelectedInputPorts().stream().forEach(authorize);
    snippet.getSelectedOutputPorts().stream().forEach(authorize);
    snippet.getSelectedConnections().stream().forEach(connAuth -> authorize.accept(connAuth.getAuthorizable()));
    snippet.getSelectedFunnels().stream().forEach(authorize);
    snippet.getSelectedLabels().stream().forEach(authorize);
}
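
Note how the single authorize Consumer is reused wherever the stream element type matches, and wrapped in an adapter lambda (connAuth -> authorize.accept(connAuth.getAuthorizable())) where it does not. The same adapter pattern in miniature, with stand-in types:

import java.util.List;
import java.util.function.Consumer;

public class AdapterSketch {
    public static void main(String[] args) {
        Consumer<String> printer = System.out::println;

        // Reused directly when the element type matches...
        List.of("a", "b").forEach(printer);

        // ...and adapted with a lambda when it does not.
        List.of(1, 2).forEach(n -> printer.accept("number " + n));
    }
}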

From source file:fi.vm.sade.eperusteet.ylops.service.ops.impl.OpetussuunnitelmaServiceImpl.java

private void importOppiaineet(Opetussuunnitelma ops, Collection<LukioPerusteOppiaineDto> from,
        Consumer<Jarjestetty<Oppiaine>> to, Oppiaine parent, Map<UUID, Lukiokurssi> kurssit) {
    for (LukioPerusteOppiaineDto oppiaine : from) {
        Oppiaine oa = new Oppiaine(oppiaine.getTunniste());
        oa.setTyyppi(OppiaineTyyppi.LUKIO);
        oa.setNimi(LokalisoituTeksti.of(oppiaine.getNimi().getTekstit()));
        oa.setOppiaine(parent);
        oa.setAbstrakti(oppiaine.getAbstrakti());
        oa.setKoosteinen(oppiaine.isKoosteinen());
        oa.setKoodiArvo(oppiaine.getKoodiArvo());
        oa.setKoodiUri(oppiaine.getKoodiUri());
        for (Map.Entry<LukiokurssiTyyppi, Optional<LokalisoituTekstiDto>> kv : oppiaine
                .getKurssiTyyppiKuvaukset().entrySet()) {
            kv.getKey().oppiaineKuvausSetter().set(oa, kv.getValue().map(LokalisoituTekstiDto::getTekstit)
                    .map(LokalisoituTeksti::of).orElse(null));
        }
        to.accept(new Jarjestetty<>(oa, oppiaine.getJarjestys()));
        importOppiaineet(ops, oppiaine.getOppimaarat(), child -> {
            oa.getOppimaaratReal().add(child.getObj());
            ops.getOppiaineJarjestykset()
                    .add(new LukioOppiaineJarjestys(ops, child.getObj(), child.getJarjestys()));
        }, oa, kurssit);
        importKurssit(ops, oppiaine.getKurssit(), oa, kurssit);
    }
}
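
The Consumer here acts as a sink: the recursive call pushes each produced child upward instead of returning a collection. A generic sketch of that pattern, with a hypothetical Node type standing in for Oppiaine:

import java.util.List;
import java.util.function.Consumer;

public class SinkSketch {
    // Hypothetical tree node.
    record Node(String name, List<Node> children) {}

    // Recursion pushes results into the sink rather than building a return value.
    static void walk(Node node, Consumer<Node> sink) {
        sink.accept(node);
        node.children().forEach(child -> walk(child, sink));
    }

    public static void main(String[] args) {
        Node tree = new Node("root", List.of(new Node("leaf", List.of())));
        walk(tree, n -> System.out.println(n.name()));
    }
}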

From source file:it.polimi.diceH2020.SPACE4CloudWS.core.CoarseGrainedOptimizer.java

private boolean hillClimbing(SolutionPerJob solPerJob, Technology technology) {
    boolean success = false;
    Pair<Optional<Double>, Long> simulatorResult = dataProcessor.simulateClass(solPerJob);
    Optional<Double> maybeResult = simulatorResult.getLeft();
    if (maybeResult.isPresent()) {
        success = true;

        PerformanceSolver currentSolver = dataProcessor.getPerformanceSolver();
        Function<Double, Double> fromResult = currentSolver.transformationFromSolverResult(solPerJob,
                technology);
        Predicate<Double> feasibilityCheck = currentSolver.feasibilityCheck(solPerJob, technology);
        Consumer<Double> metricUpdater = currentSolver.metricUpdater(solPerJob, technology);

        final double tolerance = settings.getOptimization().getTolerance();

        BiPredicate<Double, Double> incrementCheck;
        Function<Integer, Integer> updateFunction;
        Predicate<Double> stoppingCondition;
        Predicate<Integer> vmCheck;

        double responseTime = fromResult.apply(maybeResult.get());
        if (feasibilityCheck.test(responseTime)) {
            updateFunction = n -> n - 1;
            stoppingCondition = feasibilityCheck.negate();
            vmCheck = n -> n == 1;
            incrementCheck = (prev, curr) -> false;
        } else {
            updateFunction = n -> n + 1;
            stoppingCondition = feasibilityCheck;
            vmCheck = n -> false;
            incrementCheck = (prev, curr) -> Math.abs((prev - curr) / prev) < tolerance;
        }

        List<Triple<Integer, Optional<Double>, Boolean>> resultsList = alterUntilBreakPoint(solPerJob,
                updateFunction, fromResult, feasibilityCheck, stoppingCondition, incrementCheck, vmCheck);
        Optional<Triple<Integer, Optional<Double>, Boolean>> result = resultsList.parallelStream()
                .filter(t -> t.getRight() && t.getMiddle().isPresent())
                .min(Comparator.comparing(Triple::getLeft));
        result.ifPresent(triple -> triple.getMiddle().ifPresent(output -> {
            int nVM = triple.getLeft();
            switch (technology) {
            case HADOOP:
            case SPARK:
                solPerJob.setThroughput(output);
                break;
            case STORM:
                break;
            default:
                throw new RuntimeException("Unexpected technology");
            }
            solPerJob.updateNumberVM(nVM);
            double metric = fromResult.apply(output);
            metricUpdater.accept(metric);
            logger.info(String.format(
                    "class%s-> MakeFeasible ended, result = %f, other metric = %f, obtained with: %d VMs",
                    solPerJob.getId(), output, metric, nVM));
        }));
    } else {
        logger.info("class" + solPerJob.getId() + "-> MakeFeasible ended with ERROR");
        solPerJob.setFeasible(false);
    }
    return success;
}
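
The metricUpdater Consumer defers a side effect, writing the computed metric back onto the solution, until the hill climb settles on a value. A minimal sketch of that write-back idea with a stand-in holder type:

import java.util.function.Consumer;

public class MetricUpdaterSketch {
    // Stand-in for the mutable solution object the real metricUpdater writes to.
    static class MetricHolder { double value; }

    public static void main(String[] args) {
        MetricHolder holder = new MetricHolder();
        Consumer<Double> metricUpdater = metric -> holder.value = metric;
        metricUpdater.accept(42.0);
        System.out.println(holder.value); // 42.0
    }
}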

From source file:org.fcrepo.client.ConnectionManagementTest.java

/**
 * Uses the FcrepoClient to connect to the supplied {@code uri} using the supplied {@code method}.
 * This method invokes the supplied {@code responseHandler} on the {@code FcrepoResponse}.
 *
 * @param client the FcrepoClient used to invoke the request
 * @param uri the request URI to connect to
 * @param method the HTTP method corresponding to the FcrepoClient method invoked
 * @param responseHandler invoked on the {@code FcrepoResponse}, may be {@code null}
 */
private void connect(final FcrepoClient client, final MockHttpExpectations.Uris uri, final HttpMethods method,
        final Consumer<FcrepoResponse> responseHandler) {

    final NullInputStream nullIn = new NullInputStream(1, true, false);
    FcrepoResponse response = null;

    try {

        switch (method) {

        case OPTIONS:
            response = client.options(uri.asUri()).perform();
            break;

        case DELETE:
            response = client.delete(uri.asUri()).perform();
            break;

        case GET:
            response = client.get(uri.asUri()).accept(TEXT_TURTLE).perform();
            break;

        case HEAD:
            response = client.head(uri.asUri()).perform();
            break;

        case PATCH:
            response = client.patch(uri.asUri()).perform();
            break;

        case POST:
            response = client.post(uri.asUri()).body(nullIn, TEXT_TURTLE).perform();
            break;

        case PUT:
            response = client.put(uri.asUri()).body(nullIn, TEXT_TURTLE).perform();
            break;

        case MOVE:
            response = client.move(uri.asUri(), uri.asUri()).perform();
            break;

        case COPY:
            response = client.copy(uri.asUri(), uri.asUri()).perform();
            break;

        default:
            fail("Unknown HTTP method: " + method.name());
        }

        if (uri.statusCode >= HttpStatus.SC_INTERNAL_SERVER_ERROR) {
            fail("Expected a FcrepoOperationFailedException to be thrown for HTTP method " + method.name());
        }
    } catch (FcrepoOperationFailedException e) {
        assertEquals("Expected request for " + uri.asUri() + " to return a " + uri.statusCode + ".  " + "Was: "
                + e.getStatusCode() + " Method:" + method, uri.statusCode, e.getStatusCode());
    } finally {
        if (responseHandler != null) {
            responseHandler.accept(response);
        }
    }
}
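
The null check in the finally block can equivalently be written with java.util.Optional:

// Equivalent null-safe invocation of the optional callback.
Optional.ofNullable(responseHandler).ifPresent(handler -> handler.accept(response));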

From source file:org.apache.directory.studio.connection.core.io.api.DirectoryApiConnectionWrapper.java

private boolean checkAndHandleReferral(ResultResponse response, StudioProgressMonitor monitor,
        ReferralsInfo referralsInfo, Consumer<ReferralHandlingData> consumer)
        throws NamingException, LdapURLEncodingException {
    if (response == null) {
        return false;
    }

    LdapResult ldapResult = response.getLdapResult();
    if (ldapResult == null || !ResultCodeEnum.REFERRAL.equals(ldapResult.getResultCode())) {
        return false;
    }

    if (referralsInfo == null) {
        referralsInfo = new ReferralsInfo(true);
    }

    Referral referral = ldapResult.getReferral();
    referralsInfo.addReferral(referral);
    Referral nextReferral = referralsInfo.getNextReferral();

    Connection referralConnection = ConnectionWrapperUtils.getReferralConnection(nextReferral, monitor, this);
    if (referralConnection == null) {
        monitor.setCanceled(true);
        return true;
    }

    List<String> urls = new ArrayList<>(referral.getLdapUrls());
    String referralDn = new LdapUrl(urls.get(0)).getDn().getName();
    ReferralHandlingData referralHandlingData = new ReferralHandlingData(
            referralConnection.getConnectionWrapper(), referralDn, referralsInfo);
    consumer.accept(referralHandlingData);

    return true;
}
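
The method pairs a boolean "handled" flag with a Consumer that only fires when there is something to hand over. The same shape in miniature:

import java.util.function.Consumer;

public class HandledFlagSketch {
    // Invoke the consumer only on success; report success via the return value.
    static boolean tryParse(String input, Consumer<Integer> onSuccess) {
        try {
            onSuccess.accept(Integer.parseInt(input));
            return true;
        } catch (NumberFormatException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(tryParse("42", n -> System.out.println("parsed " + n))); // parsed 42, then true
        System.out.println(tryParse("oops", n -> {})); // false
    }
}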

From source file:com.xylocore.cassandra.query.PagedQuery.java

/**
 * Processes the rows of the query context's result set, extracting each row into an
 * entity, filtering it, and handing surviving entities to the configured entity
 * processor. Once the result set is consumed, the completion handler is invoked.
 *
 * @param       aQueryContext      the query context holding the result set
 * @param       aCompletionHandler receives {@code true} if the result set held no results
 */
private void processResultSet(QueryContext aQueryContext, Consumer<Boolean> aCompletionHandler) {
    PagedQueryExecutionContext<T> myExecutionContext = aQueryContext.getExecutionContext();
    ResultSet myResultSet = aQueryContext.getResultSet();
    boolean myNoResults = true;

    if (!myResultSet.isExhausted()) {
        List<T> myEntities = new ArrayList<>();
        T myEntity = null;
        Row myRow;

        myNoResults = false;

        if (myExecutionContext.isReuseEntity()) {
            logger.debug("creating reusable entity");

            myEntity = myExecutionContext.getEntityCreator().get();
        }

        while ((myRow = myResultSet.one()) != null) {
            if (!myExecutionContext.isReuseEntity()) {
                logger.debug("creating non-reusable entity");

                myEntity = myExecutionContext.getEntityCreator().get();
            }

            logger.debug("extracting row data into entity");

            myExecutionContext.getEntityExtractor().accept(myRow, myEntity);

            if (myExecutionContext.getEntityFilter() == null
                    || myExecutionContext.getEntityFilter().test(myEntity)) {
                if (logger.isDebugEnabled()) {
                    logger.debug("processing partition: {}", myEntity.toString());
                }

                logger.debug("processing entity");

                myEntities.clear();
                myEntities.add(myEntity);

                myExecutionContext.getEntityProcessor().accept(myEntities);
            } else {
                if (logger.isDebugEnabled()) {
                    logger.debug("partition filtered: {}", myEntity.toString());
                }
            }

            storeLastKey(aQueryContext, myRow);
        }
    }

    aCompletionHandler.accept(myNoResults);
}
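
A hypothetical caller would use the completion handler to react to an empty page, for example (aQueryContext and logger are assumed to exist in the caller's scope):

processResultSet(aQueryContext, noResults -> {
    if (noResults) {
        logger.debug("no rows in this page");
    }
});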

From source file:com.ethlo.geodata.importer.CountryImporter.java

@Override
public long processFile(Consumer<Map<String, String>> sink) throws IOException {
    /*
     * ISO  
     * ISO3    
     * ISO-Numeric 
     * fips    
     * Country 
     * Capital 
     * Area(in sq km)  
     * Population  
     * Continent   
     * tld 
     * CurrencyCode    
     * CurrencyName
     * Phone   
     * Postal Code Format  
     * Postal Code Regex   
     * Languages   
     * geonameid   
     * neighbours  
     * EquivalentFipsCode
     */
    long count = 0;
    try (final BufferedReader reader = IoUtils.getBufferedReader(csvFile)) {
        String line;
        while ((line = reader.readLine()) != null) {
            if (!line.startsWith("#")) {
                final String[] entry = StringUtils.delimitedListToStringArray(line, "\t");
                final Map<String, String> paramMap = new TreeMap<>();
                paramMap.put("iso", stripToNull(entry[0]));
                paramMap.put("iso3", stripToNull(entry[1]));
                paramMap.put("iso_numeric", stripToNull(entry[2]));
                paramMap.put("fips", stripToNull(entry[3]));
                paramMap.put("country", stripToNull(entry[4]));
                paramMap.put("capital", stripToNull(entry[5]));
                paramMap.put("area", stripToNull(entry[6]));
                paramMap.put("population", stripToNull(entry[7]));
                paramMap.put("continent", stripToNull(entry[8]));
                paramMap.put("tld", stripToNull(entry[9]));
                paramMap.put("currency_code", stripToNull(entry[10]));
                paramMap.put("currency_name", stripToNull(entry[11]));
                paramMap.put("phone", stripToNull(entry[12]));
                paramMap.put("postal_code_format", stripToNull(entry[13]));
                paramMap.put("postal_code_regex", stripToNull(entry[14]));
                paramMap.put("languages", stripToNull(entry[15]));
                paramMap.put("geoname_id", stripToNull(entry[16]));
                paramMap.put("neighbours", stripToNull(entry[17]));
                paramMap.put("equivalent_fips_code", stripToNull(entry[18]));

                sink.accept(paramMap);
            }

            count++;
        }
    }
    return count;
}
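
Because the sink is just a Consumer, a caller can pass any collector; for instance, a method reference that accumulates the parsed rows (the importer variable is hypothetical):

List<Map<String, String>> rows = new ArrayList<>();
long lineCount = importer.processFile(rows::add); // rows::add is a Consumer<Map<String, String>>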

From source file:com.vsct.dt.hesperides.storage.RedisEventStore.java

@Override
public void withEvents(final String streamName, final long start, final long stop, final long stopTimestamp,
        final Consumer<Object> eventConsumer) throws StoreReadingException {
    try (A jedis = dataPool.getResource()) {
        LOGGER.debug("{} events to restore for stream {}", stop - start, streamName);

        final long startTime = System.nanoTime();

        int indexEvent;
        long indexBatch;
        int counter = 0;
        long startIO;
        long stopIO;

        long ioAccumulator = 0, serializationAccumulator = 0, processingAccumulator = 0;

        for (indexBatch = start; indexBatch < stop; indexBatch = indexBatch + BATCH_SIZE) {

            startIO = System.nanoTime();

            List<String> events = jedis.lrange(streamName, indexBatch, indexBatch + BATCH_SIZE - 1);

            if (LOGGER.isDebugEnabled()) {
                stopIO = System.nanoTime();

                ioAccumulator += stopIO - startIO;
            }

            for (indexEvent = 0; indexEvent < events.size(); indexEvent++) {

                if (LOGGER.isTraceEnabled()) {
                    LOGGER.trace("Processing event {}", indexBatch + indexEvent);
                }

                long startSerialization = System.nanoTime();

                Event event = MAPPER.readValue(events.get(indexEvent), Event.class);

                if (event.getTimestamp() > stopTimestamp) {
                    // No need to go beyond this point in time
                    indexBatch = stop;
                    break;
                }

                Object hesperidesEvent = MAPPER.readValue(event.getData(), Class.forName(event.getEventType()));
                long stopSerialization = System.nanoTime();

                serializationAccumulator += stopSerialization - startSerialization;

                long startProcessing = System.nanoTime();
                eventConsumer.accept(hesperidesEvent);
                long stopProcessing = System.nanoTime();

                processingAccumulator += stopProcessing - startProcessing;

                counter++;
            }
        }

        if (LOGGER.isDebugEnabled()) {
            final long stopTime = System.nanoTime();

            long durationMs = (stopTime - startTime) / 1000000;

            double frequency = ((double) counter / durationMs) * 1000;

            LOGGER.debug(
                    "Stream {} complete ({} events processed - duration {} ms - {} msg/sec - {} ms IO -"
                            + "{} ms Serialization - {} ms processing)",
                    streamName, counter, durationMs, frequency, ioAccumulator / 1000000,
                    serializationAccumulator / 1000000, processingAccumulator / 1000000);
        }
    } catch (StoreReadingException | ClassNotFoundException | IOException e) {
        e.printStackTrace();
        throw new StoreReadingException(e);
    }
}
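
A hypothetical replay loop would pass a Consumer that dispatches each deserialized event; store, aggregate, and eventCount below are stand-ins:

// Hypothetical usage: replay every stored event up to now into an aggregate.
store.withEvents(streamName, 0, eventCount, System.currentTimeMillis(),
        event -> aggregate.replay(event));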