Example usage for java.util.concurrent Executors newCachedThreadPool

Introduction

On this page you can find example usage for java.util.concurrent Executors.newCachedThreadPool.

Prototype

public static ExecutorService newCachedThreadPool() 

Source Link

Document

Creates a thread pool that creates new threads as needed, but will reuse previously constructed threads when they are available.
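
Before the project examples below, here is a minimal, self-contained sketch of the typical lifecycle: create the pool, submit work, then shut it down so its non-daemon worker threads do not keep the JVM alive. The class and task names are illustrative only and do not come from any of the projects shown later.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class CachedThreadPoolExample {
    public static void main(String[] args) throws InterruptedException {
        // Grows on demand; idle threads are reused and reclaimed after 60 seconds.
        ExecutorService pool = Executors.newCachedThreadPool();

        for (int i = 0; i < 5; i++) {
            final int taskId = i;
            pool.execute(() -> System.out
                    .println("Task " + taskId + " ran on " + Thread.currentThread().getName()));
        }

        // Shut down and wait briefly so the example exits cleanly.
        pool.shutdown();
        pool.awaitTermination(10, TimeUnit.SECONDS);
    }
}

Because a cached pool places no upper bound on the number of threads it creates, it is best suited to many short-lived tasks; for long-running or unbounded workloads a fixed-size pool is usually the safer choice.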

Usage

From source file:ddf.catalog.metrics.source.SourceMetricsImpl.java

/**
 * Creates metrics for new CatalogProvider or FederatedSource when they are initially created.
 * Metrics creation includes the JMX MBeans and associated ddf.metrics.collector.JmxCollector.
 *
 * @param source
 * @param props
 */
public void addingSource(final Source source, Map props) {
    LOGGER.trace("ENTERING: addingSource");

    if (executorPool == null) {
        executorPool = Executors.newCachedThreadPool();
    }

    // Creating JmxCollectors for all of the source metrics can be time consuming,
    // so do this in a separate thread to prevent blacklisting by EventAdmin
    final Runnable metricsCreator = new Runnable() {
        public void run() {
            createSourceMetrics(source);
        }
    };

    LOGGER.debug("Start metricsCreator thread for Source {}", source.getId());
    executorPool.execute(metricsCreator);

    LOGGER.trace("EXITING: addingSource");
}

From source file:com.google.cloud.hadoop.gcsio.GoogleCloudStorageIntegrationHelper.java

/**
 * Creates objects with the given names in the given bucket.
 */
private void createObjects(final String bucketName, String[] objectNames) throws IOException {

    final ExecutorService threadPool = Executors.newCachedThreadPool();
    final CountDownLatch counter = new CountDownLatch(objectNames.length);
    List<Future<?>> futures = new ArrayList<>();
    // Do each creation asynchronously.
    for (final String objectName : objectNames) {
        Future<?> future = threadPool.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    if (objectName.endsWith(GoogleCloudStorage.PATH_DELIMITER)) {
                        mkdir(bucketName, objectName);
                    } else {
                        // Just use objectName as file contents.
                        writeTextFile(bucketName, objectName, objectName);
                    }
                } catch (Throwable ioe) {
                    throw new RuntimeException(
                            String.format("Exception creating %s/%s", bucketName, objectName), ioe);
                } finally {
                    counter.countDown();
                }
            }
        });
        futures.add(future);
    }

    try {
        counter.await();
    } catch (InterruptedException ie) {
        throw new IOException("Interrupted while awaiting object creation!", ie);
    } finally {
        threadPool.shutdown();
        try {
            if (!threadPool.awaitTermination(10L, TimeUnit.SECONDS)) {
                System.err.println("Failed to awaitTermination! Forcing executor shutdown.");
                threadPool.shutdownNow();
            }
        } catch (InterruptedException ie) {
            throw new IOException("Interrupted while shutting down threadpool!", ie);
        }
    }

    for (Future<?> future : futures) {
        try {
            // We should already be done.
            future.get(10, TimeUnit.MILLISECONDS);
        } catch (Exception e) {
            throw new IOException("Creation of file failed with exception", e);
        }
    }
}

From source file:ubic.gemma.analysis.preprocess.batcheffects.ComBat.java

/**
 * Multithreaded.
 * 
 * @param sdata
 * @param gammastar
 * @param deltastar
 */
private void runNonParametric(final DoubleMatrix2D sdata, DoubleMatrix2D gammastar, DoubleMatrix2D deltastar) {
    final ConcurrentHashMap<String, DoubleMatrix1D[]> results = new ConcurrentHashMap<String, DoubleMatrix1D[]>();
    int numThreads = Math.min(batches.size(), Runtime.getRuntime().availableProcessors());

    log.info("Runing nonparametric estimation on " + numThreads + " threads");

    Future<?>[] futures = new Future[numThreads];
    ExecutorService service = Executors.newCachedThreadPool();

    /*
     * Divvy up batches over threads.
     */

    int batchesPerThread = batches.size() / numThreads;

    final String[] batchIds = batches.keySet().toArray(new String[] {});

    for (int i = 0; i < numThreads; i++) {

        final int firstBatch = i * batchesPerThread;
        final int lastBatch = i == (numThreads - 1) ? batches.size() : firstBatch + batchesPerThread;

        futures[i] = service.submit(new Runnable() {
            @Override
            public void run() {
                for (int k = firstBatch; k < lastBatch; k++) {
                    String batchId = batchIds[k];
                    DoubleMatrix2D batchData = getBatchData(sdata, batchId);
                    DoubleMatrix1D[] batchResults = nonParametricFit(batchData, gammaHat.viewRow(k),
                            deltaHat.viewRow(k));
                    results.put(batchId, batchResults);
                }
            }
        });
    }

    service.shutdown();

    boolean allDone = false;
    do {
        for (Future<?> f : futures) {
            allDone = true;
            if (!f.isDone() && !f.isCancelled()) {
                allDone = false;
                break;
            }
        }
    } while (!allDone);

    for (int i = 0; i < batchIds.length; i++) {
        String batchId = batchIds[i];
        DoubleMatrix1D[] batchResults = results.get(batchId);
        for (int j = 0; j < batchResults[0].size(); j++) {
            gammastar.set(i, j, batchResults[0].get(j));
        }
        for (int j = 0; j < batchResults[1].size(); j++) {
            deltastar.set(i, j, batchResults[1].get(j));
        }
    }
}

From source file:com.netflix.curator.framework.recipes.queue.TestDistributedQueue.java

@Test
public void testSafetyWithCrash() throws Exception {
    final int itemQty = 100;

    DistributedQueue<TestQueueItem> producerQueue = null;
    DistributedQueue<TestQueueItem> consumerQueue1 = null;
    DistributedQueue<TestQueueItem> consumerQueue2 = null;

    CuratorFramework producerClient = CuratorFrameworkFactory.newClient(server.getConnectString(),
            new RetryOneTime(1));
    CuratorFramework consumerClient1 = CuratorFrameworkFactory.newClient(server.getConnectString(),
            new RetryOneTime(1));
    CuratorFramework consumerClient2 = CuratorFrameworkFactory.newClient(server.getConnectString(),
            new RetryOneTime(1));
    try {
        producerClient.start();
        consumerClient1.start();
        consumerClient2.start();

        ExecutorService service = Executors.newCachedThreadPool();

        // make the producer queue
        {
            producerQueue = QueueBuilder.builder(producerClient, null, serializer, QUEUE_PATH).buildQueue();
            producerQueue.start();
            QueueTestProducer producer = new QueueTestProducer(producerQueue, itemQty, 0);
            service.submit(producer);
        }

        final Set<TestQueueItem> takenItems = Sets.newTreeSet();
        final Set<TestQueueItem> takenItemsForConsumer1 = Sets.newTreeSet();
        final Set<TestQueueItem> takenItemsForConsumer2 = Sets.newTreeSet();
        final AtomicReference<TestQueueItem> thrownItemFromConsumer1 = new AtomicReference<TestQueueItem>(null);

        // make the first consumer queue
        {
            final QueueConsumer<TestQueueItem> ourQueue = new QueueConsumer<TestQueueItem>() {
                @Override
                public void consumeMessage(TestQueueItem message) throws Exception {
                    synchronized (takenItems) {
                        if (takenItems.size() > 10) {
                            thrownItemFromConsumer1.set(message);
                            throw new Exception("dummy"); // simulate a crash
                        }
                    }

                    addToTakenItems(message, takenItems, itemQty);
                    synchronized (takenItemsForConsumer1) {
                        takenItemsForConsumer1.add(message);
                    }

                    Thread.sleep((long) (Math.random() * 5));
                }

                @Override
                public void stateChanged(CuratorFramework client, ConnectionState newState) {
                }
            };
            consumerQueue1 = QueueBuilder.builder(consumerClient1, ourQueue, serializer, QUEUE_PATH)
                    .lockPath("/a/locks").buildQueue();
            consumerQueue1.start();
        }

        // make the second consumer queue
        {
            final QueueConsumer<TestQueueItem> ourQueue = new QueueConsumer<TestQueueItem>() {
                @Override
                public void consumeMessage(TestQueueItem message) throws Exception {
                    addToTakenItems(message, takenItems, itemQty);
                    synchronized (takenItemsForConsumer2) {
                        takenItemsForConsumer2.add(message);
                    }
                    Thread.sleep((long) (Math.random() * 5));
                }

                @Override
                public void stateChanged(CuratorFramework client, ConnectionState newState) {
                }
            };
            consumerQueue2 = QueueBuilder.builder(consumerClient2, ourQueue, serializer, QUEUE_PATH)
                    .lockPath("/a/locks").buildQueue();
            consumerQueue2.start();
        }

        synchronized (takenItems) {
            while (takenItems.size() < itemQty) {
                takenItems.wait(1000);
            }
        }

        int i = 0;
        for (TestQueueItem item : takenItems) {
            Assert.assertEquals(item.str, Integer.toString(i++));
        }

        Assert.assertNotNull(thrownItemFromConsumer1.get());
        Assert.assertTrue((takenItemsForConsumer2.contains(thrownItemFromConsumer1.get())));
        Assert.assertTrue(Sets.intersection(takenItemsForConsumer1, takenItemsForConsumer2).size() == 0);
    } finally {
        IOUtils.closeQuietly(producerQueue);
        IOUtils.closeQuietly(consumerQueue1);
        IOUtils.closeQuietly(consumerQueue2);

        IOUtils.closeQuietly(producerClient);
        IOUtils.closeQuietly(consumerClient1);
        IOUtils.closeQuietly(consumerClient2);
    }
}

From source file:it.units.malelab.ege.util.DUMapper.java

private static double[][][] buildGEData(String mapperName, int generations, int genotypeSize, Problem problem,
        long seed, int tournamentSize) throws InterruptedException, ExecutionException {
    Mapper<BitsGenotype, String> mapper;
    if (mapperName.equals("whge")) {
        mapper = new WeightedHierarchicalMapper<>(2, problem.getGrammar());
    } else if (mapperName.equals("hge")) {
        mapper = new HierarchicalMapper<>(problem.getGrammar());
    } else {
        mapper = new StandardGEMapper<>(8, 10, problem.getGrammar());
    }
    StandardConfiguration<BitsGenotype, String, NumericFitness> configuration = new StandardConfiguration<>(500,
            generations, new RandomInitializer<>(new BitsGenotypeFactory(genotypeSize)),
            new Any<BitsGenotype>(), mapper,
            new Utils.MapBuilder<GeneticOperator<BitsGenotype>, Double>()
                    .put(new LengthPreservingTwoPointsCrossover(), 0.8d)
                    .put(new ProbabilisticMutation(0.01), 0.2d).build(),
            new ComparableRanker<>(new IndividualComparator<BitsGenotype, String, NumericFitness>(
                    IndividualComparator.Attribute.FITNESS)),
            new Tournament<Individual<BitsGenotype, String, NumericFitness>>(tournamentSize),
            new LastWorst<Individual<BitsGenotype, String, NumericFitness>>(), 500, true, problem, false, -1,
            -1);
    StandardEvolver evolver = new StandardEvolver(configuration, false);
    List<EvolverListener> listeners = new ArrayList<>();
    final EvolutionImageSaverListener evolutionImageSaverListener = new EvolutionImageSaverListener(
            Collections.EMPTY_MAP, null, EvolutionImageSaverListener.ImageType.DU);
    listeners.add(evolutionImageSaverListener);
    listeners.add(new CollectorGenerationLogger<>(Collections.EMPTY_MAP, System.out, true, 10, " ", " | ",
            new Population(), new NumericFirstBest(false, problem.getTestingFitnessComputer(), "%6.2f"),
            new Diversity(), new BestPrinter(problem.getPhenotypePrinter(), "%30.30s")));
    ExecutorService executorService = Executors.newCachedThreadPool();
    evolver.solve(executorService, new Random(seed), listeners);
    return evolutionImageSaverListener.getLastEvolutionData();
}

From source file:com.alliander.osgp.webdevicesimulator.application.config.ApplicationContext.java

@Bean(destroyMethod = "releaseExternalResources")
public ServerBootstrap serverBootstrap() {
    final ChannelFactory factory = new NioServerSocketChannelFactory(Executors.newCachedThreadPool(),
            Executors.newCachedThreadPool());

    final ServerBootstrap bootstrap = new ServerBootstrap(factory);

    bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
        @Override
        public ChannelPipeline getPipeline()
                throws InvalidKeySpecException, NoSuchAlgorithmException, IOException, NoSuchProviderException {
            final ChannelPipeline pipeline = ApplicationContext.this.createPipeLine();
            LOGGER.info("Created new server pipeline");

            return pipeline;
        }
    });

    bootstrap.setOption("child.tcpNoDelay", true);
    bootstrap.setOption("child.keepAlive", false);

    bootstrap.bind(new InetSocketAddress(this.oslpPortServer()));

    return bootstrap;
}

From source file:edu.lternet.pasta.portal.HarvesterServlet.java

/**
 * The doPost method of the servlet. <br>
 *
 * This method is called when a form is submitted with its method attribute set to post.
 * 
 * @param request
 *          the request sent by the client to the server
 * @param response
 *          the response sent by the server to the client
 * @throws ServletException
 *           if an error occurred
 * @throws IOException
 *           if an error occurred
 */
public void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    HttpSession httpSession = request.getSession();
    ServletContext servletContext = httpSession.getServletContext();
    ArrayList<String> documentURLs = null;
    File emlFile = null;
    String emlTextArea = null;
    Harvester harvester = null;
    String harvestId = null;
    String harvestListURL = null;
    String harvestReportId = null;
    boolean isDesktopUpload = false;
    boolean isEvaluate = false;
    String uid = (String) httpSession.getAttribute("uid");
    String urlTextArea = null;
    String warningMessage = "";

    try {
        if (uid == null) {
            throw new PastaAuthenticationException(LOGIN_WARNING);
        } else {
            /*
             * The "metadataSource" request parameter can have a value of
             * "emlText", "emlFile", "urlList", "harvestList", or
             * "desktopHarvester". It is set as a hidden input field in 
             * each of the harvester forms.
             */
            String metadataSource = request.getParameter("metadataSource");

            /*
             * "mode" can have a value of "evaluate" or "upgrade". It is set
             * as the value of the submit button in each of the harvester
             * forms.
             */
            String mode = request.getParameter("submit");
            if ((mode != null) && (mode.equalsIgnoreCase("evaluate"))) {
                isEvaluate = true;
            }

            if ((metadataSource != null) && (!metadataSource.equals("desktopHarvester"))) {
                harvestId = generateHarvestId();
                if (isEvaluate) {
                    harvestReportId = uid + "-evaluate-" + harvestId;
                } else {
                    harvestReportId = uid + "-upload-" + harvestId;
                }
            }

            if (metadataSource != null) {
                if (metadataSource.equals("emlText")) {
                    emlTextArea = request.getParameter("emlTextArea");
                    if (emlTextArea == null || emlTextArea.trim().isEmpty()) {
                        warningMessage = "<p class=\"warning\">Please enter the text of an EML document into the text area.</p>";
                    }
                } else if (metadataSource.equals("emlFile")) {
                    Collection<Part> parts = request.getParts();
                    for (Part part : parts) {
                        if (part.getContentType() != null) {
                            // save EML file to disk
                            emlFile = processUploadedFile(part);
                        } else {
                            /*
                             * Parse the request parameters.
                             */
                            String fieldName = part.getName();
                            String fieldValue = request.getParameter(fieldName);
                            if (fieldName != null && fieldValue != null) {
                                if (fieldName.equals("submit") && fieldValue.equalsIgnoreCase("evaluate")) {
                                    isEvaluate = true;
                                } else if (fieldName.equals("desktopUpload")
                                        && fieldValue.equalsIgnoreCase("1")) {
                                    isDesktopUpload = true;
                                }
                            }
                        }
                    }
                } else if (metadataSource.equals("urlList")) {
                    urlTextArea = request.getParameter("urlTextArea");
                    if (urlTextArea == null || urlTextArea.trim().isEmpty()) {
                        warningMessage = "<p class=\"warning\">Please enter one or more EML document URLs into the text area.</p>";
                    } else {
                        documentURLs = parseDocumentURLsFromTextArea(urlTextArea);
                        warningMessage = CHECK_BACK_LATER;
                    }
                } else if (metadataSource.equals("harvestList")) {
                    harvestListURL = request.getParameter("harvestListURL");
                    if (harvestListURL == null || harvestListURL.trim().isEmpty()) {
                        warningMessage = "<p class=\"warning\">Please enter the URL to a Metacat Harvest List.</p>";
                    } else {
                        documentURLs = parseDocumentURLsFromHarvestList(harvestListURL);
                        warningMessage = CHECK_BACK_LATER;
                    }
                }
                /*
                 * If the metadata source is "desktopHarvester", we already have the
                 * EML file stored in a session attribute. Now we need to retrieve
                 * the data files from the browser's form fields and write the
                 * data files to a URL accessible location.
                 */
                else if (metadataSource.equals("desktopHarvester")) {
                    emlFile = (File) httpSession.getAttribute("emlFile");
                    ArrayList<Entity> entityList = parseEntityList(emlFile);
                    harvestReportId = (String) httpSession.getAttribute("harvestReportId");
                    String dataPath = servletContext.getRealPath(DESKTOP_DATA_DIR);
                    String harvestPath = String.format("%s/%s", dataPath, harvestReportId);

                    Collection<Part> parts = request.getParts();
                    String objectName = null;
                    Part filePart = null;

                    for (Part part : parts) {
                        if (part.getContentType() != null) {
                            // save data file to disk
                            //processDataFile(part, harvestPath);
                            filePart = part;
                        } else {
                            /*
                             * Parse the request parameters.
                             */
                            String fieldName = part.getName();
                            String fieldValue = request.getParameter(fieldName);
                            if (fieldName != null && fieldValue != null) {
                                if (fieldName.equals("submit") && fieldValue.equalsIgnoreCase("evaluate")) {
                                    isEvaluate = true;
                                } else if (fieldName.startsWith("object-name-")) {
                                    objectName = fieldValue;
                                }
                            }
                        }

                        if (filePart != null && objectName != null) {
                            processDataFile(filePart, harvestPath, objectName);
                            objectName = null;
                            filePart = null;
                        }

                    }

                    emlFile = transformDesktopEML(harvestPath, emlFile, harvestReportId, entityList);
                }
            } else {
                throw new IllegalStateException("No value specified for request parameter 'metadataSource'");
            }

            if (harvester == null) {
                harvester = new Harvester(harvesterPath, harvestReportId, uid, isEvaluate);
            }

            if (emlTextArea != null) {
                harvester.processSingleDocument(emlTextArea);
            } else if (emlFile != null) {
                if (isDesktopUpload) {
                    ArrayList<Entity> entityList = parseEntityList(emlFile);
                    httpSession.setAttribute("entityList", entityList);
                    httpSession.setAttribute("emlFile", emlFile);
                    httpSession.setAttribute("harvestReportId", harvestReportId);
                    httpSession.setAttribute("isEvaluate", new Boolean(isEvaluate));
                } else {
                    harvester.processSingleDocument(emlFile);
                }
            } else if (documentURLs != null) {
                harvester.setDocumentURLs(documentURLs);
                ExecutorService executorService = Executors.newCachedThreadPool();
                executorService.execute(harvester);
                executorService.shutdown();
            }
        }
    } catch (Exception e) {
        handleDataPortalError(logger, e);
    }

    request.setAttribute("message", warningMessage);

    /*
     * If we have a new reportId, and either there is no warning message or
     * it's the "Check back later" message, set the harvestReportID session
     * attribute to the new reportId value.
     */
    if (harvestReportId != null && harvestReportId.length() > 0
            && (warningMessage.length() == 0 || warningMessage.equals(CHECK_BACK_LATER))) {
        httpSession.setAttribute("harvestReportID", harvestReportId);
    }

    if (isDesktopUpload) {
        RequestDispatcher requestDispatcher = request.getRequestDispatcher("./desktopHarvester.jsp");
        requestDispatcher.forward(request, response);
    } else if (warningMessage.length() == 0) {
        response.sendRedirect("./harvestReport.jsp");
    } else {
        RequestDispatcher requestDispatcher = request.getRequestDispatcher("./harvester.jsp");
        requestDispatcher.forward(request, response);
    }

}