Example usage for java.util.concurrent ExecutorCompletionService ExecutorCompletionService(Executor)

Introduction

This page collects example usages of the java.util.concurrent ExecutorCompletionService(Executor) constructor.

Prototype

public ExecutorCompletionService(Executor executor) 

Document

Creates an ExecutorCompletionService using the supplied executor for base task execution and a LinkedBlockingQueue as a completion queue.
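
Before the project examples below, here is a minimal, self-contained sketch of the constructor in use; the class name CompletionServiceSketch and the squaring tasks are purely illustrative and not taken from any of the projects. It submits a few tasks through the completion service and retrieves the results with take(), which returns futures in completion order rather than submission order.

import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CompletionServiceSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        try {
            // Wrap the executor; completed results are queued internally
            // in a LinkedBlockingQueue, as described above.
            CompletionService<Integer> cs = new ExecutorCompletionService<Integer>(pool);
            for (int i = 0; i < 4; i++) {
                final int n = i;
                cs.submit(new Callable<Integer>() {
                    public Integer call() {
                        return n * n;
                    }
                });
            }
            // take() blocks until some submitted task has completed.
            for (int i = 0; i < 4; i++) {
                System.out.println("completed: " + cs.take().get());
            }
        } finally {
            pool.shutdown();
        }
    }
}

Most of the examples below follow the same pattern: submit N tasks, then call take() exactly N times so that every completed result is consumed.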

Usage

From source file:org.nickelproject.nickel.mapReduce.ThreadedMapper.java

@Override
protected <T> CompletionService<T> newCompletionService() {
    return new ExecutorCompletionService<T>(mExecutor);
}

From source file:org.apache.hadoop.hbase.util.TestIdLock.java

@Test
public void testMultipleClients() throws Exception {
    ExecutorService exec = Executors.newFixedThreadPool(NUM_THREADS);
    try {
        ExecutorCompletionService<Boolean> ecs = new ExecutorCompletionService<Boolean>(exec);
        for (int i = 0; i < NUM_THREADS; ++i)
            ecs.submit(new IdLockTestThread("client_" + i));
        for (int i = 0; i < NUM_THREADS; ++i) {
            Future<Boolean> result = ecs.take();
            assertTrue(result.get());
        }
        idLock.assertMapEmpty();
    } finally {
        exec.shutdown();
        exec.awaitTermination(5000, TimeUnit.MILLISECONDS);
    }
}

From source file:org.geoserver.bkprst.RestoreTask.java

@Override
public void run() {

    // Reads the previous backup info; if it cannot be read, aborts the restore
    BackupTask backupInfo = this.readBackupInfo(this.path);
    if (backupInfo == null) {
        LOGGER.severe("Backup data info were not written properly, the restore will not start");
        this.state = BrTaskState.FAILED;
        return;
    }

    // Sets up the filter to exclude some directories according to the previous backup info
    IOFileFilter excludeFilter = this.getExcludeFilter(backupInfo.includeData, backupInfo.includeGwc,
            backupInfo.includeLog);

    // Sets up source and destination
    File srcMount = new File(this.path);
    File trgMount = this.dataRoot.root();

    // Sets transaction
    this.trans = new RestoreTransaction(this, srcMount, trgMount, excludeFilter);

    try {
        // Starts the transaction
        this.trans.start();
        if (checkForHalt()) {
            return;
        }

        // Sets up the copy task
        ExecutorService ex = Executors.newFixedThreadPool(2);
        if (ex == null || ex.isTerminated()) {
            throw new IllegalArgumentException(
                    "Unable to run asynchronously using a terminated or null ThreadPoolExecutor");
        }
        ExecutorCompletionService<File> cs = new ExecutorCompletionService<File>(ex);

        this.act = new CopyTree(excludeFilter, cs, srcMount, trgMount);
        this.act.addCopyListener(new DefaultProgress(this.id.toString()) {
            public void onUpdateProgress(float percent) {
                super.onUpdateProgress(percent);
                progress = percent;
            }
        });

        // Starts restore
        int workSize = this.act.copy();
        LOGGER.info("Restore " + this.id + " has started");
        this.startTime = new Date();
        this.state = BrTaskState.RUNNING;

        // This keeps track of restore progress
        while (workSize-- > 0) {
            Future<File> future = cs.take();
            try {
                LOGGER.info("copied file: " + future.get());
            } catch (ExecutionException e) {
                LOGGER.log(Level.INFO, e.getLocalizedMessage(), e);
            }
            if (checkForHalt()) {
                ex.shutdown();
                if (!ex.awaitTermination(5, TimeUnit.SECONDS)) {
                    throw new RuntimeException("Unable to stop backup task");
                }
                return;
            }
        }

        // Restore completed
        this.trans.commit();

        // reload the config from disk
        getGeoServer().reload();
    } catch (Exception e) {
        LOGGER.log(Level.SEVERE, e.getLocalizedMessage(), e);

        // In case of errors, rollback
        this.trans.rollback();
    } finally {
        haltSemaphore.release();
    }
}

From source file:rk.java.compute.cep.ComputeService.java

public ComputeService(BlockingQueue<IPriceTick> queue, final int numberOfTickSources,
        IPriceEventSink eventbus) {
    this.feedQueue = queue;
    this.numberOfTickSources = numberOfTickSources;
    this.eventbus = eventbus;
    this.handlerCache = new ConcurrentHashMap<String, Compute>();
    this.stopWatch = new StopWatch("Dispatcher Task");
    executorService = Executors.newCachedThreadPool();
    ecs = new ExecutorCompletionService<StopWatch>(executorService);
    dispatchTaskFuture = ecs.submit(new Callable<StopWatch>() {
        @Override
        public StopWatch call() throws Exception {
            stopWatch.start();
            run();
            stopWatch.stop();
            return stopWatch;
        }
    });
}

From source file:org.ros.concurrent.RetryingExecutorService.java

/**
 * @param scheduledExecutorService
 *          the {@link ExecutorService} to wrap
 */
public RetryingExecutorService(ScheduledExecutorService scheduledExecutorService) {
    this.scheduledExecutorService = scheduledExecutorService;
    retryLoop = new RetryLoop();
    latches = Maps.newConcurrentMap();
    callables = Maps.newConcurrentMap();
    completionService = new ExecutorCompletionService<Boolean>(scheduledExecutorService);
    mutex = new Object();
    retryDelay = DEFAULT_RETRY_DELAY;
    retryTimeUnit = DEFAULT_RETRY_TIME_UNIT;
    running = true;
    // TODO(damonkohler): Unify this with the passed in ExecutorService.
    scheduledExecutorService.execute(retryLoop);
}

From source file:com.flipkart.bifrost.ListenTest.java

@Test
public void testSendReceive() throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
    mapper.setSerializationInclusion(JsonInclude.Include.NON_EMPTY);

    Connection connection = new Connection(Lists.newArrayList("localhost"), "guest", "guest");
    connection.start();

    BifrostExecutor<Void> executor = BifrostExecutor.<Void>builder(TestAction.class).connection(connection)
            .objectMapper(mapper).requestQueue("bifrost-send").responseQueue("bifrost-recv").concurrency(20)
            .executorService(Executors.newFixedThreadPool(20)).build();

    BifrostRemoteCallExecutionServer<Void> executionServer = BifrostRemoteCallExecutionServer
            .<Void>builder(TestAction.class).objectMapper(mapper).connection(connection).concurrency(20)
            .requestQueue("bifrost-send").build();
    executionServer.start();

    long startTime = System.currentTimeMillis();
    AtomicInteger counter = new AtomicInteger(0);
    int requestCount = 1000000;
    CompletionService<Void> ecs = new ExecutorCompletionService<>(Executors.newFixedThreadPool(50));
    List<Future<Void>> futures = Lists.newArrayListWithCapacity(requestCount);
    for (int i = 0; i < requestCount; i++) {
        futures.add(ecs.submit(new ServiceCaller(executor, counter)));
    }
    for (int i = 0; i < requestCount; i++) {
        try {
            ecs.take().get();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }
    while (counter.get() != requestCount)
        ;
    System.out.println(
            String.format("Completed: %d in %d ms", counter.get(), (System.currentTimeMillis() - startTime)));
    executor.shutdown();
    executionServer.stop();
    connection.stop();

    Assert.assertEquals(requestCount, counter.get());
}

From source file:org.geoserver.bkprst.BackupTask.java

@Override
public void run() {

    // Sets up the filter to exclude some directories according to the previous backup info
    IOFileFilter excludeFilter = this.getExcludeFilter(this.includeData, this.includeGwc, this.includeLog);

    // Sets up source and destination
    File srcMount = this.dataRoot.root();
    File trgMount = new File(this.path);

    // Sets transaction
    this.trans = new BackupTransaction(this, srcMount, trgMount, excludeFilter);

    try {
        // Deletes dest directory if existing
        if (trgMount.exists()) {
            Remove.deleteDirectory(trgMount,
                    FileFilterUtils.or(FileFilterUtils.directoryFileFilter(), FileFilterUtils.fileFileFilter()),
                    true, true);
        }

        // Starts the transaction
        this.trans.start();
        if (checkForHalt()) {
            LOGGER.fine("run:Halt requested " + this.id);
            return;
        }

        // Sets up the copy task
        ExecutorService ex = Executors.newFixedThreadPool(2);
        if (ex == null || ex.isTerminated()) {
            throw new IllegalArgumentException(
                    "Unable to run asynchronously using a terminated or null ThreadPoolExecutor");
        }
        ExecutorCompletionService<File> cs = new ExecutorCompletionService<File>(ex);

        this.act = new CopyTree(excludeFilter, cs, srcMount, trgMount);
        this.act.addCopyListener(new DefaultProgress(this.id.toString()) {
            public void onUpdateProgress(float percent) {
                super.onUpdateProgress(percent);
                progress = percent;
            }
        });

        // Starts backup
        int workSize = this.act.copy();

        // This keeps track of backup progress
        while (workSize-- > 0) {
            Future<File> future;
            try {
                future = cs.take();
                LOGGER.info("copied file: " + future.get());
            } catch (Exception e) {
                LOGGER.log(Level.INFO, e.getLocalizedMessage(), e);
            }

            if (checkForHalt()) {
                LOGGER.fine("run:Halt requested, shutting down threads " + this.id);
                ex.shutdown();
                if (!ex.awaitTermination(5, TimeUnit.SECONDS)) {
                    throw new RuntimeException("Unable to stop backup task");
                }
                return;
            }
        }

        // Writes info about backup
        if (!this.writeBackupInfo(this.path)) {
            LOGGER.severe(
                    "Backup data info was not written properly, a restore operation will fail on this data");
            this.state = BrTaskState.FAILED;
        }

        if (checkForHalt()) {
            LOGGER.fine("run:Halt requested " + this.id);
            return;
        }
        // Backup completed
        this.trans.commit();

    } catch (Exception e) {
        LOGGER.log(Level.SEVERE, e.getLocalizedMessage(), e);
        // In case of errors, roll back
        this.trans.rollback();
    } finally {
        haltSemaphore.release();
    }
}

From source file:org.springframework.integration.groovy.GroovyExpressionTests.java

@Test
public void testScriptFactoryCustomizerThreadSafety() throws Exception {
    final Customizer customizer = new Customizer(Collections.singletonMap("name", (Object) "foo"));
    final GroovyScriptFactory factory = new GroovyScriptFactory("Groovy Script", customizer);
    final ResourceScriptSource scriptSource = new ResourceScriptSource(
            new NamedByteArrayResource("\"name=${name}\"".getBytes(), "InlineScript"));
    Object scriptedObject = factory.getScriptedObject(scriptSource, null);
    assertEquals("name=foo", scriptedObject.toString());
    CompletionService<String> completionService = new ExecutorCompletionService<String>(
            Executors.newFixedThreadPool(10));
    for (int i = 0; i < 100; i++) {
        final String name = "bar" + i;
        completionService.submit(new Callable<String>() {
            public String call() throws Exception {
                Object scriptedObject;
                synchronized (customizer) {
                    customizer.setMap(Collections.singletonMap("name", (Object) name));
                    scriptedObject = factory.getScriptedObject(scriptSource, null);
                }
                String result = scriptedObject.toString();
                logger.debug("Result=" + result + " with name=" + name);
                if (!("name=" + name).equals(result)) {
                    throw new IllegalStateException("Wrong value (" + result + ") for: " + name);
                }
                return name;
            }
        });
    }
    Set<String> set = new HashSet<String>();
    for (int i = 0; i < 100; i++) {
        set.add(completionService.take().get());
    }
    assertEquals(100, set.size());
}

From source file:org.commonjava.util.partyline.ManyReadersWithPreExistingWriterTest.java

private void executeTestIteration() throws Exception {
    ThreadContext.getContext(true);

    ExecutorCompletionService<String> completionService = new ExecutorCompletionService<String>(executor);

    final AtomicBoolean readFlag = new AtomicBoolean(false);
    final AtomicBoolean writeFlag = new AtomicBoolean(false);

    completionService.submit(writer(writeFlag, readFlag));
    for (int i = 0; i < THREADS; i++) {
        completionService.submit(reader(readFlag));
    }

    writeFlag.set(true);

    for (int i = 0; i < (THREADS + 1); i++) {
        String error = completionService.take().get();
        if (error != null) {
            logger.info(error);
            fail("thread failed.");
        }
        assertThat(error, nullValue());
    }

    ThreadContext.clearContext();
}

From source file:com.appdynamics.monitors.hadoop.communicator.AmbariCommunicator.java

/**
 * Populates the <code>metrics</code> Map with all numeric Ambari cluster metrics.
 * @see #getClusterMetrics(java.io.Reader)
 *
 * @param metrics
 */
public void populate(Map<String, Object> metrics) {
    this.metrics = metrics;
    try {
        Reader response = (new Response("http://" + host + ":" + port + "/api/v1/clusters")).call();

        Map<String, Object> json = (Map<String, Object>) parser.parse(response, simpleContainer);
        try {
            List<Map> clusters = (ArrayList<Map>) json.get("items");

            CompletionService<Reader> threadPool = new ExecutorCompletionService<Reader>(executor);
            int count = 0;
            for (Map cluster : clusters) {
                if (xmlParser.isIncludeCluster((String) ((Map) cluster.get("Clusters")).get("cluster_name"))) {
                    threadPool.submit(new Response(cluster.get("href") + CLUSTER_FIELDS));
                    count++;
                }
            }
            for (; count > 0; count--) {
                getClusterMetrics(threadPool.take().get());
            }
        } catch (Exception e) {
            logger.error("Failed to parse cluster names: " + stackTraceToString(e));
        }
    } catch (Exception e) {
        logger.error("Failed to get response for cluster names: " + stackTraceToString(e));
    }
    executor.shutdown();
}