List of usage examples for java.util.concurrent.ThreadPoolExecutor#submit

The examples below exercise both overloads of submit:

    public Future<?> submit(Runnable task)
    public <T> Future<T> submit(Callable<T> task)
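Before the real-world examples, here is a minimal, self-contained sketch of the pattern they all share: submit tasks to a ThreadPoolExecutor, collect the returned Future handles, and call get() to block until each task completes. This sketch is illustrative and not taken from any of the source files below; the class name, pool sizes, and task bodies are arbitrary.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class SubmitSketch {
    public static void main(String[] args) throws Exception {
        // A small fixed-size pool: 4 core/max threads, idle threads kept alive for 60s.
        ThreadPoolExecutor pool = new ThreadPoolExecutor(4, 4, 60, TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>());

        // submit(Runnable) returns a Future<?> whose get() yields null on completion.
        Future<?> done = pool.submit(new Runnable() {
            public void run() {
                System.out.println("runnable task ran on " + Thread.currentThread().getName());
            }
        });

        // submit(Callable<T>) returns a Future<T> carrying the task's result.
        List<Future<Integer>> results = new ArrayList<Future<Integer>>();
        for (int i = 0; i < 10; i++) {
            final int n = i;
            results.add(pool.submit(new Callable<Integer>() {
                public Integer call() {
                    return n * n;
                }
            }));
        }

        done.get(); // blocks until the Runnable finishes
        for (Future<Integer> f : results) {
            System.out.println(f.get()); // blocks until each Callable finishes
        }

        pool.shutdown();
    }
}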
From source file:metlos.executors.batch.BatchCpuThrottlingExecutorTest.java
@Test
public void maxUsage_MultiThreaded() throws Exception {
    NamingThreadFactory factory = new NamingThreadFactory();
    ThreadPoolExecutor e = new ThreadPoolExecutor(10, 10, 0, TimeUnit.DAYS,
            new LinkedBlockingQueue<Runnable>(), factory);
    e.prestartAllCoreThreads();

    List<Future<?>> payloadResults = new ArrayList<Future<?>>();

    long startTime = System.nanoTime();

    // create load
    for (int i = 0; i < NOF_JOBS; ++i) {
        Future<?> f = e.submit(new Payload());
        payloadResults.add(f);
    }

    // wait for it all to finish
    for (Future<?> f : payloadResults) {
        f.get();
    }

    long endTime = System.nanoTime();
    long time = endTime - startTime;
    LOG.info("MAX Multithreaded test took " + (time / 1000.0 / 1000.0) + "ms");

    ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
    long cpuTime = 0;
    for (Thread t : factory.createdThreads) {
        long threadCpuTime = threadBean.getThreadCpuTime(t.getId());
        LOG.info(t.getName() + ": " + threadCpuTime + "ns");
        cpuTime += threadCpuTime;
    }

    float actualUsage = (float) cpuTime / time;
    LOG.info("MAX Multithreaded overall usage: " + actualUsage);
}
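The pattern to note here: each submit(Runnable) returns a Future<?>, and the test joins on every one with get() before stopping the clock, so the measured window covers all NOF_JOBS tasks. prestartAllCoreThreads() creates all ten workers before the timer starts, which keeps thread startup cost out of the measurement and ensures factory.createdThreads is fully populated for the per-thread CPU queries via ThreadMXBean.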
From source file:Service.Service.java
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("switchLightsOff")
public Response switchLight(@FormParam("lights") String lightsJson)
        throws ParseException, ExecutionException {
    JSONObject response = new JSONObject();
    JSONArray tasks = new JSONArray();
    JSONArray results = new JSONArray();

    JSONArray lightsList = (JSONArray) new JSONParser().parse(lightsJson);
    for (Object entry : lightsList) {
        JSONObject obj = (JSONObject) entry;
        if (obj.get("On").equals("on")) {
            this.lights.put((String) obj.get("Light"), true);
        } else {
            this.lights.put((String) obj.get("Light"), false);
        }
        System.out.println(obj.get("Light"));
        System.out.println(obj.get("On"));
    }

    ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
    List<Future<State>> list = new ArrayList<Future<State>>();

    for (Map.Entry<String, Boolean> entry : lights.entrySet()) {
        JSONObject obj = new JSONObject();
        Callable<State> light = new Light(entry.getKey(), entry.getValue(), 1);
        obj.put("Light found", ((Light) light).getLocation());
        obj.put("On", ((Light) light).getState());
        Future<State> future = executor.submit(light);
        list.add(future);
        tasks.add(obj);
    }

    JSONArray newStates = new JSONArray();
    for (Future<State> event : list) {
        try {
            JSONObject obj = new JSONObject();
            JSONObject state = new JSONObject();
            obj.put("task", new Date() + " - " + event.get().getMessage());
            state.put("Light", event.get().getField());
            state.put("On", event.get().getState());
            results.add(obj);
            newStates.add(state);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    executor.shutdown();

    response.put("Lights", tasks);
    response.put("results", results);
    response.put("newStates", newStates);
    return Response.status(200).entity(response).build();
}
From source file:Service.Service.java
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("switchLightsOn")
public Response switchLightsOn(@FormParam("lights") String lightsJson)
        throws ParseException, ExecutionException {
    JSONObject response = new JSONObject();
    JSONArray tasks = new JSONArray();
    JSONArray results = new JSONArray();

    JSONArray lightsList = (JSONArray) new JSONParser().parse(lightsJson);
    for (Object entry : lightsList) {
        JSONObject obj = (JSONObject) entry;
        if (obj.get("On").equals("on")) {
            this.lights.put((String) obj.get("Light"), true);
        } else {
            this.lights.put((String) obj.get("Light"), false);
        }
        System.out.println(obj.get("Light"));
        System.out.println(obj.get("On"));
    }

    ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newCachedThreadPool();
    List<Future<State>> list = new ArrayList<Future<State>>();

    for (Map.Entry<String, Boolean> entry : lights.entrySet()) {
        JSONObject obj = new JSONObject();
        Callable<State> light = new Light(entry.getKey(), entry.getValue(), 2);
        obj.put("Light found", ((Light) light).getLocation());
        obj.put("On", ((Light) light).getState());
        Future<State> future = executor.submit(light);
        list.add(future);
        tasks.add(obj);
    }

    JSONArray newStates = new JSONArray();
    for (Future<State> event : list) {
        try {
            JSONObject obj = new JSONObject();
            JSONObject state = new JSONObject();
            obj.put("task", new Date() + " - " + event.get().getMessage());
            state.put("Light", event.get().getField());
            state.put("On", event.get().getState());
            results.add(obj);
            newStates.add(state);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    executor.shutdown();

    response.put("Lights", tasks);
    response.put("results", results);
    response.put("newStates", newStates);
    return Response.status(200).entity(response).build();
}
From source file:org.thelq.stackexchange.dbimport.Controller.java
public void importAll(int threads, final boolean createTables) throws InterruptedException {
    // Build a test session factory with the first entry to see if database credentials work
    DatabaseWriter.buildSessionFactory(dumpContainers.get(0));
    if (metadataMap == null)
        initMetadataMap(dumpContainers.get(0).getSessionFactory());

    // Database works, start importing
    ThreadPoolExecutor importThreadPool = (ThreadPoolExecutor) Executors.newFixedThreadPool(threads,
            new BasicThreadFactory.Builder().namingPattern("seImport-pool-%d").build());
    log.info("Starting import pool with " + threads + " threads");
    if (createTables) {
        log.info("Creating all tables");
        final CountDownLatch finishedLatch = new CountDownLatch(dumpContainers.size());
        for (final DumpContainer curContainer : dumpContainers)
            importThreadPool.submit(new Runnable() {
                public void run() {
                    try {
                        MDC.put("longContainer", " [" + curContainer.getName() + "]");
                        synchronized (curContainer.getHibernateCreateLock()) {
                            if (curContainer.getSessionFactory() == null)
                                DatabaseWriter.buildSessionFactory(curContainer);
                        }
                        DatabaseWriter.createTables(curContainer);
                    } finally {
                        finishedLatch.countDown();
                    }
                }
            });

        // Wait for all table-creation tasks to finish
        finishedLatch.await();
    }

    // Order threads by first entry from each container, second entry from each container...
    // This is so we don't slam a single container (IE 7z archives) with all the threads
    List<Future<Void>> futures = new ArrayList<Future<Void>>();
    int curIndex = 0;
    while (true) {
        int numFailed = 0;
        for (final DumpContainer curContainer : dumpContainers) {
            // Make sure this entry actually exists
            List<DumpEntry> entries = curContainer.getEntries();
            if (curIndex >= entries.size()) {
                numFailed++;
                continue;
            }

            // Add to queue
            final DumpEntry curEntry = curContainer.getEntries().get(curIndex);
            futures.add(importThreadPool.submit(new Callable<Void>() {
                public Void call() {
                    currentDumpEntry.set(curEntry);
                    synchronized (curContainer.getHibernateCreateLock()) {
                        if (curContainer.getSessionFactory() == null)
                            DatabaseWriter.buildSessionFactory(curContainer);
                    }
                    importSingle(curContainer, curEntry, createTables);
                    currentDumpEntry.remove();
                    return null;
                }
            }));
        }

        // Check if we've exhausted all DumpEntries
        if (numFailed == dumpContainers.size())
            break;

        // Nope, continue to the next index
        curIndex++;
    }

    // Block until all imports have completed
    for (Future<Void> curFuture : futures)
        try {
            curFuture.get();
        } catch (Exception e) {
            log.error("Could not wait for import thread to complete", e);
        }

    Utils.shutdownPool(importThreadPool, "import pool");
    log.info("Import finished!");
}
From source file:org.nuxeo.ecm.automation.server.jaxrs.batch.BatchManagerFixture.java
@Test
public void testChunkConcurrency() throws Exception {
    // Initialize a batch
    BatchManager bm = Framework.getService(BatchManager.class);
    String batchId = bm.initBatch();

    // Add chunks concurrently
    int nbChunks = 100;
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 500L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>(nbChunks + 1));
    for (int i = 0; i < nbChunks; i++) {
        final int chunkIndex = i;
        tpe.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    bm.addStream(batchId, "0",
                            new ByteArrayInputStream(
                                    ("SomeChunkContent_" + chunkIndex + " ").getBytes(StandardCharsets.UTF_8)),
                            nbChunks, chunkIndex, "MyChunkedFile.txt", "text/plain", 0);
                } catch (IOException e) {
                    fail(e.getMessage());
                }
            }
        });
    }
    tpe.shutdown();
    boolean finish = tpe.awaitTermination(20, TimeUnit.SECONDS);
    assertTrue("timeout", finish);

    // Check chunked file
    Blob blob = bm.getBlob(batchId, "0");
    assertNotNull(blob);
    int nbOccurrences = 0;
    Pattern p = Pattern.compile("SomeChunkContent_");
    Matcher m = p.matcher(blob.getString());
    while (m.find()) {
        nbOccurrences++;
    }
    assertEquals(nbChunks, nbOccurrences);

    // Check storage size
    TransientStore ts = bm.getTransientStore();
    assertTrue(((AbstractTransientStore) ts).getStorageSize() > 17 * nbChunks);

    // Clean batch
    bm.clean(batchId);
    assertEquals(ts.getStorageSizeMB(), 0);
}
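This test and the two that follow use the shutdown()-then-awaitTermination() idiom instead of collecting Futures: shutdown() stops the executor from accepting new work, awaitTermination(20, TimeUnit.SECONDS) blocks until the already-submitted tasks drain, and assertTrue("timeout", finish) turns a hung task into a test failure. Keep in mind that with submit(), an exception thrown by the task (including the AssertionError from fail(...)) is captured in the discarded Future rather than propagated, so it is the subsequent content assertions that actually detect a failed upload.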
From source file:org.nuxeo.ecm.automation.server.jaxrs.batch.BatchManagerFixture.java
@Test
public void testBatchConcurrency() throws Exception {
    BatchManager bm = Framework.getService(BatchManager.class);

    // Initialize batches with one file concurrently
    int nbBatches = 100;
    String[] batchIds = new String[nbBatches];
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 500L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>(nbBatches + 1));
    for (int i = 0; i < nbBatches; i++) {
        final int batchIndex = i;
        tpe.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    String batchId = bm.initBatch();
                    bm.addStream(batchId, "0",
                            new ByteArrayInputStream(
                                    ("SomeContent_" + batchId).getBytes(StandardCharsets.UTF_8)),
                            "MyBatchFile.txt", "text/plain");
                    batchIds[batchIndex] = batchId;
                } catch (IOException e) {
                    fail(e.getMessage());
                }
            }
        });
    }
    tpe.shutdown();
    boolean finish = tpe.awaitTermination(20, TimeUnit.SECONDS);
    assertTrue("timeout", finish);

    // Check batches
    for (String batchId : batchIds) {
        assertNotNull(batchId);
    }

    // Test indexes 0, 9, 99, ..., nbFiles - 1
    int nbDigits = (int) (Math.log10(nbBatches) + 1);
    int divisor = nbBatches;
    for (int i = 0; i < nbDigits; i++) {
        int batchIndex = nbBatches / divisor - 1;
        String batchId = batchIds[batchIndex];
        Blob blob = bm.getBlob(batchId, "0");
        assertNotNull(blob);
        assertEquals("MyBatchFile.txt", blob.getFilename());
        assertEquals("SomeContent_" + batchId, blob.getString());
        divisor = divisor / 10;
    }

    // Check storage size
    TransientStore ts = bm.getTransientStore();
    assertTrue(((AbstractTransientStore) ts).getStorageSize() > 12 * nbBatches);

    // Clean batches
    for (String batchId : batchIds) {
        bm.clean(batchId);
    }
    assertEquals(ts.getStorageSizeMB(), 0);
}
From source file:org.nuxeo.ecm.automation.server.jaxrs.batch.BatchManagerFixture.java
@Test
public void testFileConcurrency() throws Exception {
    // Initialize a batch
    BatchManager bm = Framework.getService(BatchManager.class);
    String batchId = bm.initBatch();

    // Add files concurrently
    int nbFiles = 100;
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 500L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>(nbFiles + 1));
    for (int i = 0; i < nbFiles; i++) {
        final String fileIndex = String.valueOf(i);
        tpe.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    bm.addStream(batchId, fileIndex,
                            new ByteArrayInputStream(
                                    ("SomeContent_" + fileIndex).getBytes(StandardCharsets.UTF_8)),
                            fileIndex + ".txt", "text/plain");
                } catch (IOException e) {
                    fail(e.getMessage());
                }
            }
        });
    }
    tpe.shutdown();
    boolean finish = tpe.awaitTermination(20, TimeUnit.SECONDS);
    assertTrue("timeout", finish);

    // Check blobs
    List<Blob> blobs = bm.getBlobs(batchId);
    assertEquals(nbFiles, blobs.size());

    // Test indexes 0, 9, 99, ..., nbFiles - 1
    int nbDigits = (int) (Math.log10(nbFiles) + 1);
    int divisor = nbFiles;
    for (int i = 0; i < nbDigits; i++) {
        int fileIndex = nbFiles / divisor - 1;
        assertEquals(fileIndex + ".txt", blobs.get(fileIndex).getFilename());
        assertEquals("SomeContent_" + fileIndex, blobs.get(fileIndex).getString());
        divisor = divisor / 10;
    }

    // Check storage size
    TransientStore ts = bm.getTransientStore();
    assertTrue(((AbstractTransientStore) ts).getStorageSize() > 12 * nbFiles);

    // Clean batch
    bm.clean(batchId);
    assertEquals(ts.getStorageSizeMB(), 0);
}
From source file:com.kolich.boildown.Boil.java
private final void run() throws Exception {
    final Boiler.CompressionMethod method;
    final List<String> arguments;
    if (compress_ != null) {
        arguments = colonSplitter.splitToList(compress_);
        method = Boiler.CompressionMethod.COMPRESS;
    } else {
        arguments = colonSplitter.splitToList(decompress_);
        method = Boiler.CompressionMethod.DECOMPRESS;
    }
    if (arguments.size() != 3) {
        throw new IllegalArgumentException("Forwarder must be in the format of [port]:[host]:[port]");
    }

    // Parse the arguments.
    final int listenPort = Integer.parseInt(arguments.get(0));
    final String forwardHost = arguments.get(1);
    final int forwardPort = Integer.parseInt(arguments.get(2));

    final Boiler.Strategery strategery = getStrategery();

    final ThreadFactoryBuilder factoryBuilder = new ThreadFactoryBuilder().setDaemon(true)
            .setNameFormat("boiler-%d (" + listenPort + ":" + forwardHost + ":" + forwardPort + ")");
    final ThreadPoolExecutor threadPool = (ThreadPoolExecutor) Executors.newFixedThreadPool(poolSize_,
            factoryBuilder.build());

    try (final ServerSocket listener = new ServerSocket(listenPort)) {
        // Run loop!
        while (true) {
            // Blocks, waiting for new connections.
            final Socket client = listener.accept();
            if (threadPool.getActiveCount() >= poolSize_) {
                // All boilers busy, forcibly hang up.
                IOUtils.closeQuietly(client);
            } else {
                // Submit the boiler to the pool, only if there's space to safely do so.
                threadPool.submit(
                        new Boiler(client, method, strategery, forwardHost, forwardPort, bufferSize_));
            }
        }
    } catch (Exception e) {
        log.error("Exception in main run-loop.", e);
    } finally {
        threadPool.shutdown();
    }
}
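The detail worth noting here is the getActiveCount() check before each submit: the pool has a fixed size of poolSize_ threads, so the accept loop only submits a new connection when a worker appears free, and otherwise closes the client socket immediately rather than letting connections queue up behind busy boilers. getActiveCount() is documented as an approximate count, so this is a best-effort admission check rather than a strict guarantee.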
From source file:com.bleum.canton.jms.scheduler.AbstractJMSScheduler.java
/**
 * Run the tasks using threads.
 *
 * @param tasks
 */
private void excuteTasks(List<JMSTask> tasks) {
    int tSize = tasks.size();
    if (tSize <= 0) {
        return;
    }
    ThreadPoolExecutor executor = new ThreadPoolExecutor(1, threads, 60, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>(maxTasksPerThread * threads));
    for (final JMSTask task : tasks) {
        final int mRetry = this.maxRetry;
        final int mAckRetry = this.maxAckRetry;
        runFutures.add(executor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    sendMessage(task);
                    if (clientAck == JMSTaskConstant.NO_ACKNOWLEDGE) {
                        jmsTaskDao.updateTaskCompeleted(task.getId());
                    } else if (clientAck == JMSTaskConstant.CLIENT_ACKNOWLEDGE) {
                        // if sent maxRetry times, won't wait for acknowledge, just complete it.
                        jmsTaskDao.updateTaskProcessed(task, mAckRetry);
                    }
                } catch (Exception e) {
                    // if retried sending maxRetry times, make it fatal, and no longer retry.
                    task.setLastError(e.getClass().getSimpleName() + ":" + e.getMessage());
                    jmsTaskDao.updateErrorTask(task, mRetry);
                }
            }
        }));
    }
}
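Two points about this example: the work queue is bounded at maxTasksPerThread * threads and no RejectedExecutionHandler is supplied, so the default AbortPolicy applies and a submit() beyond the queue's capacity would throw RejectedExecutionException. The returned Futures are stashed in runFutures, presumably so a caller elsewhere in the scheduler can wait on or cancel the batch.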
From source file:org.apache.hadoop.hbase.PerformanceEvaluation2.java
private void doMultipleClients(final Test cmd, final List<TableSplit> splits, final int nthread)
        throws IOException {
    BlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>(nthread);
    final ThreadPoolExecutor services = new ThreadPoolExecutor(nthread, nthread, 10, TimeUnit.SECONDS, queue,
            new ThreadPoolExecutor.CallerRunsPolicy());

    for (final TableSplit ts : splits) {
        services.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    long startTime = System.currentTimeMillis();
                    runOneClient(cmd, ts);
                    long elapsedTime = System.currentTimeMillis() - startTime;
                    LOG.info("Finished " + Thread.currentThread().getName() + " in " + elapsedTime + "ms for "
                            + cmd.rows.get() + " rows and " + cmd.kvs.get() + " cols");
                    totalRowCount.add(cmd.rows.get());
                    totalKVCount.add(cmd.kvs.get());
                } catch (Exception e) {
                    e.printStackTrace();
                    throw new RuntimeException(e);
                }
            }
        });
    }

    services.shutdown();
    try {
        services.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
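In contrast to the previous example, back-pressure here is handled by the queue and the rejection policy: the work queue is capped at nthread entries, and CallerRunsPolicy makes the submitting thread execute a task itself whenever all workers and queue slots are occupied, which naturally throttles the submission loop instead of throwing RejectedExecutionException.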