List of usage examples for java.util.concurrent.atomic.AtomicInteger.get()
public final int get()
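get() returns the current value of the counter, with the memory effects of a volatile read. The project examples below use it mostly to assert how many times a callback or task has run. As a starting point, here is a minimal, self-contained sketch (not taken from any of the projects listed below; class and variable names are illustrative) showing the common pattern of concurrent incrementAndGet() updates followed by a single get() read:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;

public class AtomicIntegerGetExample {
    public static void main(String[] args) throws InterruptedException {
        final AtomicInteger counter = new AtomicInteger(0);
        final int threadCount = 4;
        final CountDownLatch done = new CountDownLatch(threadCount);

        for (int i = 0; i < threadCount; i++) {
            new Thread(() -> {
                for (int j = 0; j < 1000; j++) {
                    counter.incrementAndGet(); // atomic read-modify-write
                }
                done.countDown();
            }).start();
        }

        done.await();
        // get() observes the latest value written by any thread
        System.out.println(counter.get()); // prints 4000
    }
}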
From source file:com.facebook.BatchRequestTests.java
@LargeTest
public void testBatchLastOnProgressCallbackIsCalledOnce() {
    final AtomicInteger count = new AtomicInteger();
    final AccessToken accessToken = getAccessTokenForSharedUser();
    String appId = getApplicationId();
    GraphRequest.setDefaultBatchApplicationId(appId);

    GraphRequest request1 = GraphRequest.newGraphPathRequest(accessToken, "4", null);
    assertNotNull(request1);
    GraphRequest request2 = GraphRequest.newGraphPathRequest(accessToken, "4", null);
    assertNotNull(request2);

    GraphRequestBatch batch = new GraphRequestBatch(request1, request2);
    batch.addCallback(new GraphRequestBatch.OnProgressCallback() {
        @Override
        public void onBatchCompleted(GraphRequestBatch batch) {
        }

        @Override
        public void onBatchProgress(GraphRequestBatch batch, long current, long max) {
            if (current == max) {
                count.incrementAndGet();
            } else if (current > max) {
                count.set(0);
            }
        }
    });
    batch.executeAndWait();
    assertEquals(1, count.get());
}
From source file:com.netflix.curator.framework.recipes.queue.TestDistributedQueue.java
@Test
public void testErrorMode() throws Exception {
    Timing timing = new Timing();
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), timing.session(),
            timing.connection(), new RetryOneTime(1));
    client.start();
    try {
        final AtomicReference<CountDownLatch> latch = new AtomicReference<CountDownLatch>(new CountDownLatch(1));
        final AtomicInteger count = new AtomicInteger(0);
        QueueConsumer<TestQueueItem> consumer = new QueueConsumer<TestQueueItem>() {
            @Override
            public void consumeMessage(TestQueueItem message) throws Exception {
                if (count.incrementAndGet() < 2) {
                    throw new Exception();
                }
                latch.get().countDown();
            }

            @Override
            public void stateChanged(CuratorFramework client, ConnectionState newState) {
            }
        };
        DistributedQueue<TestQueueItem> queue = QueueBuilder.builder(client, consumer, serializer, QUEUE_PATH)
                .lockPath("/locks").buildQueue();
        try {
            queue.start();

            TestQueueItem item = new TestQueueItem("1");
            queue.put(item);
            Assert.assertTrue(timing.awaitLatch(latch.get()));
            Assert.assertEquals(count.get(), 2);

            queue.setErrorMode(ErrorMode.DELETE);
            count.set(0);
            latch.set(new CountDownLatch(1));
            item = new TestQueueItem("1");
            queue.put(item);
            Assert.assertFalse(latch.get().await(5, TimeUnit.SECONDS)); // consumer should get called only once
            Assert.assertEquals(count.get(), 1);
        } finally {
            queue.close();
        }
    } finally {
        client.close();
    }
}
From source file:com.netflix.curator.framework.recipes.queue.TestDistributedQueue.java
@Test
public void testPutListener() throws Exception {
    final int itemQty = 10;

    DistributedQueue<TestQueueItem> queue = null;
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1));
    client.start();
    try {
        BlockingQueueConsumer<TestQueueItem> consumer = new BlockingQueueConsumer<TestQueueItem>(
                Mockito.mock(ConnectionStateListener.class));
        queue = QueueBuilder.builder(client, consumer, serializer, QUEUE_PATH).buildQueue();
        queue.start();

        QueueTestProducer producer = new QueueTestProducer(queue, itemQty, 0);

        final AtomicInteger listenerCalls = new AtomicInteger(0);
        QueuePutListener<TestQueueItem> listener = new QueuePutListener<TestQueueItem>() {
            @Override
            public void putCompleted(TestQueueItem item) {
                listenerCalls.incrementAndGet();
            }

            @Override
            public void putMultiCompleted(MultiItem<TestQueueItem> items) {
            }
        };
        queue.getPutListenerContainer().addListener(listener);

        ExecutorService service = Executors.newCachedThreadPool();
        service.submit(producer);

        int iteration = 0;
        while (consumer.size() < itemQty) {
            Assert.assertTrue(++iteration < 10);
            Thread.sleep(1000);
        }

        int i = 0;
        for (TestQueueItem item : consumer.getItems()) {
            Assert.assertEquals(item.str, Integer.toString(i++));
        }

        Assert.assertEquals(listenerCalls.get(), itemQty);
    } finally {
        IOUtils.closeQuietly(queue);
        IOUtils.closeQuietly(client);
    }
}
From source file:com.smartitengineering.cms.spi.impl.content.VelocityGeneratorTest.java
@Test
public void testMultiVelocityRepGeneration() throws IOException {
    TypeRepresentationGenerator generator = new VelocityRepresentationGenerator();
    final RepresentationTemplate template = mockery.mock(RepresentationTemplate.class);
    WorkspaceAPIImpl impl = new WorkspaceAPIImpl() {
        @Override
        public RepresentationTemplate getRepresentationTemplate(WorkspaceId id, String name) {
            return template;
        }
    };
    impl.setRepresentationGenerators(Collections.singletonMap(TemplateType.VELOCITY, generator));
    final RepresentationProvider provider = new RepresentationProviderImpl();
    final WorkspaceAPI api = impl;
    registerBeanFactory(api);
    final Content content = mockery.mock(Content.class);
    final Field field = mockery.mock(Field.class);
    final FieldValue value = mockery.mock(FieldValue.class);
    final Map<String, Field> fieldMap = mockery.mock(Map.class);
    final ContentType type = mockery.mock(ContentType.class);
    final Map<String, RepresentationDef> reps = mockery.mock(Map.class, "repMap");
    final RepresentationDef def = mockery.mock(RepresentationDef.class);
    final int threadCount = new Random().nextInt(100);
    logger.info("Number of parallel threads " + threadCount);
    mockery.checking(new Expectations() {
        {
            exactly(threadCount).of(template).getTemplateType();
            will(returnValue(TemplateType.VELOCITY));
            exactly(threadCount).of(template).getTemplate();
            final byte[] toByteArray = IOUtils.toByteArray(
                    getClass().getClassLoader().getResourceAsStream("scripts/velocity/test-template.vm"));
            will(returnValue(toByteArray));
            exactly(threadCount).of(template).getName();
            will(returnValue(REP_NAME));
            for (int i = 0; i < threadCount; ++i) {
                exactly(1).of(value).getValue();
                will(returnValue(String.valueOf(i)));
            }
            exactly(threadCount).of(field).getValue();
            will(returnValue(value));
            exactly(threadCount).of(fieldMap).get(with(Expectations.<String>anything()));
            will(returnValue(field));
            exactly(threadCount).of(content).getFields();
            will(returnValue(fieldMap));
            exactly(threadCount).of(content).getContentDefinition();
            will(returnValue(type));
            final ContentId contentId = mockery.mock(ContentId.class);
            exactly(2 * threadCount).of(content).getContentId();
            will(returnValue(contentId));
            final WorkspaceId wId = mockery.mock(WorkspaceId.class);
            exactly(threadCount).of(contentId).getWorkspaceId();
            will(returnValue(wId));
            exactly(2 * threadCount).of(type).getRepresentationDefs();
            will(returnValue(reps));
            exactly(2 * threadCount).of(reps).get(with(REP_NAME));
            will(returnValue(def));
            exactly(threadCount).of(def).getParameters();
            will(returnValue(Collections.emptyMap()));
            exactly(threadCount).of(def).getMIMEType();
            will(returnValue(GroovyGeneratorTest.MIME_TYPE));
            final ResourceUri rUri = mockery.mock(ResourceUri.class);
            exactly(threadCount).of(def).getResourceUri();
            will(returnValue(rUri));
            exactly(threadCount).of(rUri).getValue();
            will(returnValue("iUri"));
        }
    });
    final Set<String> set = Collections.synchronizedSet(new LinkedHashSet<String>(threadCount));
    final List<String> list = Collections.synchronizedList(new ArrayList<String>(threadCount));
    final AtomicInteger integer = new AtomicInteger(0);
    Threads group = new Threads();
    for (int i = 0; i < threadCount; ++i) {
        group.addThread(new Thread(new Runnable() {
            public void run() {
                Representation representation = provider.getRepresentation(REP_NAME, type, content);
                Assert.assertNotNull(representation);
                Assert.assertEquals(REP_NAME, representation.getName());
                final String rep = StringUtils.newStringUtf8(representation.getRepresentation());
                list.add(rep);
                set.add(rep);
                Assert.assertEquals(GroovyGeneratorTest.MIME_TYPE, representation.getMimeType());
                integer.addAndGet(1);
            }
        }));
    }
    group.start();
    try {
        group.join();
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
    }
    logger.info("Generated reps list: " + list);
    logger.info("Generated reps set: " + set);
    Assert.assertEquals(threadCount, integer.get());
    Assert.assertEquals(threadCount, list.size());
    Assert.assertEquals(threadCount, set.size());
}
From source file:com.smartitengineering.cms.spi.impl.content.VelocityGeneratorTest.java
@Test
public void testContinuousVelocityRepGeneration() throws IOException {
    TypeRepresentationGenerator generator = new VelocityRepresentationGenerator();
    final RepresentationTemplate template = mockery.mock(RepresentationTemplate.class);
    WorkspaceAPIImpl impl = new WorkspaceAPIImpl() {
        @Override
        public RepresentationTemplate getRepresentationTemplate(WorkspaceId id, String name) {
            return template;
        }
    };
    impl.setRepresentationGenerators(Collections.singletonMap(TemplateType.VELOCITY, generator));
    final RepresentationProvider provider = new RepresentationProviderImpl();
    final WorkspaceAPI api = impl;
    registerBeanFactory(api);
    final Content content = mockery.mock(Content.class);
    final Field field = mockery.mock(Field.class);
    final FieldValue value = mockery.mock(FieldValue.class);
    final Map<String, Field> fieldMap = mockery.mock(Map.class);
    final ContentType type = mockery.mock(ContentType.class);
    final Map<String, RepresentationDef> reps = mockery.mock(Map.class, "repMap");
    final RepresentationDef def = mockery.mock(RepresentationDef.class);
    final int threadCount = new Random().nextInt(100);
    logger.info("Number of parallel threads " + threadCount);
    mockery.checking(new Expectations() {
        {
            exactly(threadCount).of(template).getTemplateType();
            will(returnValue(TemplateType.VELOCITY));
            exactly(threadCount).of(template).getTemplate();
            final byte[] toByteArray = IOUtils.toByteArray(
                    getClass().getClassLoader().getResourceAsStream("scripts/velocity/test-template.vm"));
            will(returnValue(toByteArray));
            exactly(threadCount).of(template).getName();
            will(returnValue(REP_NAME));
            for (int i = 0; i < threadCount; ++i) {
                exactly(1).of(value).getValue();
                will(returnValue(String.valueOf(Integer.MAX_VALUE)));
            }
            exactly(threadCount).of(field).getValue();
            will(returnValue(value));
            exactly(threadCount).of(fieldMap).get(with(Expectations.<String>anything()));
            will(returnValue(field));
            exactly(threadCount).of(content).getFields();
            will(returnValue(fieldMap));
            exactly(threadCount).of(content).getContentDefinition();
            will(returnValue(type));
            final ContentId contentId = mockery.mock(ContentId.class);
            exactly(2 * threadCount).of(content).getContentId();
            will(returnValue(contentId));
            final WorkspaceId wId = mockery.mock(WorkspaceId.class);
            exactly(threadCount).of(contentId).getWorkspaceId();
            will(returnValue(wId));
            exactly(2 * threadCount).of(type).getRepresentationDefs();
            will(returnValue(reps));
            exactly(2 * threadCount).of(reps).get(with(REP_NAME));
            will(returnValue(def));
            exactly(threadCount).of(def).getParameters();
            will(returnValue(Collections.emptyMap()));
            exactly(threadCount).of(def).getMIMEType();
            will(returnValue(GroovyGeneratorTest.MIME_TYPE));
            final ResourceUri rUri = mockery.mock(ResourceUri.class);
            exactly(threadCount).of(def).getResourceUri();
            will(returnValue(rUri));
            exactly(threadCount).of(rUri).getValue();
            will(returnValue("iUri"));
        }
    });
    final Set<String> set = Collections.synchronizedSet(new LinkedHashSet<String>(threadCount));
    final List<String> list = Collections.synchronizedList(new ArrayList<String>(threadCount));
    final AtomicInteger integer = new AtomicInteger(0);
    Threads group = new Threads();
    for (int i = 0; i < threadCount; ++i) {
        group.addThread(new Thread(new Runnable() {
            public void run() {
                Representation representation = provider.getRepresentation(REP_NAME, type, content);
                Assert.assertNotNull(representation);
                Assert.assertEquals(REP_NAME, representation.getName());
                final String rep = StringUtils.newStringUtf8(representation.getRepresentation());
                list.add(rep);
                set.add(rep);
                Assert.assertEquals(GroovyGeneratorTest.MIME_TYPE, representation.getMimeType());
                integer.addAndGet(1);
            }
        }));
    }
    group.start();
    try {
        group.join();
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
    }
    logger.info("Generated reps list: " + list);
    logger.info("Generated reps set: " + set);
    Assert.assertEquals(threadCount, integer.get());
    Assert.assertEquals(threadCount, list.size());
    Assert.assertEquals(1, set.size());
}
From source file:org.apache.hadoop.hbase.client.TestAsyncProcess.java
@Test
public void testSubmitWithCB() throws Exception {
    ClusterConnection hc = createHConnection();
    final AtomicInteger updateCalled = new AtomicInteger(0);
    Batch.Callback<Object> cb = new Batch.Callback<Object>() {
        @Override
        public void update(byte[] region, byte[] row, Object result) {
            updateCalled.incrementAndGet();
        }
    };

    AsyncProcess ap = new MyAsyncProcess(hc, conf);
    List<Put> puts = new ArrayList<Put>();
    puts.add(createPut(1, true));

    final AsyncRequestFuture ars = ap.submit(DUMMY_TABLE, puts, false, cb, false);
    Assert.assertTrue(puts.isEmpty());
    ars.waitUntilDone();
    Assert.assertEquals(updateCalled.get(), 1);
}
From source file:org.apache.distributedlog.admin.DistributedLogAdmin.java
private static Map<String, StreamCandidate> checkStreams(final Namespace namespace,
        final Collection<String> streams, final OrderedScheduler scheduler, final int concurrency)
        throws IOException {
    final LinkedBlockingQueue<String> streamQueue = new LinkedBlockingQueue<String>();
    streamQueue.addAll(streams);
    final Map<String, StreamCandidate> candidateMap = new ConcurrentSkipListMap<String, StreamCandidate>();
    final AtomicInteger numPendingStreams = new AtomicInteger(streams.size());
    final CountDownLatch doneLatch = new CountDownLatch(1);
    Runnable checkRunnable = new Runnable() {
        @Override
        public void run() {
            while (!streamQueue.isEmpty()) {
                String stream;
                try {
                    stream = streamQueue.take();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
                StreamCandidate candidate;
                try {
                    LOG.info("Checking stream {}.", stream);
                    candidate = checkStream(namespace, stream, scheduler);
                    LOG.info("Checked stream {} - {}.", stream, candidate);
                } catch (Throwable e) {
                    LOG.error("Error on checking stream {} : ", stream, e);
                    doneLatch.countDown();
                    break;
                }
                if (null != candidate) {
                    candidateMap.put(stream, candidate);
                }
                if (numPendingStreams.decrementAndGet() == 0) {
                    doneLatch.countDown();
                }
            }
        }
    };
    Thread[] threads = new Thread[concurrency];
    for (int i = 0; i < concurrency; i++) {
        threads[i] = new Thread(checkRunnable, "check-thread-" + i);
        threads[i].start();
    }
    try {
        doneLatch.await();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    if (numPendingStreams.get() != 0) {
        throw new IOException(numPendingStreams.get() + " streams left w/o checked");
    }
    for (int i = 0; i < concurrency; i++) {
        threads[i].interrupt();
        try {
            threads[i].join();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
    return candidateMap;
}
From source file:metlos.executors.batch.BatchExecutorTest.java
public void testChangesInTaskCollectionPickedUpInRepetitions() throws Exception {
    final ConcurrentLinkedQueue<Runnable> tasks = new ConcurrentLinkedQueue<Runnable>();
    final AtomicInteger reportedNofTasks = new AtomicInteger();
    final CountDownLatch waitForTask2 = new CountDownLatch(2);
    final CountDownLatch waitForTask3 = new CountDownLatch(2);

    Runnable task1 = new Runnable() {
        @Override
        public void run() {
        }
    };

    Runnable task2 = new Runnable() {
        @Override
        public void run() {
            if (tasks.size() == 2) {
                reportedNofTasks.set(2);
                waitForTask2.countDown();
            }
        }
    };

    Runnable task3 = new Runnable() {
        @Override
        public void run() {
            if (tasks.size() == 3) {
                reportedNofTasks.set(3);
                waitForTask3.countDown();
            }
        }
    };

    BatchExecutor ex = getExecutor(10);

    tasks.add(task1);
    tasks.add(task2);

    ex.submitWithPreferedDurationAndFixedDelay(tasks, 0, 0, 0, TimeUnit.MILLISECONDS);

    //k, now the tasks should be running and there should be just 2 of them...
    //so we should be getting the value of "2" reported by the reportedNofTasks
    waitForTask2.countDown();
    waitForTask2.await();

    int currentReportedTasks = reportedNofTasks.get();

    assert currentReportedTasks == 2 : "We should be getting 2 tasks reported but are getting "
            + currentReportedTasks + " instead.";

    //k, now let's try updating the tasks collection... this should get picked up by the
    //repeated executions
    tasks.add(task3);

    //now the reported nof tasks should change to 3. let's wait on it first to make sure the executor has had time to
    //register the change.
    waitForTask3.countDown();
    waitForTask3.await();

    currentReportedTasks = reportedNofTasks.get();

    assert currentReportedTasks == 3 : "We should be getting 3 tasks reported but are getting "
            + currentReportedTasks + " instead.";

    ex.shutdown();
}
From source file:com.heliosdecompiler.helios.gui.controller.FileTreeController.java
@FXML
public void initialize() {
    this.rootItem = new TreeItem<>(new TreeNode("[root]"));
    this.root.setRoot(this.rootItem);
    this.root.setCellFactory(new TreeCellFactory<>(node -> {
        if (node.getParent() == null) {
            ContextMenu export = new ContextMenu();

            MenuItem exportItem = new MenuItem("Export");

            export.setOnAction(e -> {
                File file = messageHandler.chooseFile().withInitialDirectory(new File("."))
                        .withTitle(Message.GENERIC_CHOOSE_EXPORT_LOCATION_JAR.format())
                        .withExtensionFilter(new FileFilter(Message.FILETYPE_JAVA_ARCHIVE.format(), "*.jar"), true)
                        .promptSave();

                OpenedFile openedFile = (OpenedFile) node.getMetadata().get(OpenedFile.OPENED_FILE);

                Map<String, byte[]> clone = new HashMap<>(openedFile.getContents());

                backgroundTaskHelper.submit(
                        new BackgroundTask(Message.TASK_SAVING_FILE.format(node.getDisplayName()), true, () -> {
                            try {
                                if (!file.exists()) {
                                    if (!file.createNewFile()) {
                                        throw new IOException("Could not create export file");
                                    }
                                }

                                try (ZipOutputStream zipOutputStream = new ZipOutputStream(
                                        new FileOutputStream(file))) {
                                    for (Map.Entry<String, byte[]> ent : clone.entrySet()) {
                                        ZipEntry zipEntry = new ZipEntry(ent.getKey());
                                        zipOutputStream.putNextEntry(zipEntry);
                                        zipOutputStream.write(ent.getValue());
                                        zipOutputStream.closeEntry();
                                    }
                                }

                                messageHandler.handleMessage(Message.GENERIC_EXPORTED.format());
                            } catch (IOException ex) {
                                messageHandler.handleException(Message.ERROR_IOEXCEPTION_OCCURRED.format(), ex);
                            }
                        }));
            });

            export.getItems().add(exportItem);
            return export;
        }
        return null;
    }));

    root.addEventHandler(KeyEvent.KEY_RELEASED, event -> {
        if (event.getCode() == KeyCode.ENTER) {
            TreeItem<TreeNode> selected = this.root.getSelectionModel().getSelectedItem();
            if (selected != null) {
                if (selected.getChildren().size() != 0) {
                    selected.setExpanded(!selected.isExpanded());
                } else {
                    getParentController().getAllFilesViewerController().handleClick(selected.getValue());
                }
            }
        }
    });

    Tooltip tooltip = new Tooltip();
    StringBuilder search = new StringBuilder();
    List<TreeItem<TreeNode>> searchContext = new ArrayList<>();
    AtomicInteger searchIndex = new AtomicInteger();

    root.focusedProperty().addListener((observable, oldValue, newValue) -> {
        if (!newValue) {
            tooltip.hide();
            search.setLength(0);
        }
    });

    root.boundsInLocalProperty().addListener((observable, oldValue, newValue) -> {
        Bounds bounds = root.localToScreen(newValue);
        tooltip.setAnchorX(bounds.getMinX());
        tooltip.setAnchorY(bounds.getMinY());
    });

    root.addEventHandler(KeyEvent.KEY_PRESSED, event -> {
        if (tooltip.isShowing() && event.getCode() == KeyCode.UP) {
            if (searchIndex.decrementAndGet() < 0) {
                searchIndex.set(searchContext.size() - 1);
            }
        } else if (tooltip.isShowing() && event.getCode() == KeyCode.DOWN) {
            if (searchIndex.incrementAndGet() >= searchContext.size()) {
                searchIndex.set(0);
            }
        } else {
            return;
        }
        event.consume();

        root.scrollTo(root.getRow(searchContext.get(searchIndex.get())));
        root.getSelectionModel().select(searchContext.get(searchIndex.get()));
    });

    root.addEventHandler(KeyEvent.KEY_TYPED, event -> {
        if (event.getCharacter().charAt(0) == '\b') {
            if (search.length() > 0) {
                search.setLength(search.length() - 1);
            }
        } else if (event.getCharacter().charAt(0) == '\u001B') { //esc
            tooltip.hide();
            search.setLength(0);
            return;
        } else if (search.length() > 0
                || (search.length() == 0 && StringUtils.isAlphanumeric(event.getCharacter()))) {
            search.append(event.getCharacter());
            if (!tooltip.isShowing()) {
                tooltip.show(root.getScene().getWindow());
            }
        }

        if (!tooltip.isShowing())
            return;

        String str = search.toString();
        tooltip.setText("Search for: " + str);

        searchContext.clear();

        ArrayDeque<TreeItem<TreeNode>> deque = new ArrayDeque<>();
        deque.addAll(rootItem.getChildren());

        while (!deque.isEmpty()) {
            TreeItem<TreeNode> item = deque.poll();
            if (item.getValue().getDisplayName().contains(str)) {
                searchContext.add(item);
            }
            if (item.isExpanded() && item.getChildren().size() > 0)
                deque.addAll(item.getChildren());
        }

        searchIndex.set(0);
        if (searchContext.size() > 0) {
            root.scrollTo(root.getRow(searchContext.get(0)));
            root.getSelectionModel().select(searchContext.get(0));
        }
    });

    openedFileController.loadedFiles().addListener((MapChangeListener<String, OpenedFile>) change -> {
        if (change.getValueAdded() != null) {
            updateTree(change.getValueAdded());
        }
        if (change.getValueRemoved() != null) {
            this.rootItem.getChildren()
                    .removeIf(ti -> ti.getValue().equals(change.getValueRemoved().getRoot()));
        }
    });
}
From source file:com.twitter.distributedlog.admin.DistributedLogAdmin.java
private static Map<String, StreamCandidate> checkStreams(
        final com.twitter.distributedlog.DistributedLogManagerFactory factory, final Collection<String> streams,
        final ExecutorService executorService, final BookKeeperClient bkc, final String digestpw,
        final int concurrency) throws IOException {
    final LinkedBlockingQueue<String> streamQueue = new LinkedBlockingQueue<String>();
    streamQueue.addAll(streams);
    final Map<String, StreamCandidate> candidateMap = new ConcurrentSkipListMap<String, StreamCandidate>();
    final AtomicInteger numPendingStreams = new AtomicInteger(streams.size());
    final CountDownLatch doneLatch = new CountDownLatch(1);
    Runnable checkRunnable = new Runnable() {
        @Override
        public void run() {
            while (!streamQueue.isEmpty()) {
                String stream;
                try {
                    stream = streamQueue.take();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
                StreamCandidate candidate;
                try {
                    LOG.info("Checking stream {}.", stream);
                    candidate = checkStream(factory, stream, executorService, bkc, digestpw);
                    LOG.info("Checked stream {} - {}.", stream, candidate);
                } catch (IOException e) {
                    LOG.error("Error on checking stream {} : ", stream, e);
                    doneLatch.countDown();
                    break;
                }
                if (null != candidate) {
                    candidateMap.put(stream, candidate);
                }
                if (numPendingStreams.decrementAndGet() == 0) {
                    doneLatch.countDown();
                }
            }
        }
    };
    Thread[] threads = new Thread[concurrency];
    for (int i = 0; i < concurrency; i++) {
        threads[i] = new Thread(checkRunnable, "check-thread-" + i);
        threads[i].start();
    }
    try {
        doneLatch.await();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    if (numPendingStreams.get() != 0) {
        throw new IOException(numPendingStreams.get() + " streams left w/o checked");
    }
    for (int i = 0; i < concurrency; i++) {
        threads[i].interrupt();
        try {
            threads[i].join();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
    return candidateMap;
}