List of usage examples for java.util.concurrent.atomic.AtomicInteger
public AtomicInteger(int initialValue)
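Creates a new AtomicInteger with the given initial value. For orientation before the sourced examples, here is a minimal, self-contained sketch of the constructor and a few common operations (class and variable names are our own, not taken from any of the files below):

import java.util.concurrent.atomic.AtomicInteger;

public class AtomicIntegerConstructorDemo {
    public static void main(String[] args) {
        // The constructor stores the given initial value; get() reads it back.
        AtomicInteger counter = new AtomicInteger(5);
        System.out.println(counter.get());                  // 5

        counter.incrementAndGet();                          // atomically 5 -> 6
        counter.addAndGet(10);                              // atomically 6 -> 16
        int previous = counter.getAndSet(0);                // returns 16, leaves 0
        System.out.println(previous + " " + counter.get()); // 16 0
    }
}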
From source file:fr.wseduc.webutils.email.SendInBlueSender.java
@Override
protected void sendEmail(JsonObject json, final Handler<Message<JsonObject>> handler) {
    if (json == null || json.getArray("to") == null || json.getString("from") == null
            || json.getString("subject") == null || json.getString("body") == null) {
        handler.handle(new ResultMessage().error("invalid.parameters"));
        return;
    }
    if (splitRecipients && json.getArray("to").size() > 1) {
        // One send per recipient: the AtomicInteger counts down outstanding callbacks.
        final AtomicInteger count = new AtomicInteger(json.getArray("to").size());
        final AtomicBoolean success = new AtomicBoolean(true);
        final JsonArray errors = new JsonArray();
        final Handler<Message<JsonObject>> h = new Handler<Message<JsonObject>>() {
            @Override
            public void handle(Message<JsonObject> message) {
                if (!"ok".equals(message.body().getString("status"))) {
                    success.set(false);
                    errors.addString(message.body().getString("message"));
                }
                if (count.decrementAndGet() == 0) {
                    if (success.get()) {
                        handler.handle(new ResultMessage());
                    } else {
                        handler.handle(new ResultMessage().error(errors.encode()));
                    }
                }
            }
        };
        for (Object to : json.getArray("to")) {
            send(json.copy().putArray("to", new JsonArray().addString(to.toString())), h);
        }
    } else {
        send(json, handler);
    }
}
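The example above uses AtomicInteger as a completion countdown for fan-in: the counter starts at the number of recipients, and whichever callback decrements it to zero reports the overall result. A minimal sketch of the same pattern with plain threads (all names are ours, not from the source file):

import java.util.concurrent.atomic.AtomicInteger;

public class FanInDemo {
    public static void main(String[] args) throws InterruptedException {
        final int tasks = 3;
        // Initialized to the number of outstanding callbacks; whichever
        // thread decrements the counter to zero performs the final action.
        AtomicInteger remaining = new AtomicInteger(tasks);

        Runnable callback = () -> {
            // ... per-task work would go here ...
            if (remaining.decrementAndGet() == 0) {
                System.out.println("all " + tasks + " tasks finished");
            }
        };

        Thread[] threads = new Thread[tasks];
        for (int i = 0; i < tasks; i++) {
            (threads[i] = new Thread(callback)).start();
        }
        for (Thread t : threads) {
            t.join();
        }
    }
}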
From source file:dk.statsbiblioteket.util.JobControllerTest.java
public void TestAutoEmptyMultiPoll() throws InterruptedException {
    final int JOBS = 10;
    final AtomicInteger counter = new AtomicInteger(0);
    JobController<Long> controller = new JobController<Long>(10, true) {
        @Override
        protected void afterExecute(Future<Long> finished) {
            counter.incrementAndGet();
        }
    };
    for (int i = 0; i < JOBS; i++) {
        controller.submit(new Shout(JOBS / 4));
        synchronized (Thread.currentThread()) {
            Thread.currentThread().wait(JOBS / 10);
        }
    }
    int popped = controller.popAll().size();
    assertEquals("The auto removed count should be all the jobs", JOBS, counter.get());
    assertEquals("The JobController should be empty", 0, controller.getTaskCount());
    assertTrue("The number of explicit popped jobs should be > 0 and < " + JOBS + " but was " + popped,
            popped > 0 && popped < JOBS);
}
From source file:com.ikanow.aleph2.analytics.services.TestGraphBuilderEnrichmentService.java
@Test
public void test_empty() {
    final AtomicInteger counter = new AtomicInteger(0);
    final Streamable<Tuple2<Long, IBatchRecord>> test_stream = Streamable
            .of(Arrays.asList(_mapper.createObjectNode()))
            .<Tuple2<Long, IBatchRecord>>map(j -> Tuples._2T(0L, new BatchRecordUtils.JsonBatchRecord(j)));

    final IGraphService throwing_graph_service = Mockito.mock(IGraphService.class);
    Mockito.when(throwing_graph_service.getUnderlyingPlatformDriver(Mockito.any(), Mockito.any()))
            .thenThrow(new RuntimeException("getUnderlyingPlatformDriver"));

    final MockServiceContext mock_service_context = new MockServiceContext();
    final IEnrichmentModuleContext enrich_context = Mockito.mock(IEnrichmentModuleContext.class);
    Mockito.when(enrich_context.getServiceContext()).thenReturn(mock_service_context);
    // Count every emitted object so each block below can assert exactly one emission.
    Mockito.when(enrich_context.emitImmutableObject(Mockito.anyLong(), Mockito.any(), Mockito.any(),
            Mockito.any(), Mockito.any())).thenAnswer(invocation -> {
                counter.incrementAndGet();
                return null;
            });

    // Bucket enabled but no graph service
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();
        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).done().get();

        under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
        under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
        under_test.onStageComplete(true);
        assertEquals(under_test, under_test.cloneForNewGrouping());
        assertEquals(Collections.emptyList(), under_test.validateModule(enrich_context, bucket, control));
        assertEquals(1, counter.getAndSet(0));
    }
    // Use override
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();
        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
        final GraphConfigBean graph_config = BeanTemplateUtils.build(GraphConfigBean.class)
                .with(GraphConfigBean::graph_schema_override, graph_schema).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).with(EnrichmentControlMetadataBean::config,
                        new LinkedHashMap<String, Object>(BeanTemplateUtils.toMap(graph_config)))
                .done().get();

        under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
        under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
        under_test.onStageComplete(true);
        assertEquals(under_test, under_test.cloneForNewGrouping());
        assertEquals(Collections.emptyList(), under_test.validateModule(enrich_context, bucket, control));
        assertEquals(1, counter.getAndSet(0));
    }

    mock_service_context.addService(IGraphService.class, Optional.empty(), throwing_graph_service);

    // Add graph service, check it starts failing (bucket enabled)
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();
        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).done().get();
        try {
            under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null),
                    Optional.empty());
            fail("Should have thrown");
        } catch (Exception e) {
        }
    }
    // Add graph service, check it starts failing (override)
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();
        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
        final GraphConfigBean graph_config = BeanTemplateUtils.build(GraphConfigBean.class)
                .with(GraphConfigBean::graph_schema_override, graph_schema).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).with(EnrichmentControlMetadataBean::config,
                        new LinkedHashMap<String, Object>(BeanTemplateUtils.toMap(graph_config)))
                .done().get();
        try {
            under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null),
                    Optional.empty());
            fail("Should have thrown");
        } catch (Exception e) {
        }
    }
    // From bucket, graph service disabled, won't fail
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class).done().get(); // (no data_schema.graph_schema)
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).done().get();

        under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
        under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
        under_test.onStageComplete(true);
        assertEquals(under_test, under_test.cloneForNewGrouping());
        assertEquals(Collections.emptyList(), under_test.validateModule(enrich_context, bucket, control));
        assertEquals(1, counter.getAndSet(0));
    }
    // From override, graph service disabled, won't fail
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();
        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class)
                .with(GraphSchemaBean::enabled, false).done().get();
        final GraphConfigBean graph_config = BeanTemplateUtils.build(GraphConfigBean.class)
                .with(GraphConfigBean::graph_schema_override, graph_schema).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).with(EnrichmentControlMetadataBean::config,
                        new LinkedHashMap<String, Object>(BeanTemplateUtils.toMap(graph_config)))
                .done().get();

        under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
        under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
        under_test.onStageComplete(true);
        assertEquals(under_test, under_test.cloneForNewGrouping());
        assertEquals(Collections.emptyList(), under_test.validateModule(enrich_context, bucket, control));
        assertEquals(1, counter.getAndSet(0));
    }
}
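Each block above ends with counter.getAndSet(0), which reads the count and clears it in a single atomic step so the next block starts from zero. A minimal sketch of that read-and-reset idiom (names are ours):

import java.util.concurrent.atomic.AtomicInteger;

public class ReadAndResetDemo {
    public static void main(String[] args) {
        AtomicInteger events = new AtomicInteger(0);

        events.incrementAndGet();
        events.incrementAndGet();

        // Atomically fetch the current total and clear it for the next round.
        int firstRound = events.getAndSet(0);
        System.out.println(firstRound);   // 2
        System.out.println(events.get()); // 0
    }
}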
From source file:com.netflix.curator.framework.recipes.barriers.TestDistributedDoubleBarrier.java
@Test
public void testBasic() throws Exception {
    final Timing timing = new Timing();
    final List<Closeable> closeables = Lists.newArrayList();
    final CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(),
            timing.session(), timing.connection(), new RetryOneTime(1));
    try {
        closeables.add(client);
        client.start();

        final CountDownLatch postEnterLatch = new CountDownLatch(QTY);
        final CountDownLatch postLeaveLatch = new CountDownLatch(QTY);
        // count tracks how many threads are inside the barrier; max records the peak.
        final AtomicInteger count = new AtomicInteger(0);
        final AtomicInteger max = new AtomicInteger(0);

        List<Future<Void>> futures = Lists.newArrayList();
        ExecutorService service = Executors.newCachedThreadPool();
        for (int i = 0; i < QTY; ++i) {
            Future<Void> future = service.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    DistributedDoubleBarrier barrier = new DistributedDoubleBarrier(client, "/barrier", QTY);

                    Assert.assertTrue(barrier.enter(timing.seconds(), TimeUnit.SECONDS));

                    synchronized (TestDistributedDoubleBarrier.this) {
                        int thisCount = count.incrementAndGet();
                        if (thisCount > max.get()) {
                            max.set(thisCount);
                        }
                    }

                    postEnterLatch.countDown();
                    Assert.assertTrue(timing.awaitLatch(postEnterLatch));
                    Assert.assertEquals(count.get(), QTY);

                    Assert.assertTrue(barrier.leave(10, TimeUnit.SECONDS));
                    count.decrementAndGet();

                    postLeaveLatch.countDown();
                    Assert.assertTrue(timing.awaitLatch(postLeaveLatch));

                    return null;
                }
            });
            futures.add(future);
        }

        for (Future<Void> f : futures) {
            f.get();
        }
        Assert.assertEquals(count.get(), 0);
        Assert.assertEquals(max.get(), QTY);
    } finally {
        for (Closeable c : closeables) {
            IOUtils.closeQuietly(c);
        }
    }
}
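The synchronized block above protects a read-then-write of max. Since Java 8, the same peak tracking can be done lock-free with accumulateAndGet, which atomically folds a new sample into the stored value. A minimal sketch of that alternative (names are ours, not from the Curator test):

import java.util.concurrent.atomic.AtomicInteger;

public class PeakTrackerDemo {
    public static void main(String[] args) throws InterruptedException {
        AtomicInteger active = new AtomicInteger(0);
        AtomicInteger peak = new AtomicInteger(0);

        Runnable worker = () -> {
            int now = active.incrementAndGet();
            // Atomically fold the new sample into the running maximum.
            peak.accumulateAndGet(now, Math::max);
            try {
                Thread.sleep(50); // simulate work while "inside"
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            active.decrementAndGet();
        };

        Thread[] threads = new Thread[4];
        for (int i = 0; i < threads.length; i++) {
            (threads[i] = new Thread(worker)).start();
        }
        for (Thread t : threads) {
            t.join();
        }
        System.out.println("peak concurrency: " + peak.get());
    }
}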
From source file:de.fosd.jdime.artifact.file.FileArtifact.java
/**
 * Constructs a new <code>FileArtifact</code> representing the given <code>File</code>. If <code>file</code> is a
 * directory then <code>FileArtifact</code>s representing its contents will be added as children to this
 * <code>FileArtifact</code>.
 *
 * @param revision
 *         the <code>Revision</code> the artifact belongs to
 * @param file
 *         the <code>File</code> in which the artifact is stored
 * @throws IllegalArgumentException
 *         if {@code file} does not exist
 */
public FileArtifact(Revision revision, File file) {
    this(revision, new AtomicInteger(0)::getAndIncrement, file, true);
}
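The expression new AtomicInteger(0)::getAndIncrement above binds a method reference to a fresh counter, producing a compact thread-safe supplier of unique, increasing numbers. A minimal standalone sketch of the same idiom (names are ours):

import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.IntSupplier;

public class IdSupplierDemo {
    public static void main(String[] args) {
        // Bind getAndIncrement to a fresh AtomicInteger: each call yields 0, 1, 2, ...
        IntSupplier nextId = new AtomicInteger(0)::getAndIncrement;

        System.out.println(nextId.getAsInt()); // 0
        System.out.println(nextId.getAsInt()); // 1
        System.out.println(nextId.getAsInt()); // 2
    }
}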
From source file:hd3gtv.embddb.network.DataBlock.java
public String toString() {
    AtomicInteger all_size = new AtomicInteger(0);
    entries.forEach(block -> {
        all_size.addAndGet(block.getLen());
    });

    if (entries.size() == 1) {
        return request_name + " (" + all_size.get() + " bytes in 1 item)";
    } else {
        return request_name + " (" + all_size.get() + " bytes in " + entries.size() + " items)";
    }
}
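A note on the idiom above: AtomicInteger serves as a mutable int box that a lambda may capture, since local variables used in lambdas must be effectively final. For a pure sum, a stream reduction is equivalent and avoids shared mutable state. A minimal sketch contrasting the two (names are ours, using string lengths in place of the original getLen()):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class LambdaCounterDemo {
    public static void main(String[] args) {
        List<String> entries = Arrays.asList("ab", "cde", "f");

        // An effectively-final AtomicInteger lets the lambda accumulate a total.
        AtomicInteger allSize = new AtomicInteger(0);
        entries.forEach(e -> allSize.addAndGet(e.length()));
        System.out.println(allSize.get()); // 6

        // Equivalent stream reduction when no shared mutable state is needed.
        int sum = entries.stream().mapToInt(String::length).sum();
        System.out.println(sum); // 6
    }
}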
From source file:com.ngdata.hbaseindexer.indexer.FusionPipelineClient.java
public FusionPipelineClient(String endpointUrl, String fusionUser, String fusionPass, String fusionRealm)
        throws MalformedURLException {

    this.fusionUser = fusionUser;
    this.fusionPass = fusionPass;
    this.fusionRealm = fusionRealm;

    String fusionLoginConf = System.getProperty(FusionKrb5HttpClientConfigurer.LOGIN_CONFIG_PROP);
    if (fusionLoginConf != null && !fusionLoginConf.isEmpty()) {
        httpClient = FusionKrb5HttpClientConfigurer.createClient(fusionUser);
        isKerberos = true;
    } else {
        globalConfig = RequestConfig.custom().setCookieSpec(CookieSpecs.BEST_MATCH).build();
        cookieStore = new BasicCookieStore();

        // build the HttpClient to be used for all requests
        HttpClientBuilder httpClientBuilder = HttpClientBuilder.create();
        httpClientBuilder.setDefaultRequestConfig(globalConfig).setDefaultCookieStore(cookieStore);
        httpClientBuilder.setMaxConnPerRoute(100);
        httpClientBuilder.setMaxConnTotal(500);

        if (fusionUser != null && fusionRealm == null)
            httpClientBuilder.addInterceptorFirst(new PreEmptiveBasicAuthenticator(fusionUser, fusionPass));

        httpClient = httpClientBuilder.build();
    }

    originalEndpoints = Arrays.asList(endpointUrl.split(","));
    try {
        sessions = establishSessions(originalEndpoints, fusionUser, fusionPass, fusionRealm);
    } catch (Exception exc) {
        if (exc instanceof RuntimeException) {
            throw (RuntimeException) exc;
        } else {
            throw new RuntimeException(exc);
        }
    }

    random = new Random();
    jsonObjectMapper = new ObjectMapper();
    requestCounter = new AtomicInteger(0);
}
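The constructor above initializes a requestCounter but this snippet does not show how it is consumed. A common use for such a counter in a multi-endpoint client is round-robin selection; the following is only a hedged sketch of that general pattern, with hypothetical endpoint names, not the actual FusionPipelineClient logic:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class RoundRobinDemo {
    private static final List<String> ENDPOINTS = Arrays.asList("hostA", "hostB", "hostC");
    private static final AtomicInteger requestCounter = new AtomicInteger(0);

    // Each call picks the next endpoint; floorMod keeps the index
    // non-negative even after the int counter eventually overflows.
    static String nextEndpoint() {
        int i = Math.floorMod(requestCounter.getAndIncrement(), ENDPOINTS.size());
        return ENDPOINTS.get(i);
    }

    public static void main(String[] args) {
        for (int i = 0; i < 5; i++) {
            System.out.println(nextEndpoint()); // hostA, hostB, hostC, hostA, hostB
        }
    }
}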
From source file:interactivespaces.activity.component.ActivityComponentContextTest.java
/**
 * Make a couple of threads start running and see if they properly stop
 * running when the context signals startup failure.
 */
@Test
public void testStartupWaitWithTwoThreadsFailure() throws Exception {
    final CountDownLatch startLatch = new CountDownLatch(2);
    final CountDownLatch stopLatch = new CountDownLatch(2);

    final AtomicInteger countAllowedHandlers = new AtomicInteger(0);

    Runnable runnable = new Runnable() {
        @Override
        public void run() {
            startLatch.countDown();

            if (context.canHandlerRun()) {
                countAllowedHandlers.incrementAndGet();
            }

            stopLatch.countDown();
        }
    };

    executor.execute(runnable);
    executor.execute(runnable);

    // Make sure they have both entered before starting the wait.
    Assert.assertTrue(startLatch.await(500, TimeUnit.MILLISECONDS));

    context.endStartupPhase(false);

    // Make sure they have both exited before checking the count.
    Assert.assertTrue(stopLatch.await(500, TimeUnit.MILLISECONDS));

    // No handlers should have been allowed.
    Assert.assertEquals(0, countAllowedHandlers.get());
}
From source file:ws.antonov.config.consumer.ConfigClientTest.java
@SuppressWarnings({ "unchecked" }) public void testCachingConfigClientWrapper() throws Exception { FileInputStream fis = new FileInputStream("build/classes/test/config.pb"); final FlatConfigObject msg = FlatConfigObject.parseFrom(fis); final AtomicInteger accessCount = new AtomicInteger(0); ConfigClient client = new ConfigClient() { @Override/*from www . ja v a2s. c om*/ public Message getConfig(Class configClass, ConfigParamsBuilder.ConfigParamsMap configParams) { accessCount.incrementAndGet(); if (configParams.size() == 0) return msg; else return null; } @Override public ConfigProvider getConfigProvider() { return null; } @Override public boolean reloadConfig() { return true; } }; Map objects = new HashMap(); Set keys = new HashSet(); CachingConfigClientWrapper cachingConfig = new CachingConfigClientWrapper(client, objects, keys); assertEquals(0, accessCount.get()); assertEquals(0, cachingConfig.getObjectCache().size()); assertEquals(0, cachingConfig.getNegativeCache().size()); assertEquals(cachingConfig.getConfig(FlatConfigObject.class, ConfigParamsBuilder.newInstance().build()), msg); assertEquals(1, accessCount.get()); assertEquals(1, cachingConfig.getObjectCache().size()); assertEquals(0, cachingConfig.getNegativeCache().size()); assertEquals(cachingConfig.getConfig(FlatConfigObject.class, ConfigParamsBuilder.newInstance().build()), msg); assertEquals(1, accessCount.get()); assertEquals(1, cachingConfig.getObjectCache().size()); assertEquals(0, cachingConfig.getNegativeCache().size()); assertNull(cachingConfig.getConfig(FlatConfigObject.class, ConfigParamsBuilder.newInstance("foo", "bar").build())); assertEquals(2, accessCount.get()); assertEquals(1, cachingConfig.getObjectCache().size()); assertEquals(1, cachingConfig.getNegativeCache().size()); assertNull(cachingConfig.getConfig(FlatConfigObject.class, ConfigParamsBuilder.newInstance("foo", "bar").build())); assertEquals(2, accessCount.get()); assertEquals(1, cachingConfig.getObjectCache().size()); assertEquals(1, cachingConfig.getNegativeCache().size()); }
From source file:com.streamsets.pipeline.stage.origin.spooldir.TestWholeFileSpoolDirSource.java
@Test
public void testWholeFileRecordsCopy() throws Exception {
    Path sourcePath = Paths.get(testDir + "/source.txt");
    Files.write(sourcePath, "Sample Text 1".getBytes());

    SpoolDirSource source = createSource();
    PushSourceRunner runner = new PushSourceRunner.Builder(SpoolDirDSource.class, source).addOutputLane("lane")
            .setOnRecordError(OnRecordError.TO_ERROR).build();

    final List<Record> records = Collections.synchronizedList(new ArrayList<>(10));
    AtomicInteger batchCount = new AtomicInteger(0);

    runner.runInit();
    try {
        runner.runProduce(new HashMap<>(), 10, output2 -> {
            synchronized (records) {
                records.addAll(output2.getRecords().get("lane"));
            }
            batchCount.incrementAndGet();
            runner.setStop();
        });
        runner.waitOnProduce();

        Assert.assertNotNull(records);
        Assert.assertEquals(1, records.size());
        Record record = records.get(0);
        Assert.assertTrue(record.has(FileRefUtil.FILE_INFO_FIELD_PATH));
        Assert.assertTrue(record.has(FileRefUtil.FILE_REF_FIELD_PATH));

        FileRef fileRef = record.get(FileRefUtil.FILE_REF_FIELD_PATH).getValueAsFileRef();
        String targetFile = testDir + "/target.txt";

        Stage.Context context = (Stage.Context) Whitebox.getInternalState(source, "context");
        initMetrics(context);

        IOUtils.copy(fileRef.createInputStream(context, InputStream.class), new FileOutputStream(targetFile));

        // Now make sure the file is copied properly.
        checkFileContent(new FileInputStream(sourcePath.toString()), new FileInputStream(targetFile));
    } finally {
        runner.runDestroy();
    }
}