Example usage for java.util.concurrent.atomic AtomicInteger get

Introduction

This page shows example usages of java.util.concurrent.atomic.AtomicInteger.get().

Prototype

public final int get() 

Document

Returns the current value, with memory effects as specified by VarHandle#getVolatile.
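
As a quick orientation before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) showing that get() performs a volatile read, so a value written by another thread is visible after that thread completes:

import java.util.concurrent.atomic.AtomicInteger;

public class AtomicIntegerGetExample {
    public static void main(String[] args) throws InterruptedException {
        final AtomicInteger counter = new AtomicInteger(0);

        // A writer thread performs several atomic increments.
        Thread writer = new Thread(() -> {
            for (int i = 0; i < 5; i++) {
                counter.incrementAndGet();
            }
        });
        writer.start();
        writer.join();

        // get() is a volatile read; after join() the final value is guaranteed visible.
        System.out.println("counter = " + counter.get()); // prints: counter = 5
    }
}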

Usage

From source file:org.eclipse.hono.deviceregistry.FileBasedRegistrationService.java

Future<Void> saveToFile() {

    if (!getConfig().isSaveToFile()) {
        return Future.succeededFuture();
    } else if (dirty) {
        return checkFileExists(true).compose(s -> {
            final AtomicInteger idCount = new AtomicInteger();
            final JsonArray tenants = new JsonArray();
            for (final Entry<String, Map<String, JsonObject>> entry : identities.entrySet()) {
                final JsonArray devices = new JsonArray();
                for (final Entry<String, JsonObject> deviceEntry : entry.getValue().entrySet()) {
                    devices.add(new JsonObject().put(FIELD_PAYLOAD_DEVICE_ID, deviceEntry.getKey())
                            .put(FIELD_DATA, deviceEntry.getValue()));
                    idCount.incrementAndGet();
                }
                tenants.add(new JsonObject().put(FIELD_TENANT, entry.getKey()).put(ARRAY_DEVICES, devices));
            }

            final Future<Void> writeHandler = Future.future();
            vertx.fileSystem().writeFile(getConfig().getFilename(),
                    Buffer.factory.buffer(tenants.encodePrettily()), writeHandler.completer());
            return writeHandler.map(ok -> {
                dirty = false;
                log.trace("successfully wrote {} device identities to file {}", idCount.get(),
                        getConfig().getFilename());
                return (Void) null;
            }).otherwise(t -> {
                log.warn("could not write device identities to file {}", getConfig().getFilename(), t);
                return (Void) null;
            });
        });
    } else {
        log.trace("registry does not need to be persisted");
        return Future.succeededFuture();
    }
}

From source file:com.streamsets.pipeline.stage.origin.spooldir.TestSpoolDirSource.java

private void readFilesMultipleThreads(String spoolDir, int numberOfThreads) throws Exception {
    SpoolDirConfigBean conf = new SpoolDirConfigBean();
    conf.dataFormat = DataFormat.TEXT;
    conf.spoolDir = spoolDir;
    conf.batchSize = 10;
    conf.overrunLimit = 100;
    conf.poolingTimeoutSecs = 1;
    conf.filePattern = "*file-[0-9].log";
    conf.pathMatcherMode = PathMatcherMode.GLOB;
    conf.maxSpoolFiles = 10;
    conf.initialFileToProcess = null;
    conf.dataFormatConfig.compression = Compression.NONE;
    conf.dataFormatConfig.filePatternInArchive = "*";
    conf.errorArchiveDir = null;
    conf.postProcessing = PostProcessingOptions.NONE;
    conf.retentionTimeMins = 10;
    conf.dataFormatConfig.textMaxLineLen = 10;
    conf.dataFormatConfig.onParseError = OnParseError.ERROR;
    conf.dataFormatConfig.maxStackTraceLines = 0;
    conf.allowLateDirectory = false;
    conf.numberOfThreads = numberOfThreads;

    SpoolDirSource source = new SpoolDirSource(conf);
    PushSourceRunner runner = new PushSourceRunner.Builder(SpoolDirDSource.class, source).addOutputLane("lane")
            .build();

    AtomicInteger batchCount = new AtomicInteger(0);
    final List<Record> records = Collections.synchronizedList(new ArrayList<>(10));
    runner.runInit();

    final int maxBatchSize = 10;

    try {
        runner.runProduce(new HashMap<>(), maxBatchSize, output -> {
            batchCount.incrementAndGet();

            synchronized (records) {
                records.addAll(output.getRecords().get("lane"));
            }

            if (records.size() == 50 || batchCount.get() > 10) {
                runner.setStop();
            }
        });

        runner.waitOnProduce();
        Assert.assertTrue(batchCount.get() > 1);
        TestOffsetUtil.compare("-file-9.log::-1", runner.getOffsets());
        Assert.assertEquals(50, records.size());
    } finally {
        runner.runDestroy();
    }
}
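
The example above counts batches from a concurrent push-source callback and reads the count with get() both in the stop condition and in the final assertion. A stripped-down sketch of the same pattern, with the StreamSets runner replaced by a plain executor (class and task names are illustrative only):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class BatchCounterSketch {
    public static void main(String[] args) throws InterruptedException {
        final AtomicInteger batchCount = new AtomicInteger(0);
        final int expectedBatches = 10;
        final CountDownLatch done = new CountDownLatch(expectedBatches);

        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < expectedBatches; i++) {
            // Each "batch" callback increments the shared counter.
            pool.execute(() -> {
                batchCount.incrementAndGet();
                done.countDown();
            });
        }

        done.await(5, TimeUnit.SECONDS);
        pool.shutdown();

        // get() gives a consistent view of the final count after all callbacks ran.
        if (batchCount.get() != expectedBatches) {
            throw new AssertionError("expected " + expectedBatches + " batches, got " + batchCount.get());
        }
    }
}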

From source file:org.apache.bookkeeper.replication.AuditorLedgerCheckerTest.java

private void addEntry(int numEntriesToWrite, LedgerHandle lh) throws InterruptedException, BKException {
    final CountDownLatch completeLatch = new CountDownLatch(numEntriesToWrite);
    final AtomicInteger rc = new AtomicInteger(BKException.Code.OK);

    for (int i = 0; i < numEntriesToWrite; i++) {
        ByteBuffer entry = ByteBuffer.allocate(4);
        entry.putInt(rng.nextInt(Integer.MAX_VALUE));
        entry.position(0);
        lh.asyncAddEntry(entry.array(), new AddCallback() {
            public void addComplete(int rc2, LedgerHandle lh, long entryId, Object ctx) {
                rc.compareAndSet(BKException.Code.OK, rc2);
                completeLatch.countDown();
            }
        }, null);
    }
    completeLatch.await();
    if (rc.get() != BKException.Code.OK) {
        throw BKException.create(rc.get());
    }

}
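
In this example the AtomicInteger works as a "first error wins" latch: compareAndSet(OK, rc2) stores only the first non-OK return code from the asynchronous add callbacks, and get() reads it once the latch has counted down. A minimal standalone sketch of that idiom, with BookKeeper replaced by plain threads and made-up result codes:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;

public class FirstErrorLatchSketch {
    private static final int OK = 0;

    public static void main(String[] args) throws InterruptedException {
        final int tasks = 5;
        final AtomicInteger firstError = new AtomicInteger(OK);
        final CountDownLatch completeLatch = new CountDownLatch(tasks);

        for (int i = 0; i < tasks; i++) {
            final int resultCode = (i == 3) ? -7 : OK; // pretend task 3 fails
            new Thread(() -> {
                // Only the first non-OK code is kept; later results cannot overwrite it.
                firstError.compareAndSet(OK, resultCode);
                completeLatch.countDown();
            }).start();
        }

        completeLatch.await();
        if (firstError.get() != OK) {
            System.out.println("operation failed with code " + firstError.get());
        }
    }
}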

From source file:com.unboundid.scim.marshal.json.JsonUnmarshaller.java

/**
 * {@inheritDoc}
 */
public void bulkUnmarshal(final File file, final BulkConfig bulkConfig, final BulkContentHandler handler)
        throws SCIMException {
    // First pass: ensure the number of operations is less than the max,
    // and save the failOnErrors value.
    final AtomicInteger failOnErrorsValue = new AtomicInteger(-1);
    final BulkContentHandler preProcessHandler = new BulkContentHandler() {
        @Override
        public void handleFailOnErrors(final int failOnErrors) {
            failOnErrorsValue.set(failOnErrors);
        }
    };
    try {
        final FileInputStream fileInputStream = new FileInputStream(file);
        try {
            final BufferedInputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
            try {
                final JsonBulkParser jsonBulkParser = new JsonBulkParser(bufferedInputStream, bulkConfig,
                        preProcessHandler);
                jsonBulkParser.setSkipOperations(true);
                jsonBulkParser.unmarshal();
            } finally {
                bufferedInputStream.close();
            }
        } finally {
            fileInputStream.close();
        }
    } catch (IOException e) {
        Debug.debugException(e);
        throw new ServerErrorException("Error pre-processing bulk request: " + e.getMessage());
    }

    int failOnErrors = failOnErrorsValue.get();
    if (failOnErrors != -1) {
        handler.handleFailOnErrors(failOnErrors);
    }

    // Second pass: Parse fully.
    try {
        final FileInputStream fileInputStream = new FileInputStream(file);
        try {
            final BufferedInputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
            try {
                final JsonBulkParser jsonBulkParser = new JsonBulkParser(bufferedInputStream, bulkConfig,
                        handler);
                jsonBulkParser.unmarshal();
            } finally {
                bufferedInputStream.close();
            }
        } finally {
            fileInputStream.close();
        }
    } catch (IOException e) {
        Debug.debugException(e);
        throw new ServerErrorException("Error parsing bulk request: " + e.getMessage());
    }
}
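
Here the AtomicInteger is not a counter at all but an effectively-final mutable holder: the anonymous BulkContentHandler cannot assign to a local int of the enclosing method, so the parsed failOnErrors value is passed out through set() and read back with get(). A small sketch of the same idiom against a hypothetical callback interface:

import java.util.concurrent.atomic.AtomicInteger;

public class CallbackHolderSketch {

    // Hypothetical callback interface, standing in for BulkContentHandler.
    interface ValueCallback {
        void onValue(int value);
    }

    static void produceValue(ValueCallback callback) {
        callback.onValue(42); // pretend this comes from parsing a file
    }

    public static void main(String[] args) {
        // A local int could not be reassigned from the anonymous class below,
        // so an AtomicInteger serves as a mutable, effectively-final holder.
        final AtomicInteger captured = new AtomicInteger(-1);

        produceValue(new ValueCallback() {
            @Override
            public void onValue(int value) {
                captured.set(value);
            }
        });

        if (captured.get() != -1) {
            System.out.println("captured value: " + captured.get());
        }
    }
}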

From source file:org.jtheque.file.FileServiceTest.java

@Test
@DirtiesContext
public void restore() {
    File backupFile = new File(SystemProperty.USER_DIR.get(), "backup.xml");

    createFakeBackupFile(backupFile);

    final AtomicInteger counter = new AtomicInteger(0);

    fileService.registerBackuper("no-module", new ModuleBackuper() {
        @Override
        public String getId() {
            return "test-backup";
        }

        @Override
        public String[] getDependencies() {
            return new String[0];
        }

        @Override
        public ModuleBackup backup() {
            fail("Backup must not be called");

            return null;
        }

        @Override
        public void restore(ModuleBackup backup) {
            assertEquals("test-backup", backup.getId());
            assertEquals(Version.get("1.0"), backup.getVersion());

            assertEquals(1, backup.getNodes().size());

            for (org.jtheque.xml.utils.Node node : backup.getNodes()) {
                assertEquals("simple", node.getName());
                assertEquals("true", node.getAttributeValue("test"));
            }

            counter.incrementAndGet();
        }
    });

    try {
        fileService.restore(backupFile);
    } catch (XMLException e) {
        fail(e.getMessage());
    }

    assertEquals(1, counter.get());
}

From source file:com.streamsets.pipeline.stage.origin.spooldir.TestSpoolDirSource.java

@Test
public void testWithMultipleThreadsInitialOffsets() throws Exception {
    // set up multiple test files
    File f = new File("target", UUID.randomUUID().toString());
    Assert.assertTrue(f.mkdirs());

    final int numFiles = 10;
    for (int i = 0; i < numFiles; i++) {
        FileOutputStream outputStream = new FileOutputStream(
                new File(f.getAbsolutePath(), "file-" + i + ".log"));
        // each file has 5 lines
        IOUtils.writeLines(ImmutableList.of("1", "2", "3", "4", "5"), "\n", outputStream);
        outputStream.close();
    }

    // Assume file-0.log and file-3.log were fully processed and
    // file-2.log was processed up to 1 line/record.
    // file-1.log will be skipped since it is ordered before file-3.log.
    Map<String, String> lastSourceOffsetMap = ImmutableMap.of(SpoolDirSource.OFFSET_VERSION, OFFSET_VERSION_ONE,
            "file-0.log", "{\"POS\":\"-1\"}", "file-2.log", "{\"POS\":\"2\"}", "file-3.log",
            "{\"POS\":\"-1\"}");

    SpoolDirConfigBean conf = new SpoolDirConfigBean();
    conf.dataFormat = DataFormat.TEXT;
    conf.spoolDir = f.getAbsolutePath();
    conf.batchSize = 10;
    conf.overrunLimit = 100;
    conf.poolingTimeoutSecs = 1;
    conf.filePattern = "file-[0-9].log";
    conf.pathMatcherMode = PathMatcherMode.GLOB;
    conf.maxSpoolFiles = 10;
    conf.initialFileToProcess = null;
    conf.dataFormatConfig.compression = Compression.NONE;
    conf.dataFormatConfig.filePatternInArchive = "*";
    conf.errorArchiveDir = null;
    conf.postProcessing = PostProcessingOptions.NONE;
    conf.retentionTimeMins = 10;
    conf.dataFormatConfig.textMaxLineLen = 10;
    conf.dataFormatConfig.onParseError = OnParseError.ERROR;
    conf.dataFormatConfig.maxStackTraceLines = 0;
    conf.allowLateDirectory = false;
    conf.numberOfThreads = 10;

    SpoolDirSource source = new SpoolDirSource(conf);
    PushSourceRunner runner = new PushSourceRunner.Builder(SpoolDirDSource.class, source).addOutputLane("lane")
            .build();

    AtomicInteger batchCount = new AtomicInteger(0);
    final List<Record> records = Collections.synchronizedList(new ArrayList<>(10));
    runner.runInit();

    final int maxBatchSize = 10;

    try {
        runner.runProduce(lastSourceOffsetMap, maxBatchSize, output -> {
            batchCount.incrementAndGet();

            synchronized (records) {
                records.addAll(output.getRecords().get("lane"));
            }

            if (records.size() == 34 || batchCount.get() > 10) {
                runner.setStop();
            }
        });

        runner.waitOnProduce();
        Assert.assertTrue(batchCount.get() > 1);
        TestOffsetUtil.compare("file-9.log::-1", runner.getOffsets());
        Assert.assertEquals(34, records.size());
    } finally {
        runner.runDestroy();
    }

}

From source file:com.zaubersoftware.gnip4j.api.impl.XMLActivityStreamFeedProcessorTest.java

/** test */
@Test
public final void test() throws IOException, ParseException {
    final InputStream is = getClass().getResourceAsStream("fanpage.xml");
    try {
        final AtomicInteger i = new AtomicInteger();
        final ObjectMapper mapper = JsonActivityFeedProcessor.getObjectMapper();
        final FeedProcessor p = new XMLActivityStreamFeedProcessor<Activity>("foo", new DirectExecuteService(),
                new StreamNotificationAdapter<Activity>() {
                    @Override
                    public void notify(final Activity activity, final GnipStream stream) {
                        i.incrementAndGet();
                        try {
                            final byte[] data0 = mapper.writeValueAsBytes(activity);
                            final Activity e = mapper.reader(Activity.class).readValue(data0);
                            final byte[] data1 = mapper.writeValueAsBytes(e);
                            assertArrayEquals(data0, data1);

                            // test serialization
                            final ObjectOutputStream os = new ObjectOutputStream(new ByteArrayOutputStream());
                            os.writeObject(activity);
                            os.close();
                        } catch (final Exception e) {
                            throw new RuntimeException(e);
                        }
                    }
                }, new ActivityUnmarshaller("hola"));
        p.process(is);
        assertEquals(23, i.get());
    } finally {
        IOUtils.closeQuietly(is);
    }
}

From source file:com.smartitengineering.cms.spi.impl.content.RubyGeneratorTest.java

@Test
public void testMultiRubyRepGeneration() throws IOException {
    TypeRepresentationGenerator generator = new RubyRepresentationGenerator();
    final RepresentationTemplate template = mockery.mock(RepresentationTemplate.class);
    WorkspaceAPIImpl impl = new WorkspaceAPIImpl() {

        @Override
        public RepresentationTemplate getRepresentationTemplate(WorkspaceId id, String name) {
            return template;
        }
    };
    impl.setRepresentationGenerators(Collections.singletonMap(TemplateType.RUBY, generator));
    final RepresentationProvider provider = new RepresentationProviderImpl();
    final WorkspaceAPI api = impl;
    registerBeanFactory(api);
    final Content content = mockery.mock(Content.class);
    final Field field = mockery.mock(Field.class);
    final FieldValue value = mockery.mock(FieldValue.class);
    final Map<String, Field> fieldMap = mockery.mock(Map.class);
    final ContentType type = mockery.mock(ContentType.class);
    final Map<String, RepresentationDef> reps = mockery.mock(Map.class, "repMap");
    final RepresentationDef def = mockery.mock(RepresentationDef.class);
    final int threadCount = new Random().nextInt(100);
    logger.info("Number of parallel threads " + threadCount);
    mockery.checking(new Expectations() {

        {
            exactly(threadCount).of(template).getTemplateType();
            will(returnValue(TemplateType.RUBY));
            exactly(threadCount).of(template).getTemplate();
            final byte[] toByteArray = IOUtils.toByteArray(
                    getClass().getClassLoader().getResourceAsStream("scripts/ruby/test-script.rb"));
            will(returnValue(toByteArray));
            exactly(threadCount).of(template).getName();
            will(returnValue(REP_NAME));
            for (int i = 0; i < threadCount; ++i) {
                exactly(1).of(value).getValue();
                will(returnValue(String.valueOf(i)));
            }
            exactly(threadCount).of(field).getValue();
            will(returnValue(value));
            exactly(threadCount).of(fieldMap).get(with(Expectations.<String>anything()));
            will(returnValue(field));
            exactly(threadCount).of(content).getFields();
            will(returnValue(fieldMap));
            exactly(threadCount).of(content).getContentDefinition();
            will(returnValue(type));
            final ContentId contentId = mockery.mock(ContentId.class);
            exactly(2 * threadCount).of(content).getContentId();
            will(returnValue(contentId));
            final WorkspaceId wId = mockery.mock(WorkspaceId.class);
            exactly(threadCount).of(contentId).getWorkspaceId();
            will(returnValue(wId));
            exactly(2 * threadCount).of(type).getRepresentationDefs();
            will(returnValue(reps));
            exactly(2 * threadCount).of(reps).get(with(REP_NAME));
            will(returnValue(def));
            exactly(threadCount).of(def).getParameters();
            will(returnValue(Collections.emptyMap()));
            exactly(threadCount).of(def).getMIMEType();
            will(returnValue(GroovyGeneratorTest.MIME_TYPE));
            final ResourceUri rUri = mockery.mock(ResourceUri.class);
            exactly(threadCount).of(def).getResourceUri();
            will(returnValue(rUri));
            exactly(threadCount).of(rUri).getValue();
            will(returnValue("iUri"));
        }
    });
    final Set<String> set = Collections.synchronizedSet(new LinkedHashSet<String>(threadCount));
    final List<String> list = Collections.synchronizedList(new ArrayList<String>(threadCount));
    final AtomicInteger integer = new AtomicInteger(0);
    Threads group = new Threads();
    for (int i = 0; i < threadCount; ++i) {
        group.addThread(new Thread(new Runnable() {

            public void run() {
                Representation representation = provider.getRepresentation(REP_NAME, type, content);
                Assert.assertNotNull(representation);
                Assert.assertEquals(REP_NAME, representation.getName());
                final String rep = StringUtils.newStringUtf8(representation.getRepresentation());
                list.add(rep);
                set.add(rep);
                Assert.assertEquals(GroovyGeneratorTest.MIME_TYPE, representation.getMimeType());
                integer.addAndGet(1);
            }
        }));
    }
    group.start();
    try {
        group.join();
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
    }
    logger.info("Generated reps list: " + list);
    logger.info("Generated reps set: " + set);
    Assert.assertEquals(threadCount, integer.get());
    Assert.assertEquals(threadCount, list.size());
    Assert.assertEquals(threadCount, set.size());
}
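
This test depends on addAndGet(1) being atomic across the worker threads and on get() observing the final total after the threads are joined. A reduced sketch of that structure, with the project's Threads helper replaced by plain java.lang.Thread:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class ParallelCounterSketch {
    public static void main(String[] args) throws InterruptedException {
        final int threadCount = 8;
        final AtomicInteger integer = new AtomicInteger(0);

        List<Thread> threads = new ArrayList<>(threadCount);
        for (int i = 0; i < threadCount; i++) {
            Thread t = new Thread(() -> integer.addAndGet(1)); // atomic increment per worker
            threads.add(t);
            t.start();
        }
        for (Thread t : threads) {
            t.join(); // join() establishes happens-before for the final read below
        }

        // All increments are visible here; get() returns exactly threadCount.
        if (integer.get() != threadCount) {
            throw new AssertionError("lost updates: " + integer.get());
        }
    }
}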

From source file:org.alfresco.bm.event.mongo.MongoResultServiceTest.java

/**
 * Test the case where the reporting period is smaller than the stats window
 */
@Test
public void getResultsUsingHandler() {
    pumpRecords(10);
    final long firstEventTime = resultService.getFirstResult().getStartTime();
    final long lastEventTime = resultService.getLastResult().getStartTime();

    final AtomicInteger count = new AtomicInteger();
    final Set<String> names = new HashSet<String>(17);

    resultService.getResults(new ResultHandler() {
        @Override
        public boolean processResult(long fromTime, long toTime,
                Map<String, DescriptiveStatistics> statsByEventName, Map<String, Integer> failuresByEventName)
                throws Throwable {
            if (toTime <= firstEventTime) {
                fail("The window is before the first event.");
            }
            if (fromTime > lastEventTime) {
                fail("The window is past the last event.");
            }
            assertEquals("Window not rebased. ", 0L, fromTime % 10L); // Rebased on reporting period
            assertEquals("Window size incorrect", 20L, toTime - fromTime);

            // Record all the event names we got
            names.addAll(statsByEventName.keySet());

            // Increment
            count.incrementAndGet();

            return true;
        }
    }, 0L, 20L, 10L, false);

    // Check
    assertEquals(10, count.get());
    assertEquals(resultService.getEventNames().size(), names.size());
}

From source file:com.smartitengineering.cms.spi.impl.content.JavascriptGeneratorTest.java

@Test
public void testMultiJavascriptRepGeneration() throws IOException {
    TypeRepresentationGenerator generator = new JavascriptRepresentationGenerator();
    final RepresentationTemplate template = mockery.mock(RepresentationTemplate.class);
    WorkspaceAPIImpl impl = new WorkspaceAPIImpl() {

        @Override
        public RepresentationTemplate getRepresentationTemplate(WorkspaceId id, String name) {
            return template;
        }
    };
    impl.setRepresentationGenerators(Collections.singletonMap(TemplateType.JAVASCRIPT, generator));
    final RepresentationProvider provider = new RepresentationProviderImpl();
    registerBeanFactory(impl);
    final Content content = mockery.mock(Content.class);
    final Field field = mockery.mock(Field.class);
    final FieldValue value = mockery.mock(FieldValue.class);
    final Map<String, Field> fieldMap = mockery.mock(Map.class);
    final ContentType type = mockery.mock(ContentType.class);
    final Map<String, RepresentationDef> reps = mockery.mock(Map.class, "repMap");
    final RepresentationDef def = mockery.mock(RepresentationDef.class);
    final int threadCount = new Random().nextInt(100);
    logger.info("Number of parallel threads " + threadCount);
    mockery.checking(new Expectations() {

        {
            exactly(threadCount).of(template).getTemplateType();
            will(returnValue(TemplateType.JAVASCRIPT));
            exactly(threadCount).of(template).getTemplate();
            final byte[] toByteArray = IOUtils
                    .toByteArray(getClass().getClassLoader().getResourceAsStream("scripts/js/test-script.js"));
            will(returnValue(toByteArray));
            exactly(threadCount).of(template).getName();
            will(returnValue(REP_NAME));
            for (int i = 0; i < threadCount; ++i) {
                exactly(1).of(value).getValue();
                will(returnValue(String.valueOf(i)));
            }
            exactly(threadCount).of(field).getValue();
            will(returnValue(value));
            exactly(threadCount).of(fieldMap).get(with(Expectations.<String>anything()));
            will(returnValue(field));
            exactly(threadCount).of(content).getFields();
            will(returnValue(fieldMap));
            exactly(threadCount).of(content).getContentDefinition();
            will(returnValue(type));
            final ContentId contentId = mockery.mock(ContentId.class);
            exactly(2 * threadCount).of(content).getContentId();
            will(returnValue(contentId));
            final WorkspaceId wId = mockery.mock(WorkspaceId.class);
            exactly(threadCount).of(contentId).getWorkspaceId();
            will(returnValue(wId));
            exactly(2 * threadCount).of(type).getRepresentationDefs();
            will(returnValue(reps));
            exactly(2 * threadCount).of(reps).get(with(REP_NAME));
            will(returnValue(def));
            exactly(threadCount).of(def).getParameters();
            will(returnValue(Collections.emptyMap()));
            exactly(threadCount).of(def).getMIMEType();
            will(returnValue(GroovyGeneratorTest.MIME_TYPE));
            final ResourceUri rUri = mockery.mock(ResourceUri.class);
            exactly(threadCount).of(def).getResourceUri();
            will(returnValue(rUri));
            exactly(threadCount).of(rUri).getValue();
            will(returnValue("iUri"));
        }
    });
    Assert.assertNotNull(SmartContentAPI.getInstance());
    Assert.assertNotNull(SmartContentAPI.getInstance().getContentLoader());
    final Set<String> set = Collections.synchronizedSet(new LinkedHashSet<String>(threadCount));
    final List<String> list = Collections.synchronizedList(new ArrayList<String>(threadCount));
    final AtomicInteger integer = new AtomicInteger(0);
    Threads group = new Threads();
    for (int i = 0; i < threadCount; ++i) {
        group.addThread(new Thread(new Runnable() {

            public void run() {
                Representation representation = provider.getRepresentation(REP_NAME, type, content);
                Assert.assertNotNull(representation);
                Assert.assertEquals(REP_NAME, representation.getName());
                final String rep = StringUtils.newStringUtf8(representation.getRepresentation());
                list.add(rep);
                set.add(rep);
                Assert.assertEquals(GroovyGeneratorTest.MIME_TYPE, representation.getMimeType());
                integer.addAndGet(1);
            }
        }));
    }
    group.start();
    try {
        group.join();
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
    }
    logger.info("Generated reps list: " + list);
    logger.info("Generated reps set: " + set);
    Assert.assertEquals(threadCount, integer.get());
    Assert.assertEquals(threadCount, list.size());
    Assert.assertEquals(threadCount, set.size());
}