Example usage for the java.util.concurrent.atomic.AtomicInteger(int) constructor

Introduction

On this page you can find example usage of the AtomicInteger(int) constructor from java.util.concurrent.atomic, collected from open source projects.

Prototype

public AtomicInteger(int initialValue) 

Document

Creates a new AtomicInteger with the given initial value.
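
Before the project examples, here is a minimal, self-contained sketch of the constructor and of the atomic operations the examples below rely on. The class name AtomicIntegerDemo and the literal values are illustrative only:

import java.util.concurrent.atomic.AtomicInteger;

public class AtomicIntegerDemo {
    public static void main(String[] args) throws InterruptedException {
        // Start the counter at an explicit initial value.
        AtomicInteger counter = new AtomicInteger(10);

        counter.incrementAndGet();           // counter is now 11
        counter.getAndIncrement();           // returns 11, counter is now 12
        int previous = counter.getAndSet(0); // returns 12, counter is now 0
        System.out.println("previous=" + previous + ", now=" + counter.get());

        // The read-modify-write step is atomic, so threads can share the
        // counter without any extra synchronization.
        Thread t1 = new Thread(counter::incrementAndGet);
        Thread t2 = new Thread(counter::incrementAndGet);
        t1.start();
        t2.start();
        t1.join();
        t2.join();
        System.out.println("after two threads: " + counter.get()); // prints 2
    }
}

Each of getAndIncrement, incrementAndGet, and getAndSet performs its read-modify-write step atomically, which is why the project examples below can share one counter across lambdas, listeners, and threads without additional locking.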

Usage

From source file:io.druid.benchmark.datagen.SegmentGenerator.java

public SegmentGenerator() {
    this.tempDir = Files.createTempDir();
    this.seed = new AtomicInteger(STARTING_SEED);
}

From source file:com.netflix.spinnaker.front50.migrations.LinearToParallelMigration.java

private void migrate(ItemDAO<Pipeline> dao, String type, Pipeline pipeline) {
    log.info(format("Migrating %s '%s' from linear -> parallel", type, pipeline.getId()));

    AtomicInteger refId = new AtomicInteger(0);
    List<Map<String, Object>> stages = (List<Map<String, Object>>) pipeline.getOrDefault("stages",
            Collections.emptyList());
    // Assign each stage a sequential refId; every stage after the first
    // requires the previous stage's refId.
    stages.forEach(stage -> {
        stage.put("refId", String.valueOf(refId.get()));
        if (refId.get() > 0) {
            stage.put("requisiteStageRefIds", Collections.singletonList(String.valueOf(refId.get() - 1)));
        } else {
            stage.put("requisiteStageRefIds", Collections.emptyList());
        }

        refId.incrementAndGet();
    });

    pipeline.put("parallel", true);
    dao.update(pipeline.getId(), pipeline);

    log.info(format("Migrated %s '%s' from linear -> parallel", type, pipeline.getId()));
}

From source file:com.ikanow.aleph2.analytics.services.TestGraphBuilderEnrichmentService.java

@Test
public void test_delegation() {
    final AtomicInteger wrapper_counter = new AtomicInteger(0);
    final AtomicInteger emit_counter = new AtomicInteger(0);
    final AtomicInteger init_counter = new AtomicInteger(0);
    final AtomicInteger done_counter = new AtomicInteger(0);

    final Streamable<Tuple2<Long, IBatchRecord>> test_stream = Streamable
            .of(Arrays.asList(_mapper.createObjectNode()))
            .<Tuple2<Long, IBatchRecord>>map(j -> Tuples._2T(0L, new BatchRecordUtils.JsonBatchRecord(j)));

    final IEnrichmentBatchModule delegate = Mockito.mock(IEnrichmentBatchModule.class);
    // Stub the delegate's lifecycle methods to bump the matching counters.
    Mockito.doAnswer(__ -> {
        init_counter.incrementAndGet();
        return null;
    }).when(delegate).onStageInitialize(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(),
            Mockito.any());
    Mockito.doAnswer(in -> {
        @SuppressWarnings("unchecked")
        final Stream<Tuple2<Long, IBatchRecord>> stream = (Stream<Tuple2<Long, IBatchRecord>>) in
                .getArguments()[0];
        stream.forEach(t2 -> emit_counter.incrementAndGet());
        return null;
    }).when(delegate).onObjectBatch(Mockito.any(), Mockito.any(), Mockito.any());
    Mockito.doAnswer(__ -> {
        done_counter.incrementAndGet();
        return null;
    }).when(delegate).onStageComplete(Mockito.anyBoolean());
    Mockito.when(delegate.cloneForNewGrouping()).thenReturn(delegate);
    Mockito.when(delegate.validateModule(Mockito.any(), Mockito.any(), Mockito.any()))
            .thenReturn(Arrays.asList(ErrorUtils.buildErrorMessage("", "", "")));

    final IGraphService throwing_graph_service = Mockito.mock(IGraphService.class);
    Mockito.when(throwing_graph_service.getUnderlyingPlatformDriver(Mockito.any(), Mockito.any()))
            .thenReturn(Optional.of(delegate));
    final MockServiceContext mock_service_context = new MockServiceContext();
    mock_service_context.addService(IGraphService.class, Optional.empty(), throwing_graph_service);
    final IEnrichmentModuleContext enrich_context = Mockito.mock(IEnrichmentModuleContext.class);
    Mockito.when(enrich_context.getServiceContext()).thenReturn(mock_service_context);
    Mockito.when(enrich_context.emitImmutableObject(Mockito.anyLong(), Mockito.any(), Mockito.any(),
            Mockito.any(), Mockito.any())).thenAnswer(invocation -> {
                wrapper_counter.incrementAndGet();
                return null;
            });

    final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
    final DataBucketBean bucket = BeanTemplateUtils
            .build(DataBucketBean.class).with(DataBucketBean::data_schema, BeanTemplateUtils
                    .build(DataSchemaBean.class).with(DataSchemaBean::graph_schema, graph_schema).done().get())
            .done().get();
    final EnrichmentControlMetadataBean control = BeanTemplateUtils.build(EnrichmentControlMetadataBean.class)
            .done().get();

    final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();
    under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
    under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
    under_test.onStageComplete(true);
    assertEquals(delegate, under_test.cloneForNewGrouping());
    assertEquals(1, under_test.validateModule(enrich_context, bucket, control).size());
    assertEquals(1, emit_counter.getAndSet(0));
    assertEquals(1, init_counter.getAndSet(0));
    assertEquals(1, done_counter.getAndSet(0));
    assertEquals(1, wrapper_counter.getAndSet(0));
}

From source file:com.b5m.plugin.spout.MetaTestSpout.java

public MetaTestSpout(String url, String topic, String group) {
    this.url = url;
    this.topic = topic;
    this.group = group;
    MessageHelper helper = new MessageHelper(this.url);
    final AtomicInteger counter = new AtomicInteger(0);
    // Subscribe to the topic and queue every message received.
    helper.Subscription(topic, group, new MessageListenerTemplate<Object>() {
        @Override
        public void recieveData(Message Message, Object data) { // method name (sic) as declared by the listener interface
            counter.incrementAndGet();
            clq.add(data);
            System.out.println("--->" + data);
        }
    });
}

From source file:com.streamsets.pipeline.stage.origin.jdbc.table.AllTypesIT.java

private static void populateRecords() {
    Record record = RecordCreator.create();
    LinkedHashMap<String, Field> fields;
    AtomicInteger id_field = new AtomicInteger(0);

    //CHAR_AND_BINARY
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("char1", Field.create("abcdefghij"));
    fields.put("varchar1", Field.create(UUID.randomUUID().toString()));
    fields.put("clob1", Field.create(UUID.randomUUID().toString()));
    fields.put("varbinary1", Field.create(UUID.randomUUID().toString().getBytes()));
    fields.put("blob1", Field.create(UUID.randomUUID().toString().getBytes()));
    record.set(Field.createListMap(fields));

    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("CHAR_AND_BINARY").getRight().add(record);

    //Date and time
    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    Calendar calendar = Calendar.getInstance();

    calendar.set(Calendar.HOUR_OF_DAY, 0);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    fields.put("date1", Field.create(Field.Type.DATE, calendar.getTime()));
    calendar.setTimeInMillis(System.currentTimeMillis());

    calendar.set(Calendar.MILLISECOND, 0);
    fields.put("timestamp1", Field.create(Field.Type.DATETIME, calendar.getTime()));
    fields.put("datetime1", Field.create(Field.Type.DATETIME, calendar.getTime()));
    calendar.setTimeInMillis(System.currentTimeMillis());

    calendar.set(Calendar.YEAR, 1970);
    calendar.set(Calendar.MONTH, Calendar.JANUARY);
    calendar.set(Calendar.DAY_OF_MONTH, 1);
    calendar.set(Calendar.MILLISECOND, 0);
    fields.put("time1", Field.create(Field.Type.TIME, calendar.getTime()));
    calendar.setTimeInMillis(System.currentTimeMillis());

    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("DATE_AND_TIME").getRight().add(record);

    //DIFFERENT_INTS
    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("int1", Field.create(Field.Type.INTEGER, Integer.MIN_VALUE));
    fields.put("int2", Field.create(Field.Type.INTEGER, Integer.MIN_VALUE));
    fields.put("mediumint1", Field.create(Field.Type.INTEGER, Integer.MIN_VALUE));
    fields.put("tinyint1", Field.create(Field.Type.SHORT, -128));
    fields.put("smallint1", Field.create(Field.Type.SHORT, Short.MIN_VALUE));
    fields.put("bigint1", Field.create(Field.Type.LONG, Long.MIN_VALUE));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("DIFFERENT_INTS").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("int1", Field.create(Field.Type.INTEGER, Integer.MAX_VALUE));
    fields.put("int2", Field.create(Field.Type.INTEGER, Integer.MAX_VALUE));
    fields.put("mediumint1", Field.create(Field.Type.INTEGER, Integer.MAX_VALUE));
    fields.put("tinyint1", Field.create(Field.Type.SHORT, 127));
    fields.put("smallint1", Field.create(Field.Type.SHORT, Short.MAX_VALUE));
    fields.put("bigint1", Field.create(Field.Type.LONG, Long.MAX_VALUE));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("DIFFERENT_INTS").getRight().add(record);

    //FLOATING_PT_INTS
    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("decimal1", Field.create(Field.Type.DECIMAL, new BigDecimal("12.345")));
    fields.put("number1", Field.create(Field.Type.DECIMAL, new BigDecimal("0.12345")));
    fields.put("double1", Field.create(Field.Type.DOUBLE, 123.456));
    fields.put("real1", Field.create(Field.Type.FLOAT, 12.34));
    fields.put("floatdouble1", Field.create(Field.Type.DOUBLE, Double.MAX_VALUE));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("FLOATING_PT_INTS").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("decimal1", Field.create(Field.Type.DECIMAL, new BigDecimal("-12.345")));
    fields.put("number1", Field.create(Field.Type.DECIMAL, new BigDecimal("-0.12345")));
    fields.put("double1", Field.create(Field.Type.DOUBLE, -123.456));
    fields.put("real1", Field.create(Field.Type.FLOAT, -12.34));
    fields.put("floatdouble1", Field.create(Field.Type.DOUBLE, Double.MIN_VALUE));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("FLOATING_PT_INTS").getRight().add(record);

    //OTHER_TYPES
    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("boolean1", Field.create(Field.Type.BOOLEAN, true));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("OTHER_TYPES").getRight().add(record);

    record = RecordCreator.create();
    fields = new LinkedHashMap<>();
    createIdField(fields, id_field);
    fields.put("boolean1", Field.create(Field.Type.BOOLEAN, false));
    record.set(Field.createListMap(fields));
    TABLE_TO_TEMPLATE_AND_RECORDS_MAP.get("OTHER_TYPES").getRight().add(record);
}

From source file:com.ethlo.geodata.importer.file.FileIpLookupImporter.java

@Override
public long importData() throws IOException {
    final Map.Entry<Date, File> ipDataFile = super.fetchResource(DataType.IP, url);
    final AtomicInteger count = new AtomicInteger(0);

    final File csvFile = ipDataFile.getValue();
    final long total = IoUtils.lineCount(csvFile);
    final ProgressListener prg = new ProgressListener(
            l -> publish(new DataLoadedEvent(this, DataType.IP, Operation.IMPORT, l, total)));

    final IpLookupImporter ipLookupImporter = new IpLookupImporter(csvFile);

    final JsonFactory f = new JsonFactory();
    f.enable(JsonGenerator.Feature.ESCAPE_NON_ASCII);
    f.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
    final ObjectMapper mapper = new ObjectMapper(f);

    final byte newLine = (byte) "\n".charAt(0);

    logger.info("Writing IP data to file {}", getFile().getAbsolutePath());
    try (final OutputStream out = new BufferedOutputStream(new FileOutputStream(getFile()))) {
        ipLookupImporter.processFile(entry -> {
            final String strGeoNameId = findMapValue(entry, "geoname_id", "represented_country_geoname_id",
                    "registered_country_geoname_id");
            final String strGeoNameCountryId = findMapValue(entry, "represented_country_geoname_id",
                    "registered_country_geoname_id");
            final Long geonameId = strGeoNameId != null ? Long.parseLong(strGeoNameId) : null;
            final Long geonameCountryId = strGeoNameCountryId != null ? Long.parseLong(strGeoNameCountryId)
                    : null;
            if (geonameId != null) {
                final SubnetUtils u = new SubnetUtils(entry.get("network"));
                final long lower = UnsignedInteger
                        .fromIntBits(InetAddresses
                                .coerceToInteger(InetAddresses.forString(u.getInfo().getLowAddress())))
                        .longValue();
                final long upper = UnsignedInteger
                        .fromIntBits(InetAddresses
                                .coerceToInteger(InetAddresses.forString(u.getInfo().getHighAddress())))
                        .longValue();
                final Map<String, Object> paramMap = new HashMap<>(5);
                paramMap.put("geoname_id", geonameId);
                paramMap.put("geoname_country_id", geonameCountryId);
                paramMap.put("first", lower);
                paramMap.put("last", upper);

                try {
                    mapper.writeValue(out, paramMap);
                    out.write(newLine);
                } catch (IOException exc) {
                    throw new DataAccessResourceFailureException(exc.getMessage(), exc);
                }
            }

            if (count.get() % 100_000 == 0) {
                logger.info("Processed {}", count.get());
            }

            count.getAndIncrement();

            prg.update();
        });
    }

    return total;
}

From source file:com.github.cbismuth.fdupes.io.PathOrganizer.java

private void moveUniqueFiles(final Path destination, final Iterable<PathElement> uniqueElements) {
    final AtomicInteger counter = new AtomicInteger(1);

    uniqueElements.forEach(pathElement -> {
        final Optional<Path> timestampPath = pathAnalyser.getTimestampPath(destination, pathElement.getPath());

        if (timestampPath.isPresent()) {
            onTimestampPath(pathElement, timestampPath.get());
        } else {
            onNoTimestampPath(destination, pathElement, counter);
        }
    });
}

From source file:de.uni_rostock.goodod.owl.OntologyCache.java

public OntologyCache(Set<? extends OWLOntologyIRIMapper> IRIMappers, Set<IRI> importsToIgnore, int threads) {
    threadCount = threads;
    pendingFutures = new AtomicInteger(0);
    executor = Executors.newFixedThreadPool(threadCount);
    OWLOntologyLoaderConfiguration interimConfig = new OWLOntologyLoaderConfiguration();

    for (IRI theIRI : importsToIgnore) {
        interimConfig = interimConfig.addIgnoredImport(theIRI);
    }
    interimConfig = interimConfig.setMissingImportHandlingStrategy(MissingImportHandlingStrategy.SILENT);
    config = interimConfig;
    mappers = IRIMappers;
    futures = new HashMap<URI, FutureTask<OWLOntology>>(24);
}

From source file:com.alibaba.wasp.executor.TestExecutorService.java

@Test
public void testExecutorService() throws Exception {
    int maxThreads = 5;
    int maxTries = 10;
    int sleepInterval = 10;

    Server mockedServer = mock(Server.class);
    when(mockedServer.getConfiguration()).thenReturn(conf);

    // Start an executor service pool with max 5 threads
    ExecutorService executorService = new ExecutorService("unit_test");
    executorService.startExecutorService(ExecutorType.MASTER_SERVER_OPERATIONS, maxThreads);

    Executor executor = executorService.getExecutor(ExecutorType.MASTER_SERVER_OPERATIONS);
    ThreadPoolExecutor pool = executor.threadPoolExecutor;

    // Assert no threads yet
    assertEquals(0, pool.getPoolSize());

    AtomicBoolean lock = new AtomicBoolean(true);
    AtomicInteger counter = new AtomicInteger(0);

    // Submit maxThreads executors.
    for (int i = 0; i < maxThreads; i++) {
        executorService.submit(new TestEventHandler(mockedServer, EventType.M_SERVER_SHUTDOWN, lock, counter));
    }

    // The TestEventHandler will increment counter when it starts.
    int tries = 0;
    while (counter.get() < maxThreads && tries < maxTries) {
        LOG.info("Waiting for all event handlers to start...");
        Thread.sleep(sleepInterval);
        tries++;
    }

    // Assert that pool is at max threads.
    assertEquals(maxThreads, counter.get());
    assertEquals(maxThreads, pool.getPoolSize());

    ExecutorStatus status = executor.getStatus();
    assertTrue(status.queuedEvents.isEmpty());
    assertEquals(5, status.running.size());
    checkStatusDump(status);

    // Now interrupt the running Executor
    synchronized (lock) {
        lock.set(false);
        lock.notifyAll();
    }

    // Each handler increments the counter again on the way out; verify that happened.
    while (counter.get() < (maxThreads * 2) && tries < maxTries) {
        System.out.println("Waiting for all event handlers to finish...");
        Thread.sleep(sleepInterval);
        tries++;
    }

    assertEquals(maxThreads * 2, counter.get());
    assertEquals(maxThreads, pool.getPoolSize());

    // Add more than the number of threads items.
    // Make sure we don't get RejectedExecutionException.
    for (int i = 0; i < (2 * maxThreads); i++) {
        executorService.submit(new TestEventHandler(mockedServer, EventType.M_SERVER_SHUTDOWN, lock, counter));
    }
    // Now interrupt the running Executor
    synchronized (lock) {
        lock.set(false);
        lock.notifyAll();
    }

    // Make sure threads are still around even after their timetolive expires.
    Thread.sleep(ExecutorService.Executor.keepAliveTimeInMillis * 2);
    assertEquals(maxThreads, pool.getPoolSize());

    executorService.shutdown();

    assertEquals(0, executorService.getAllExecutorStatuses().size());

    // Test that submit doesn't throw NPEs
    executorService.submit(new TestEventHandler(mockedServer, EventType.M_SERVER_SHUTDOWN, lock, counter));
}

From source file:org.jtheque.core.CoreTest.java

@Test
@DirtiesContext
public void applicationListener() {
    final AtomicInteger counter = new AtomicInteger(0);

    final Application launchedApplication = new TestApplication();

    core.addApplicationListener(new ApplicationListener() {
        @Override
        public void applicationLaunched(Application application) {
            assertEquals(application, launchedApplication);
            assertEquals(application, core.getApplication());

            counter.incrementAndGet();
        }
    });

    core.launchApplication(launchedApplication);

    assertEquals(1, counter.intValue());
}