Example usage for java.util.concurrent.atomic AtomicInteger getAndSet

List of usage examples for java.util.concurrent.atomic AtomicInteger getAndSet

Introduction

On this page you can find example usages of java.util.concurrent.atomic AtomicInteger getAndSet.

Prototype

public final int getAndSet(int newValue) 

Source Link

Document

Atomically sets the value to newValue and returns the old value, with memory effects as specified by VarHandle#getAndSet.
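For orientation, here is a minimal, self-contained sketch of that contract (the class name is illustrative; this is not drawn from any of the projects below):

import java.util.concurrent.atomic.AtomicInteger;

public class GetAndSetDemo {
    public static void main(String[] args) {
        AtomicInteger value = new AtomicInteger(1);
        // The swap is atomic: the returned value is exactly what the
        // variable held immediately before newValue was stored.
        int old = value.getAndSet(5);
        System.out.println(old);         // 1
        System.out.println(value.get()); // 5
    }
}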

Usage

From source file:Main.java

public static int ThrsafeExchange(AtomicInteger paoDestination, int aoExchange) {
    // Equivalent non-atomic sequence, from the original C-style code:
    //   atomicord32 aoDestinationValue = *paoDestination;
    //   *paoDestination = aoExchange;
    //   return aoDestinationValue;
    return paoDestination.getAndSet(aoExchange);
}
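The three commented-out lines show the read/write/return sequence that getAndSet collapses into a single atomic step; performed separately, another thread could update the value between the read and the write, and that update would be lost.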

From source file:com.bigdata.dastor.utils.FBUtilities.java

public static void atomicSetMax(AtomicInteger atomic, int i) {
    // Swap i in; if the displaced value was larger, retry with it, so the
    // atomic ends up holding the maximum of its old value and i.
    int j;
    while (true) {
        if ((j = atomic.getAndSet(i)) <= i)
            break;
        i = j;
    }
}
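The loop converges because each iteration either observes a displaced value no larger than i and stops, or retries with a strictly larger i. On Java 8 and later, the same set-to-maximum effect can be written more directly with accumulateAndGet, which runs the retry loop internally; a sketch under that assumption:

import java.util.concurrent.atomic.AtomicInteger;

public static void atomicSetMax(AtomicInteger atomic, int i) {
    // accumulateAndGet retries a compare-and-swap until the stored value
    // is the maximum of the current value and i.
    atomic.accumulateAndGet(i, Math::max);
}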

From source file:com.adobe.acs.commons.mcp.impl.processes.DeepPrune.java

private void purgeJobs(ActionManager manager) {
    ActionBatch batch = new ActionBatch(manager, batchSize);
    batch.setRetryCount(retryCount);
    batch.setRetryWait(retryWait);
    TreeFilteringResourceVisitor visitor = new TreeFilteringResourceVisitor();
    visitor.setDepthFirstMode();
    visitor.setTraversalFilter(res -> visitor.isFolder(res) && !shouldIgnore(res));
    AtomicInteger lastLevel = new AtomicInteger(0);
    visitor.setResourceVisitor((res, level) -> {
        if (level >= minPurgeDepth && !shouldIgnore(res) && folderRule.matcher.apply(res.getName())) {
            if (lastLevel.getAndSet(level) != level) {
                batch.commitBatch();
            }
            String path = res.getPath();
            batch.add(rr -> deleteResource(rr, path));
        }
    });
    visitor.setLeafVisitor((res, level) -> {
        if (!shouldIgnore(res)) {
            if (lastLevel.getAndSet(level) != level) {
                batch.commitBatch();
            }
            String path = res.getPath();
            batch.add(rr -> deleteResource(rr, path));
        }
    });
    manager.deferredWithResolver(rr -> {
        Resource res = rr.getResource(startingFolder);
        if (res != null) {
            visitor.accept(res);
        }
        batch.commitBatch();
    });
}
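Both visitors use lastLevel.getAndSet(level) != level to commit the batch exactly once per level transition: the call records the new level and reports the previous one in a single atomic step. A stripped-down sketch of that idiom (class and method names are illustrative):

import java.util.concurrent.atomic.AtomicInteger;

class LevelChangeDetector {
    private final AtomicInteger lastLevel = new AtomicInteger(0);

    // Returns true exactly once per transition, even with concurrent
    // callers, because the store of the new level and the load of the
    // old one happen as a single atomic operation.
    boolean levelChanged(int level) {
        return lastLevel.getAndSet(level) != level;
    }
}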

From source file:info.archinnov.achilles.it.TestDSLSimpleEntity.java

@Test
public void should_dsl_select_with_options() throws Exception {
    //Given
    final Map<String, Object> values = new HashMap<>();
    final long id = RandomUtils.nextLong(0L, Long.MAX_VALUE);
    values.put("id", id);
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
    dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
    final Date date1 = dateFormat.parse("2015-10-01 00:00:00 GMT");
    final Date date2 = dateFormat.parse("2015-10-02 00:00:00 GMT");
    final Date date3 = dateFormat.parse("2015-10-03 00:00:00 GMT");
    final Date date6 = dateFormat.parse("2015-10-06 00:00:00 GMT");

    values.put("date1", "'2015-10-01 00:00:00+0000'");
    values.put("date2", "'2015-10-02 00:00:00+0000'");
    values.put("date3", "'2015-10-03 00:00:00+0000'");
    values.put("date4", "'2015-10-04 00:00:00+0000'");
    values.put("date5", "'2015-10-05 00:00:00+0000'");
    values.put("date6", "'2015-10-06 00:00:00+0000'");
    values.put("date7", "'2015-10-07 00:00:00+0000'");
    values.put("date8", "'2015-10-08 00:00:00+0000'");
    values.put("date9", "'2015-10-09 00:00:00+0000'");
    scriptExecutor.executeScriptTemplate("SimpleEntity/insert_many_rows.cql", values);

    final AtomicInteger rsCount = new AtomicInteger(0);
    final AtomicInteger rowCounter = new AtomicInteger(0);

    final CassandraLogAsserter logAsserter = new CassandraLogAsserter();
    logAsserter.prepareLogLevelForDriverConnection();

    //When
    final List<SimpleEntity> found = manager.dsl().select().value().fromBaseTable().where().id_Eq(id)
            .date_IN(date1, date2, date3, date6).orderByDateDescending().limit(3).withConsistencyLevel(THREE)
            .withRetryPolicy(DowngradingConsistencyRetryPolicy.INSTANCE).withResultSetAsyncListener(rs -> {
                rsCount.getAndSet(rs.getAvailableWithoutFetching());
                return rs;
            }).withRowAsyncListener(row -> {
                rowCounter.getAndIncrement();
                return row;
            }).getList();

    //Then
    assertThat(found).hasSize(3);
    assertThat(found.get(0).getValue()).isEqualTo("id - date6");
    assertThat(found.get(1).getValue()).isEqualTo("id - date3");
    assertThat(found.get(2).getValue()).isEqualTo("id - date2");
    assertThat(rsCount.get()).isEqualTo(3);
    assertThat(rowCounter.get()).isEqualTo(3);
    logAsserter.assertConsistencyLevels(THREE, ONE);
}

From source file:com.linkedin.pinot.perf.QueryRunner.java

/**
 * Use multiple threads to run queries at an increasing target QPS.
 *
 * A concurrent linked queue buffers the queries to be sent. The main thread inserts queries into the
 * queue at the target QPS and starts {numThreads} worker threads that fetch queries from the queue and
 * send them. The test starts at the start QPS and keeps adding the delta QPS to it as it runs. The main
 * thread is also responsible for collecting the statistics and logging them periodically.
 *
 * @param conf perf benchmark driver config.
 * @param queryFile query file.
 * @param numThreads number of threads sending queries.
 * @param startQPS start QPS
 * @param deltaQPS delta QPS
 * @throws Exception
 */
@SuppressWarnings("InfiniteLoopStatement")
public static void targetQPSQueryRunner(PerfBenchmarkDriverConf conf, String queryFile, int numThreads,
        double startQPS, double deltaQPS) throws Exception {
    final long randomSeed = 123456789L;
    final Random random = new Random(randomSeed);
    final int timePerTargetQPSMillis = 60000;
    final int queueLengthThreshold = Math.max(20, (int) startQPS);

    final List<String> queries;
    try (FileInputStream input = new FileInputStream(new File(queryFile))) {
        queries = IOUtils.readLines(input);
    }
    final int numQueries = queries.size();

    final PerfBenchmarkDriver driver = new PerfBenchmarkDriver(conf);
    final AtomicInteger counter = new AtomicInteger(0);
    final AtomicLong totalResponseTime = new AtomicLong(0L);
    final ExecutorService executorService = Executors.newFixedThreadPool(numThreads);

    final ConcurrentLinkedQueue<String> queryQueue = new ConcurrentLinkedQueue<>();
    double currentQPS = startQPS;
    int intervalMillis = (int) (MILLIS_PER_SECOND / currentQPS);

    for (int i = 0; i < numThreads; i++) {
        executorService.submit(new Runnable() {
            @Override
            public void run() {
                while (true) {
                    String query = queryQueue.poll();
                    if (query == null) {
                        try {
                            Thread.sleep(1);
                            continue;
                        } catch (InterruptedException e) {
                            LOGGER.error("Interrupted.", e);
                            return;
                        }
                    }
                    long startTime = System.currentTimeMillis();
                    try {
                        driver.postQuery(query);
                        counter.getAndIncrement();
                        totalResponseTime.getAndAdd(System.currentTimeMillis() - startTime);
                    } catch (Exception e) {
                        LOGGER.error("Caught exception while running query: {}", query, e);
                        return;
                    }
                }
            }
        });
    }

    LOGGER.info("Start with QPS: {}, delta QPS: {}", startQPS, deltaQPS);
    while (true) {
        long startTime = System.currentTimeMillis();
        while (System.currentTimeMillis() - startTime <= timePerTargetQPSMillis) {
            if (queryQueue.size() > queueLengthThreshold) {
                executorService.shutdownNow();
                throw new RuntimeException("Cannot achieve target QPS of: " + currentQPS);
            }
            queryQueue.add(queries.get(random.nextInt(numQueries)));
            Thread.sleep(intervalMillis);
        }
        double timePassedSeconds = ((double) (System.currentTimeMillis() - startTime)) / MILLIS_PER_SECOND;
        int count = counter.getAndSet(0);
        double avgResponseTime = ((double) totalResponseTime.getAndSet(0)) / count;
        LOGGER.info("Target QPS: {}, Interval: {}ms, Actual QPS: {}, Avg Response Time: {}ms", currentQPS,
                intervalMillis, count / timePassedSeconds, avgResponseTime);

        // Find a new interval
        int newIntervalMillis;
        do {
            currentQPS += deltaQPS;
            newIntervalMillis = (int) (MILLIS_PER_SECOND / currentQPS);
        } while (newIntervalMillis == intervalMillis);
        intervalMillis = newIntervalMillis;
    }
}
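The reporting loop takes its per-interval statistics with getAndSet(0): the worker threads keep incrementing while the main thread swaps each counter back to zero, so increments that race with the snapshot simply land in the next interval instead of being lost. A minimal sketch of that read-and-reset pattern (names are illustrative):

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

class IntervalStats {
    private final AtomicInteger count = new AtomicInteger(0);
    private final AtomicLong totalMillis = new AtomicLong(0L);

    // Called by worker threads after each request.
    void record(long responseTimeMillis) {
        count.getAndIncrement();
        totalMillis.getAndAdd(responseTimeMillis);
    }

    // Called periodically by the reporting thread. Each getAndSet(0) reads
    // and clears its counter in one atomic step. Note that, as in the code
    // above, the two counters are reset independently, so the average is
    // only approximate under heavy concurrency.
    double snapshotAvgResponseMillis() {
        int n = count.getAndSet(0);
        long total = totalMillis.getAndSet(0L);
        return n == 0 ? 0.0 : (double) total / n;
    }
}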

From source file:com.ikanow.aleph2.analytics.services.TestGraphBuilderEnrichmentService.java

@Test
public void test_empty() {

    final AtomicInteger counter = new AtomicInteger(0);

    final Streamable<Tuple2<Long, IBatchRecord>> test_stream = Streamable
            .of(Arrays.asList(_mapper.createObjectNode()))
            .<Tuple2<Long, IBatchRecord>>map(j -> Tuples._2T(0L, new BatchRecordUtils.JsonBatchRecord(j)));

    final IGraphService throwing_graph_service = Mockito.mock(IGraphService.class);
    Mockito.when(throwing_graph_service.getUnderlyingPlatformDriver(Mockito.any(), Mockito.any()))
            .thenThrow(new RuntimeException("getUnderlyingPlatformDriver"));
    final MockServiceContext mock_service_context = new MockServiceContext();
    final IEnrichmentModuleContext enrich_context = Mockito.mock(IEnrichmentModuleContext.class);
    Mockito.when(enrich_context.getServiceContext()).thenReturn(mock_service_context);
    Mockito.when(enrich_context.emitImmutableObject(Mockito.anyLong(), Mockito.any(), Mockito.any(),
            Mockito.any(), Mockito.any())).thenAnswer(invocation -> {
                counter.incrementAndGet();
                return null;
            });

    // Bucket enabled but no graph service
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();

        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).done().get();

        under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
        under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
        under_test.onStageComplete(true);
        assertEquals(under_test, under_test.cloneForNewGrouping());
        assertEquals(Collections.emptyList(), under_test.validateModule(enrich_context, bucket, control));
        assertEquals(1, counter.getAndSet(0));
    }
    // Use override
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();

        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
        final GraphConfigBean graph_config = BeanTemplateUtils.build(GraphConfigBean.class)
                .with(GraphConfigBean::graph_schema_override, graph_schema).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).with(EnrichmentControlMetadataBean::config,
                        new LinkedHashMap<String, Object>(BeanTemplateUtils.toMap(graph_config)))
                .done().get();

        under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
        under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
        under_test.onStageComplete(true);
        assertEquals(under_test, under_test.cloneForNewGrouping());
        assertEquals(Collections.emptyList(), under_test.validateModule(enrich_context, bucket, control));
        assertEquals(1, counter.getAndSet(0));
    }

    mock_service_context.addService(IGraphService.class, Optional.empty(), throwing_graph_service);

    // Add graph service, check it starts failing (bucket enabled)
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();

        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).done().get();

        try {
            under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null),
                    Optional.empty());
            fail("Should have thrown");
        } catch (Exception e) {
            // expected: the mocked graph service throws on lookup
        }
    }
    // Add graph service, check it starts failing (override)
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();

        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
        final GraphConfigBean graph_config = BeanTemplateUtils.build(GraphConfigBean.class)
                .with(GraphConfigBean::graph_schema_override, graph_schema).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).with(EnrichmentControlMetadataBean::config,
                        new LinkedHashMap<String, Object>(BeanTemplateUtils.toMap(graph_config)))
                .done().get();

        try {
            under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null),
                    Optional.empty());
            fail("Should have thrown");
        } catch (Exception e) {
            // expected: the mocked graph service throws on lookup
        }
    }
    // From bucket, graph service disabled, won't fail
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();

        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class).done().get(); // (no data_schema.graph_schema)
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).done().get();

        under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
        under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
        under_test.onStageComplete(true);
        assertEquals(under_test, under_test.cloneForNewGrouping());
        assertEquals(Collections.emptyList(), under_test.validateModule(enrich_context, bucket, control));
        assertEquals(1, counter.getAndSet(0));
    }
    // From override, graph service disabled, won't fail
    {
        final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();

        final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class)
                .with(GraphSchemaBean::enabled, false).done().get();
        final GraphConfigBean graph_config = BeanTemplateUtils.build(GraphConfigBean.class)
                .with(GraphConfigBean::graph_schema_override, graph_schema).done().get();
        final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::graph_schema, graph_schema).done().get())
                .done().get();
        final EnrichmentControlMetadataBean control = BeanTemplateUtils
                .build(EnrichmentControlMetadataBean.class).with(EnrichmentControlMetadataBean::config,
                        new LinkedHashMap<String, Object>(BeanTemplateUtils.toMap(graph_config)))
                .done().get();

        under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
        under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
        under_test.onStageComplete(true);
        assertEquals(under_test, under_test.cloneForNewGrouping());
        assertEquals(Collections.emptyList(), under_test.validateModule(enrich_context, bucket, control));
        assertEquals(1, counter.getAndSet(0));
    }

}
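In the assertions above, counter.getAndSet(0) doubles as check-and-reset: it verifies that exactly one object was emitted in the sub-case while clearing the counter for the next one.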

From source file:com.ikanow.aleph2.analytics.services.TestGraphBuilderEnrichmentService.java

@Test
public void test_delegation() {
    final AtomicInteger wrapper_counter = new AtomicInteger(0);
    final AtomicInteger emit_counter = new AtomicInteger(0);
    final AtomicInteger init_counter = new AtomicInteger(0);
    final AtomicInteger done_counter = new AtomicInteger(0);

    final Streamable<Tuple2<Long, IBatchRecord>> test_stream = Streamable
            .of(Arrays.asList(_mapper.createObjectNode()))
            .<Tuple2<Long, IBatchRecord>>map(j -> Tuples._2T(0L, new BatchRecordUtils.JsonBatchRecord(j)));

    final IEnrichmentBatchModule delegate = Mockito.mock(IEnrichmentBatchModule.class);
    Mockito.doAnswer(__ -> {
        init_counter.incrementAndGet();
        return null;
    }).when(delegate).onStageInitialize(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(),
            Mockito.any());
    Mockito.doAnswer(in -> {
        @SuppressWarnings("unchecked")
        final Stream<Tuple2<Long, IBatchRecord>> stream = (Stream<Tuple2<Long, IBatchRecord>>) in
                .getArguments()[0];
        stream.forEach(t2 -> emit_counter.incrementAndGet());
        return null;
    }).when(delegate).onObjectBatch(Mockito.any(), Mockito.any(), Mockito.any());
    Mockito.doAnswer(__ -> {
        done_counter.incrementAndGet();
        return null;
    }).when(delegate).onStageComplete(Mockito.anyBoolean());
    Mockito.when(delegate.cloneForNewGrouping()).thenReturn(delegate);
    Mockito.when(delegate.validateModule(Mockito.any(), Mockito.any(), Mockito.any()))
            .thenReturn(Arrays.asList(ErrorUtils.buildErrorMessage("", "", "")));

    final IGraphService throwing_graph_service = Mockito.mock(IGraphService.class);
    Mockito.when(throwing_graph_service.getUnderlyingPlatformDriver(Mockito.any(), Mockito.any()))
            .thenReturn(Optional.of(delegate));
    final MockServiceContext mock_service_context = new MockServiceContext();
    mock_service_context.addService(IGraphService.class, Optional.empty(), throwing_graph_service);
    final IEnrichmentModuleContext enrich_context = Mockito.mock(IEnrichmentModuleContext.class);
    Mockito.when(enrich_context.getServiceContext()).thenReturn(mock_service_context);
    Mockito.when(enrich_context.emitImmutableObject(Mockito.anyLong(), Mockito.any(), Mockito.any(),
            Mockito.any(), Mockito.any())).thenAnswer(invocation -> {
                wrapper_counter.incrementAndGet();
                return null;
            });

    final GraphSchemaBean graph_schema = BeanTemplateUtils.build(GraphSchemaBean.class).done().get();
    final DataBucketBean bucket = BeanTemplateUtils
            .build(DataBucketBean.class).with(DataBucketBean::data_schema, BeanTemplateUtils
                    .build(DataSchemaBean.class).with(DataSchemaBean::graph_schema, graph_schema).done().get())
            .done().get();
    final EnrichmentControlMetadataBean control = BeanTemplateUtils.build(EnrichmentControlMetadataBean.class)
            .done().get();

    final GraphBuilderEnrichmentService under_test = new GraphBuilderEnrichmentService();
    under_test.onStageInitialize(enrich_context, bucket, control, Tuples._2T(null, null), Optional.empty());
    under_test.onObjectBatch(test_stream.stream(), Optional.empty(), Optional.empty());
    under_test.onStageComplete(true);
    assertEquals(delegate, under_test.cloneForNewGrouping());
    assertEquals(1, under_test.validateModule(enrich_context, bucket, control).size());
    assertEquals(1, emit_counter.getAndSet(0));
    assertEquals(1, init_counter.getAndSet(0));
    assertEquals(1, done_counter.getAndSet(0));
    assertEquals(1, wrapper_counter.getAndSet(0));
}

From source file:ome.security.auth.LoginAttemptListener.java

public void onApplicationEvent(LoginAttemptMessage lam) {

    if (lam.success == null) {
        return; // EARLY EXIT.
    }

    if (!counts.containsKey(lam.user)) {
        counts.putIfAbsent(lam.user, new AtomicInteger(0));
    }

    AtomicInteger ai = counts.get(lam.user);
    if (lam.success) {
        int previous = ai.getAndSet(0);
        if (previous > 0) {
            log.info(String.format("Resetting failed login count of %s for %s", previous, lam.user));
        }
    } else {
        int value = ai.incrementAndGet();
        if (value > throttleCount) {
            log.warn(
                    String.format("%s failed logins for %s. Throttling for %s", value, lam.user, throttleTime));
            if (throttleTime > 0) {
                try {
                    Thread.sleep(throttleTime); // TODO something nicer
                } catch (InterruptedException e) {
                    log.debug("Interrupt while throttling for " + lam.user);
                }
            }
        }
    }
}
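On a successful login, ai.getAndSet(0) clears the failure count and reports what it held, so the reset is logged only when there were prior failures. A separate get() followed by set(0) could lose a concurrent increment between the two calls; the single atomic swap cannot. A stripped-down sketch of that reset-and-report idiom (names are illustrative):

import java.util.concurrent.atomic.AtomicInteger;

class FailureCounter {
    private final AtomicInteger failures = new AtomicInteger(0);

    void onFailure() {
        failures.incrementAndGet();
    }

    // Clears the count and returns the number of failures it held, in one
    // atomic step, so no concurrent onFailure() call is silently dropped.
    int resetAndReport() {
        return failures.getAndSet(0);
    }
}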

From source file:org.apache.activemq.web.AjaxTest.java

protected CountDownLatch asyncRequest(final HttpClient httpClient, final String url, final StringBuffer buffer,
        final AtomicInteger status) {
    final CountDownLatch latch = new CountDownLatch(1);
    httpClient.newRequest(url).send(new BufferingResponseListener() {
        @Override
        public void onComplete(Result result) {
            status.getAndSet(result.getResponse().getStatus());
            buffer.append(getContentAsString());
            latch.countDown();
        }
    });
    return latch;
}
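Note that in this listener the return value of getAndSet is discarded, so a plain status.set(...) would behave identically here; getAndSet is simply being used as an atomic write.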

From source file:org.apache.activemq.web.RestTest.java

@Test(timeout = 15 * 1000)
public void testPost() throws Exception {
    int port = getPort();

    HttpClient httpClient = new HttpClient();
    httpClient.start();

    final CountDownLatch latch = new CountDownLatch(1);
    final StringBuffer buf = new StringBuffer();
    final AtomicInteger status = new AtomicInteger();
    httpClient.newRequest("http://localhost:" + port + "/message/testPost?type=queue").method(HttpMethod.POST)
            .send(new BufferingResponseListener() {
                @Override
                public void onComplete(Result result) {
                    status.getAndSet(result.getResponse().getStatus());
                    buf.append(getContentAsString());
                    latch.countDown();
                }
            });

    latch.await();
    assertTrue("success status", HttpStatus.isSuccess(status.get()));

    final StringBuffer buf2 = new StringBuffer();
    final AtomicInteger status2 = new AtomicInteger();
    final CountDownLatch latch2 = asyncRequest(httpClient,
            "http://localhost:" + port + "/message/testPost?readTimeout=1000&type=Queue", buf2, status2);

    latch2.await();
    assertTrue("success status", HttpStatus.isSuccess(status2.get()));
}