Example usage for java.util.concurrent.atomic AtomicLong getAndIncrement

List of usage examples for java.util.concurrent.atomic AtomicLong getAndIncrement

Introduction

On this page you can find example usages of java.util.concurrent.atomic.AtomicLong.getAndIncrement.

Prototype

public final long getAndIncrement() 

Document

Atomically increments the current value, with memory effects as specified by VarHandle#getAndAdd. Returns the previous value.
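
Before the collected examples, here is a minimal sketch of the typical use case: handing out unique, monotonically increasing IDs from a shared counter. The IdGenerator class name is made up for illustration and does not come from the examples below; each call returns the value before the increment.

import java.util.concurrent.atomic.AtomicLong;

public class IdGenerator {
    // Shared counter; getAndIncrement is atomic, so concurrent callers never receive the same value.
    private static final AtomicLong NEXT_ID = new AtomicLong();

    static long nextId() {
        return NEXT_ID.getAndIncrement(); // returns the current value, then increments it
    }

    public static void main(String[] args) {
        System.out.println(nextId()); // prints 0
        System.out.println(nextId()); // prints 1
    }
}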

Usage

From source file:Main.java

public static void main(String[] argv) {
    AtomicLong nextId = new AtomicLong();

    // compareAndSet fails here: the current value is 0, not 122, so the counter is unchanged
    nextId.compareAndSet(122L, 123L);
    System.out.println(nextId.getAndIncrement()); // prints 0; the counter is now 1
}

From source file:Main.java

public static void main(String[] argv) {
    AtomicLong nextId = new AtomicLong();

    // weakCompareAndSet fails for the same reason: the current value is 0, not 122
    nextId.weakCompareAndSet(122L, 123L);
    System.out.println(nextId.getAndIncrement()); // prints 0; the counter is now 1
}

From source file:Main.java

public static void main(String[] argv) {
    AtomicLong nextId = new AtomicLong();

    System.out.println(nextId.getAndSet(123123L)); // prints 0, the value before the set
    System.out.println(nextId.getAndIncrement()); // prints 123123; the counter is now 123124
}

From source file:Main.java

public static void main(String[] argv) {
    AtomicLong nextId = new AtomicLong();

    System.out.println(nextId.getAndAdd(12312L)); // prints 0, the value before the add

    System.out.println(nextId.getAndIncrement()); // prints 12312; the counter is now 12313
}

From source file:eu.trentorise.smartcampus.ac.provider.repository.memory.AcDaoMemoryImpl.java

private Long getNextId(Class<?> cls) {
    AtomicLong id;
    synchronized (ids) {
        // lazily create one counter per entity class
        id = ids.get(cls);
        if (id == null) {
            id = new AtomicLong();
            ids.put(cls, id);
        }
    }
    return id.getAndIncrement();
}

From source file:fr.gouv.vitam.mdbes.QueryBench.java

private static String getRequest(JsonNode request, List<TypeField> fields, AtomicLong rank,
        BenchContext bench) {
    if (fields != null && !fields.isEmpty()) {
        String finalRequest = request.toString();
        ThreadLocalRandom rnd = ThreadLocalRandom.current();
        for (TypeField field : fields) {
            String val = null;
            switch (field.type) {
            case save:
                finalRequest = getFinalRequest(field, "", bench.savedNames, finalRequest);
                break;
            case liste:
                int rlist = rnd.nextInt(field.listeValeurs.length);
                val = field.listeValeurs[rlist];
                finalRequest = getFinalRequest(field, val, bench.savedNames, finalRequest);
                break;
            case listeorder:
                // take the next value from the shared rank counter, clamped to the last list index
                long i = rank.getAndIncrement();
                if (i >= field.listeValeurs.length) {
                    i = field.listeValeurs.length - 1;
                }
                val = field.listeValeurs[(int) i];
                finalRequest = getFinalRequest(field, val, bench.savedNames, finalRequest);
                break;
            case serie:
                // use a named counter if one is configured, otherwise fall back to the shared rank counter
                AtomicLong newcpt = rank;
                if (field.idcpt != null) {
                    newcpt = bench.cpts.get(field.idcpt);
                    if (newcpt == null) {
                        newcpt = rank;
                        System.err.println("wrong cpt name: " + field.idcpt);
                    }
                }
                long j = newcpt.getAndIncrement();
                if (field.modulo > 0) {
                    j = j % field.modulo;
                }
                val = (field.prefix != null ? field.prefix : "") + j;
                finalRequest = getFinalRequest(field, val, bench.savedNames, finalRequest);
                break;
            case interval:
                int newval = rnd.nextInt(field.low, field.high + 1);
                finalRequest = getFinalRequest(field, "" + newval, bench.savedNames, finalRequest);
                break;
            default:
                break;
            }
        }
        return finalRequest;
    }
    return null;
}

From source file:com.github.rinde.datgen.pdptw.DatasetGenerator.java

static void submitJob(final AtomicLong currentJobs, final ListeningExecutorService service,
        final ScenarioCreator job, final int numInstances, final Dataset<GeneratedScenario> dataset,
        final Map<GeneratorSettings, IdSeedGenerator> rngMap, final AtomicLong datasetSize) {

    if (service.isShutdown()) {
        return;
    }
    currentJobs.getAndIncrement();
    final ListenableFuture<GeneratedScenario> future = service.submit(job);
    Futures.addCallback(future, new FutureCallback<GeneratedScenario>() {
        @Override
        public void onSuccess(@Nullable GeneratedScenario result) {
            LOGGER.info(" - Job finished!");
            currentJobs.decrementAndGet();
            if (result == null) {
                final ScenarioCreator newJob = ScenarioCreator.create(rngMap.get(job.getSettings()).next(),
                        job.getSettings(), job.getGenerator());

                LOGGER.info(" - Job result was NULL, submitting new job");

                submitJob(currentJobs, service, newJob, numInstances, dataset, rngMap, datasetSize);
                return;
            }
            final GeneratedScenario res = verifyNotNull(result);
            if (dataset.get(res.getDynamismBin(), res.getSettings().getUrgency(), res.getSettings().getScale())
                    .size() < numInstances) {

                datasetSize.getAndIncrement();
                LOGGER.info(" - Job Putting dataset...");
                dataset.put(res.getDynamismBin(), res.getSettings().getUrgency(), res.getSettings().getScale(),
                        res);
            } else {
                // TODO check if this job should be respawned by seeing if it uses the
                // correct TSG

                // TODO respawn more tasks if currentJobs < numThreads
                final Collection<Double> dynamismLevels = job.getSettings().getDynamismRangeCenters()
                        .asMapOfRanges().values();

                boolean needMore = false;
                for (final Double d : dynamismLevels) {
                    if (dataset.get(d, res.getSettings().getUrgency(), res.getSettings().getScale())
                            .size() < numInstances) {
                        needMore = true;
                        break;
                    }
                }

                if (needMore) {
                    // respawn job

                    final ScenarioCreator newJob = ScenarioCreator.create(rngMap.get(job.getSettings()).next(),
                            job.getSettings(), job.getGenerator());

                    if (!service.isShutdown()) {
                        submitJob(currentJobs, service, newJob, numInstances, dataset, rngMap, datasetSize);
                    }
                }
            }
        }

        @Override
        public void onFailure(Throwable t) {
            throw new IllegalStateException(t);
        }
    }, MoreExecutors.directExecutor());
}

From source file:com.joyent.manta.benchmark.Benchmark.java

/**
 * Method used to run a multi-threaded benchmark.
 *
 * @param method to measure
 * @param path path to store benchmarking test data
 * @param iterations number of iterations to run
 * @param concurrency number of threads to run
 * @throws IOException thrown when we can't communicate with the server
 */
private static void multithreadedBenchmark(final String method, final String path, final int iterations,
        final int concurrency) throws IOException {
    final AtomicLong fullAggregation = new AtomicLong(0L);
    final AtomicLong serverAggregation = new AtomicLong(0L);
    final AtomicLong count = new AtomicLong(0L);
    final long perThreadCount = perThreadCount(iterations, concurrency);

    System.out.printf("Running %d iterations per thread\n", perThreadCount);

    final long testStart = System.nanoTime();

    Runtime.getRuntime().addShutdownHook(new Thread(Benchmark::cleanUp));

    final Callable<Void> worker = () -> {
        for (int i = 0; i < perThreadCount; i++) {
            Duration[] durations;

            if (method.equals("put")) {
                durations = measurePut(sizeInBytesOrNoOfDirs);
            } else if (method.equals("putDir")) {
                durations = measurePutDir(sizeInBytesOrNoOfDirs);
            } else {
                durations = measureGet(path);
            }

            long fullLatency = durations[0].toMillis();
            long serverLatency = durations[1].toMillis();
            fullAggregation.addAndGet(fullLatency);
            serverAggregation.addAndGet(serverLatency);

            System.out.printf("%s %d full=%dms, server=%dms, thread=%s\n", method, count.getAndIncrement(),
                    fullLatency, serverLatency, Thread.currentThread().getName());
        }

        return null;
    };

    final Thread.UncaughtExceptionHandler handler = (t, e) -> LOG.error("Error when executing benchmark", e);

    final AtomicInteger threadCounter = new AtomicInteger(0);
    ThreadFactory threadFactory = r -> {
        Thread t = new Thread(r);
        t.setDaemon(true);
        t.setUncaughtExceptionHandler(handler);
        t.setName(String.format("benchmark-%d", threadCounter.incrementAndGet()));

        return t;
    };

    ExecutorService executor = Executors.newFixedThreadPool(concurrency, threadFactory);

    List<Callable<Void>> workers = new ArrayList<>(concurrency);
    for (int i = 0; i < concurrency; i++) {
        workers.add(worker);
    }

    try {
        List<Future<Void>> futures = executor.invokeAll(workers);

        boolean completed = false;
        while (!completed) {
            try (Stream<Future<Void>> stream = futures.stream()) {
                completed = stream.allMatch((f) -> f.isDone() || f.isCancelled());

                if (!completed) {
                    Thread.sleep(CHECK_INTERVAL);
                }
            }
        }

    } catch (InterruptedException e) {
        return;
    } finally {
        System.err.println("Shutting down the thread pool");
        executor.shutdown();
    }

    final long testEnd = System.nanoTime();

    final long fullAverage = Math.round(fullAggregation.get() / iterations);
    final long serverAverage = Math.round(serverAggregation.get() / iterations);
    final long totalTime = Duration.ofNanos(testEnd - testStart).toMillis();

    System.out.printf("Average full latency: %d ms\n", fullAverage);
    System.out.printf("Average server latency: %d ms\n", serverAverage);
    System.out.printf("Total test time: %d ms\n", totalTime);
    System.out.printf("Total invocations: %d\n", count.get());
}

From source file:eu.eubrazilcc.lvl.service.rest.TaskResource.java

@Path("progress/{id}")
@GET
@Produces(SseFeature.SERVER_SENT_EVENTS)
public EventOutput getServerSentEvents(final @PathParam("id") String id,
        final @QueryParam("refresh") @DefaultValue("30") int refresh,
        final @QueryParam("token") @DefaultValue("") String token, final @Context HttpServletRequest request,
        final @Context HttpHeaders headers) {
    if (isBlank(id) || !REFRESH_RANGE.contains(refresh)) {
        throw new WebApplicationException("Missing required parameters", Response.Status.BAD_REQUEST);
    }
    OAuth2SecurityManager.login(request, null, isBlank(token) ? headers : ssehHttpHeaders(token), RESOURCE_NAME)
            .requiresPermissions("tasks:*:*:" + id.trim() + ":view");
    // get from task storage
    final CancellableTask<?> task = TASK_STORAGE.get(fromString(id));
    if (task == null) {
        throw new WebApplicationException("Element not found", Response.Status.NOT_FOUND);
    }
    final String client = getClientAddress(request);
    LOGGER.info("Subscribed to progress events: " + client);
    final AtomicLong eventId = new AtomicLong(0l);
    final EventOutput eventOutput = new EventOutput();
    TASK_RUNNER.submit(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                do {
                    final ListenableScheduledFuture<?> future = TASK_SCHEDULER.schedule(
                            checkTaskProgress(eventOutput, eventId, task),
                            eventId.getAndIncrement() == 0 ? 0 : refresh, SECONDS);
                    future.get();
                } while (!task.isDone());
            } catch (Exception e) {
                LOGGER.error("Failed to get task status", e);
            } finally {
                try {
                    eventOutput.close();
                } catch (Exception ignored) {
                }
                LOGGER.info("Closing progress events where subscriber is: " + client);
            }
            return null;
        }
    });
    return eventOutput;
}

From source file:io.druid.java.util.common.CompressionUtilsTest.java

@Test(expected = IOException.class)
public void testStreamErrorGzip() throws Exception {
    final File tmpDir = temporaryFolder.newFolder("testGoodGzipByteSource");
    final File gzFile = new File(tmpDir, testFile.getName() + ".gz");
    Assert.assertFalse(gzFile.exists());
    final AtomicLong flushes = new AtomicLong(0L);
    CompressionUtils.gzip(new FileInputStream(testFile), new FileOutputStream(gzFile) {
        @Override
        public void flush() throws IOException {
            if (flushes.getAndIncrement() > 0) {
                super.flush();
            } else {
                throw new IOException("Test exception");
            }
        }
    });
}