Example usage for java.util.concurrent.atomic AtomicLong incrementAndGet

Introduction

On this page you can find example usage of java.util.concurrent.atomic.AtomicLong#incrementAndGet.

Prototype

public final long incrementAndGet() 

Document

Atomically increments the current value, with memory effects as specified by VarHandle#getAndAdd, and returns the updated value.
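Before the excerpts below, a minimal self-contained sketch of the call (class and counter names here are illustrative, not taken from any of the examples): each thread receives a distinct value because the read-modify-write is atomic.

import java.util.concurrent.atomic.AtomicLong;

public class IncrementAndGetDemo {
    private static final AtomicLong counter = new AtomicLong();

    public static void main(String[] args) throws InterruptedException {
        Runnable task = () -> {
            // Atomic read-modify-write: no two threads can observe the same value
            long id = counter.incrementAndGet();
            System.out.println(Thread.currentThread().getName() + " got " + id);
        };
        Thread t1 = new Thread(task);
        Thread t2 = new Thread(task);
        t1.start();
        t2.start();
        t1.join();
        t2.join();
        System.out.println("final value: " + counter.get()); // always 2
    }
}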

Usage

From source file:edu.tum.cs.vis.model.util.algorithm.ACCUM.java

/**
 * Diffuses a vector field at one vertex, weighted by a Gaussian of width 1/sqrt(invsigma2).
 * Ported from trimesh2 (2.12).
 */
@SuppressWarnings("javadoc")
private static void diffuse_vert_field(final Model m, HashMap<Vertex, Curvature> curvatures,
        Map<Vertex, Long> flags, AtomicLong flag_curr, final ACCUM accum, int v, float invsigma2, Vertex flt) {
    Vertex vert = m.getVertices().get(v);
    if (vert.getNeighbors().size() == 0) {
        // flt.set(0, 0, 0);
        accum.a(m, curvatures, vert, flt, 1.0f, vert);
        return;
    }

    // flt.set(0, 0, 0);
    accum.a(m, curvatures, vert, flt, vert.getPointarea(), vert);
    float sum_w = vert.getPointarea();
    final Vector3f nv = vert.getNormalVector();

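    // incrementAndGet hands out a fresh, unique token for this pass; vertices carrying it have already been visited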
    long flag_curr_val = flag_curr.incrementAndGet();
    flags.put(vert, flag_curr_val);
    LinkedList<Vertex> boundary = new LinkedList<Vertex>();
    boundary.addAll(vert.getNeighbors());
    while (boundary.size() > 0) {
        Vertex n = boundary.pop();
        if (flags.get(n) != null && flags.get(n) == flag_curr_val)
            continue;
        flags.put(n, flag_curr_val);
        if (nv.dot(n.getNormalVector()) <= 0.0f)
            continue;
        // Gaussian weight
        float w = wt(n, vert, invsigma2);
        if (w == 0.0f)
            continue;
        // Downweight things pointing in different directions
        w *= nv.dot(n.getNormalVector());
        // Surface area "belonging" to each point
        w *= n.getPointarea();
        // Accumulate weight times field at neighbor
        accum.a(m, curvatures, vert, flt, w, n);
        sum_w += w;
        for (Vertex nn : n.getNeighbors()) {
            if (flags.get(nn) != null && flags.get(nn) == flag_curr_val)
                continue;
            boundary.push(nn);
        }
    }
    flt.scale(1 / sum_w);
}

From source file:de.khiem.offsite.tree.tri.Main.java

void test1() {
    PatriciaTrie<O> t = new PatriciaTrie<>();

    AtomicLong id = new AtomicLong();
    D r = new D(id.incrementAndGet()); //1
    r.children.add(new M(id.incrementAndGet())); //1.2
    r.children.add(new M(id.incrementAndGet())); //1.3
    r.children.add(new F(id.incrementAndGet())); //1.4

    D f = new D(id.incrementAndGet()); //1.5
    f.children.add(new F(id.incrementAndGet())); //1.5.6
    f.children.add(new M(id.incrementAndGet()));//1.5.7
    r.children.add(f);

    add(t, r, "");
    FT ft = new FT(r);

    search(t, "1");
    System.out.println("FT: " + ft.findByKey("1"));
    System.out.println("==============");
    search(t, "1.2");
    System.out.println("FT: " + ft.findByKey("1.2"));
    System.out.println("==============");
    search(t, "1.3");
    System.out.println("FT: " + ft.findByKey("1.3"));
    System.out.println("==============");
    search(t, "1.4");
    System.out.println("FT: " + ft.findByKey("1.4"));
    System.out.println("==============");
    search(t, "1.5");
    System.out.println("FT: " + ft.findByKey("1.5"));
    System.out.println("==============");
    search(t, "1.5.6");
    System.out.println("FT: " + ft.findByKey("1.5.6"));
    System.out.println("==============");
    search(t, "1.5.7");
    System.out.println("FT: " + ft.findByKey("1.5.7"));

    System.out.println("==================");

    //System.out.println("from 1, 3 layers:"  + ft.findSubs("1", 3));
    System.out.println("from 1, 1 layers:" + ft.findSubs("1", 1));
    //System.out.println("from 1.5, 1 layers:"  + ft.findSubs("1.5", 1));
    //System.out.println("from 1.5, 2 layers:"  + ft.findSubs("1.5", 2));

    /*
    searchWithId(t, 1l);
    searchWithId(t, 2l);
    searchWithId(t, 3l);
    searchWithId(t, 4l);
    searchWithId(t, 5l);
    searchWithId(t, 6l);
    searchWithId(t, 7l);
    */

}

From source file:jduagui.Controller.java

public static long getSize(String startPath, Map<String, Long> dirs, Map<String, Long> files)
        throws IOException {
    final AtomicLong size = new AtomicLong(0);
    final AtomicLong subdirs = new AtomicLong(0);
    final AtomicLong fs = new AtomicLong(0);
    final File f = new File(startPath);
    Path path = Paths.get(startPath);

    Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {
            subdirs.incrementAndGet();
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            fs.incrementAndGet();
            size.addAndGet(attrs.size());
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
            fs.incrementAndGet();
            return FileVisitResult.CONTINUE;
        }
    });
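    // preVisitDirectory also counted the start directory itself; drop that one, but clamp at zero when startPath is a plain file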
    if (subdirs.decrementAndGet() == -1)
        subdirs.incrementAndGet();

    if (f.isDirectory()) {
        dirs.put(startPath, subdirs.get());
        files.put(startPath, fs.get());
    }
    return size.get();
}

From source file:eu.fthevenet.binjr.data.codec.CsvDecoder.java

@Override
public Map<TimeSeriesInfo<T>, TimeSeriesProcessor<T>> decode(InputStream in, List<TimeSeriesInfo<T>> seriesInfo)
        throws IOException, DecodingDataFromAdapterException {
    try (Profiler ignored = Profiler.start("Building time series from csv data", logger::trace)) {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, encoding))) {
            CSVFormat csvFormat = CSVFormat.DEFAULT.withAllowMissingColumnNames(false).withFirstRecordAsHeader()
                    .withSkipHeaderRecord().withDelimiter(delimiter);
            Iterable<CSVRecord> records = csvFormat.parse(reader);
            Map<TimeSeriesInfo<T>, TimeSeriesProcessor<T>> series = new HashMap<>();
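            // AtomicLong rather than a plain long: the count must be effectively final so the logging lambda below can capture it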
            final AtomicLong nbpoints = new AtomicLong(0);
            for (CSVRecord csvRecord : records) {
                nbpoints.incrementAndGet();
                ZonedDateTime timeStamp = dateParser.apply(csvRecord.get(0));
                for (TimeSeriesInfo<T> info : seriesInfo) {
                    T val = numberParser.apply(csvRecord.get(info.getBinding().getLabel()));
                    XYChart.Data<ZonedDateTime, T> point = new XYChart.Data<>(timeStamp, val);
                    TimeSeriesProcessor<T> l = series.computeIfAbsent(info, k -> timeSeriesFactory.create());
                    l.addSample(point);
                }
            }
            logger.trace(() -> String.format("Built %d series with %d samples each (%d total samples)",
                    seriesInfo.size(), nbpoints.get(), seriesInfo.size() * nbpoints.get()));
            return series;
        }
    }
}

From source file:com.baidu.fsg.uid.utils.NamingThreadFactory.java

/**
 * Gets the next sequence number for the given naming prefix.
 * 
 * @param invoker the naming prefix whose counter should be advanced
 * @return the incremented sequence value
 */
private long getSequence(String invoker) {
    AtomicLong r = this.sequences.get(invoker);
    if (r == null) {
        r = new AtomicLong(0);
        AtomicLong previous = this.sequences.putIfAbsent(invoker, r);
        if (previous != null) {
            r = previous;
        }
    }

    return r.incrementAndGet();
}
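On Java 8 and later, the putIfAbsent dance above can be collapsed into a single race-safe line. A sketch, assuming sequences is a ConcurrentHashMap<String, AtomicLong> (the field's exact declaration is not shown in this excerpt):

private long getSequence(String invoker) {
    // computeIfAbsent creates the counter at most once per key, atomically
    return this.sequences.computeIfAbsent(invoker, k -> new AtomicLong(0)).incrementAndGet();
}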

From source file:com.nesscomputing.service.discovery.client.internal.ServiceDiscoveryAnnouncer.java

@Override
void determineGeneration(final AtomicLong generation, final long tick) {
    final long currentAnnouncementGeneration = announcementGeneration.get();

    // Trigger a run through the work loop if the last announcement was before
    // the current generation.
    if (lastAnnouncementGeneration < currentAnnouncementGeneration) {
        generation.incrementAndGet();
        lastAnnouncementGeneration = currentAnnouncementGeneration;
    }
}

From source file:com.github.jackygurui.vertxredissonrepository.repository.SaveAndSearchAndGetCallInConcurrentTest.java

@Test
public void test2SaveAndSearchAndGetCallIn(TestContext context) throws Exception {
    Async async = context.async();
    JsonNode source = JsonLoader.fromResource("/CallIn.json");
    int records = 1000;
    AtomicLong total = new AtomicLong(0);
    ConcurrentHashMap<JsonObject, String> m = new ConcurrentHashMap<>();
    // range (not rangeClosed): exactly 'records' elements, so the completion check below fires on the last one
    Stream<JsonObject> stream = IntStream.range(0, records).mapToObj(e -> {
        JsonObject clone = new JsonObject(Json.encode(source));
        Long number = Long.parseLong(clone.getString("phoneNumber")) + e;
        clone.put("phoneNumber", number + "");
        Long callTime = clone.getLong("callTime") + e;
        clone.put("callTime", callTime);
        return clone;
    });
    StopWatch sw = new StopWatch();
    sw.start();
    stream.parallel().forEach(e -> {
        org.simondean.vertx.async.Async.waterfall().<String>task(t -> {
            callInRepository.create(Json.encode(e), t);
        }).<List<CallIn>>task((id, t) -> {
            m.put(e, id);
            AtomicLong idc = new AtomicLong(0);
            org.simondean.vertx.async.Async.retry().<List<CallIn>>task(tt -> {
                callInRepository.searchIndexByScoreAndGet("callTime", e.getDouble("callTime"),
                        e.getDouble("callTime"), 0, 1, ttt -> {
                            logger.info("id = " + id + " | retry count: " + idc.incrementAndGet());
                            tt.handle(ttt.succeeded() && ttt.result() != null && !ttt.result().isEmpty()
                                    ? Future.succeededFuture(ttt.result())
                                    : Future.failedFuture(ttt.cause()));
                        });
            }).times(100000).run(t);
        }).run(r -> {
            context.assertTrue(r.succeeded());
            if (r.succeeded()) {
                context.assertFalse(r.result().isEmpty());
                context.assertEquals(1, r.result().size());
                CallIn ci = r.result().iterator().next();
                context.assertNotNull(ci);
                logger.info(Json.encode(ci));
                CallIn cii = Json.decodeValue(e.put("id", m.get(e)).encode(), CallIn.class);
                context.assertEquals(Json.encode(cii), Json.encode(ci));
            }
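            // completion latch: the handler that bumps the counter to 'records' stops the clock and ends the test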
            long t;
            if ((t = total.incrementAndGet()) == records) {
                sw.stop();
                logger.info("time to concurrently save and search and get " + records + " call in records: "
                        + sw.getTime());
                async.complete();
            } else {
                logger.info("t = " + t);
            }
        });
    });

}

From source file:com.jivesoftware.os.routing.bird.deployable.TenantRoutingBirdProviderBuilder.java

public ConnectionDescriptorsProvider build(OAuthSigner signer) {
    HttpClientConfig httpClientConfig = HttpClientConfig.newBuilder().build();
    final HttpClient httpClient = new HttpClientFactoryProvider()
            .createHttpClientFactory(Collections.singletonList(httpClientConfig), false)
            .createClient(signer, routesHost, routesPort);

    AtomicLong activeCount = new AtomicLong();
    final ObjectMapper mapper = new ObjectMapper();
    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    ConnectionDescriptorsProvider connectionsProvider = (connectionsRequest, expectedReleaseGroup) -> {
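        // in-flight gauge: incremented on entry, decremented in the finally block below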
        activeCount.incrementAndGet();
        try {
            LOG.debug("Requesting connections:{}", connectionsRequest);

            String postEntity;
            try {
                postEntity = mapper.writeValueAsString(connectionsRequest);
            } catch (JsonProcessingException e) {
                LOG.error("Error serializing request parameters object to a string. Object was "
                        + connectionsRequest + " " + e.getMessage());
                return null;
            }

            HttpResponse response;
            try {
                response = httpClient.postJson(routesPath, postEntity, null);
            } catch (HttpClientException e) {
                LOG.error(
                        "Error posting query request to server.  The entity posted was {} and the endpoint posted to was {}",
                        new Object[] { postEntity, routesPath }, e);
                return null;
            }

            int statusCode = response.getStatusCode();
            if (statusCode >= 200 && statusCode < 300) {
                byte[] responseBody = response.getResponseBody();
                try {
                    ConnectionDescriptorsResponse connectionDescriptorsResponse = mapper.readValue(responseBody,
                            ConnectionDescriptorsResponse.class);
                    if (!connectionsRequest.getRequestUuid()
                            .equals(connectionDescriptorsResponse.getRequestUuid())) {
                        LOG.warn("Request UUIDs are misaligned, request:{} response:{}", connectionsRequest,
                                connectionDescriptorsResponse);
                    }
                    if (connectionDescriptorsResponse.getReturnCode() >= 0 && expectedReleaseGroup != null
                            && !expectedReleaseGroup.equals(connectionDescriptorsResponse.getReleaseGroup())) {
                        String responseEntity = new String(responseBody, StandardCharsets.UTF_8);
                        LOG.warn(
                                "Release group changed, active:{} request:{} requestEntity:{} responseEntity:{} response:{}",
                                activeCount.get(), connectionsRequest, postEntity, responseEntity,
                                connectionDescriptorsResponse);
                    }
                    LOG.debug("Request:{} ConnectionDescriptors:{}", connectionsRequest,
                            connectionDescriptorsResponse);
                    return connectionDescriptorsResponse;
                } catch (IOException x) {
                    LOG.error("Failed to deserialize response:" + new String(responseBody) + " "
                            + x.getMessage());
                    return null;
                }
            }
            return null;
        } finally {
            activeCount.decrementAndGet();
        }
    };
    return connectionsProvider;
}

From source file:org.knowrob.vis.model.util.algorithm.ACCUM.java

/**
 * Diffuses a vector field around one vertex, weighted by a Gaussian of width
 * {@code 1/sqrt(invsigma2)}. Ported from trimesh2 (2.12) by Szymon Rusinkiewicz, Princeton University.
 * 
 * @see <a href="https://github.com/fcole/qrtsc/tree/master/trimesh2">trimesh2</a>
 * 
 * @param m
 *          CAD model analyzed
 * @param curvatures
 *          HashMap with model vertices as keys and their curvatures as values
 * @param flags
 *          map of vertex to long value (default initialized with 0 and of the same length as the model's vertex list)
 * @param flag_curr
 *          atomic counter whose incremented value marks the vertices visited in this pass
 * @param accum
 *          accumulator
 * @param v
 *          index of the vertex around which the field is diffused
 * @param invsigma2
 *          inverse squared width of the Gaussian used in weighting
 * @param flt
 *          3D vector field diffused based on the curvature
 */
@SuppressWarnings("javadoc")
private static void diffuse_vert_field(final Model m, HashMap<Vertex, Curvature> curvatures,
        Map<Vertex, Long> flags, AtomicLong flag_curr, final ACCUM accum, int v, float invsigma2, Vertex flt) {
    Vertex vert = m.getVertices().get(v);
    if (vert.getNeighbors().size() == 0) {
        // flt.set(0, 0, 0);
        accum.a(m, curvatures, vert, flt, .5f, vert);
        return;
    }

    // flt.set(0, 0, 0);
    accum.a(m, curvatures, vert, flt, vert.getPointarea(), vert);
    float sum_w = vert.getPointarea();
    final Vector3f nv = vert.getNormalVector();

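    // incrementAndGet hands out a fresh, unique token for this pass; vertices carrying it have already been visited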
    long flag_curr_val = flag_curr.incrementAndGet();
    flags.put(vert, flag_curr_val);
    LinkedList<Vertex> boundary = new LinkedList<Vertex>();
    boundary.addAll(vert.getNeighbors());
    while (boundary.size() > 0) {
        Vertex n = boundary.pop();
        if (flags.get(n) != null && flags.get(n) == flag_curr_val)
            continue;
        flags.put(n, flag_curr_val);
        if (nv.dot(n.getNormalVector()) <= 0.0f)
            continue;
        // Gaussian weight
        float w = wt(n, vert, invsigma2);
        if (w == 0.0f)
            continue;
        // Downweight things pointing in different directions
        w *= nv.dot(n.getNormalVector());
        // Surface area "belonging" to each point
        w *= n.getPointarea();
        // Accumulate weight times field at neighbor
        accum.a(m, curvatures, vert, flt, w, n);
        sum_w += w;
        for (Vertex nn : n.getNeighbors()) {
            if (flags.get(nn) != null && flags.get(nn) == flag_curr_val)
                continue;
            boundary.push(nn);
        }
    }
    flt.scale(1 / sum_w);
}

From source file:org.talend.dataprep.cache.file.FileSystemContentCacheJanitor.java

/**
 * A clean-up process that starts one minute after the previous run ended.
 */
@Scheduled(fixedDelay = 60000)
public void janitor() {
    if (!Paths.get(location).toFile().exists()) {
        LOGGER.debug("No cache content to clean.");
        return;
    }
    final long start = System.currentTimeMillis();
    final AtomicLong deletedCount = new AtomicLong();
    final AtomicLong totalCount = new AtomicLong();
    LOGGER.debug("Janitor process started @ {}.", start);
    try {
        final BiConsumer<Path, String> deleteOld = (file, suffix) -> {
            try {
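                // the file-name suffix appears to encode an expiration timestamp; entries already past it are stale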
                final long time = Long.parseLong(suffix);
                if (time < start) {
                    try {
                        Files.delete(file);
                        deletedCount.incrementAndGet();
                    } catch (NoSuchFileException e) {
                        LOGGER.debug("Ignored delete issue for '{}'.", file.getFileName(), e);
                    } catch (IOException e) {
                        LOGGER.warn("Unable to delete '{}'.", file.getFileName());
                        LOGGER.debug("Unable to delete '{}'.", file.getFileName(), e);
                    }
                }
            } catch (NumberFormatException e) {
                LOGGER.debug("Ignore file '{}'", file);
            }
            totalCount.incrementAndGet();
        };
        Files.walkFileTree(Paths.get(location), new FileSystemVisitor(deleteOld));
    } catch (IOException e) {
        LOGGER.error("Unable to clean up cache", e);
    }
    LOGGER.debug("Janitor process ended @ {} ({}/{} files successfully deleted).", System.currentTimeMillis(),
            deletedCount, totalCount);
}