Example usage for com.google.common.util.concurrent AtomicDouble set

Introduction

On this page you can find usage examples for com.google.common.util.concurrent.AtomicDouble.set(double).

Prototype

public final void set(double newValue) 

Document

Sets to the given value.
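
Before the project examples below, a minimal sketch of set(double) in isolation. The class and variable names here are illustrative and not taken from any of the projects listed:

import com.google.common.util.concurrent.AtomicDouble;

public class AtomicDoubleSetSketch {
    public static void main(String[] args) {
        // A shared accumulator that is reused across passes of a loop.
        AtomicDouble accumulator = new AtomicDouble(0.0);

        for (int pass = 0; pass < 3; pass++) {
            accumulator.set(0.0);       // unconditional write: reset before each pass
            accumulator.addAndGet(1.5); // atomic read-modify-write
            accumulator.addAndGet(2.5);
            System.out.println("pass " + pass + " sum = " + accumulator.get()); // 4.0
        }
    }
}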

Usage

From source file:it.stilo.g.algo.Leadership.java

public static ArrayList<DoubleValues> compute(final WeightedGraph g, double sigma, int runner) {
    long time = System.currentTimeMillis();
    double[] hub = new double[g.size];
    double[] auth = new double[g.size];
    double[] oldAuth;

    double iV = 1; // Hubness & Authority scores are initialized by default to 1.

    for (int i = 0; i < g.size; i++) {
        if (g.in[i] != null || g.out[i] != null) {
            auth[i] = iV;
            hub[i] = iV;
        }
    }

    // Shared accumulators for the normalization phase.
    AtomicDouble SAuth = new AtomicDouble(0.0);
    AtomicDouble SHub = new AtomicDouble(0.0);
    int count = 0;
    do {
        SAuth.set(0.0);
        SHub.set(0.0);
        CountDownLatch authority = new CountDownLatch(runner);
        CountDownLatch hubness = new CountDownLatch(runner);
        CountDownLatch normalizationStep = new CountDownLatch(runner);

        oldAuth = Arrays.copyOf(auth, auth.length);

        Thread[] workers = new Thread[runner];
        for (int i = 0; i < runner; i++) {
            workers[i] = new Thread(
                    new Leadership(g, authority, hubness, normalizationStep, hub, auth, SHub, SAuth, i, runner),
                    "" + i);
            workers[i].start();
        }

        try {
            normalizationStep.await();
        } catch (InterruptedException e) {
            logger.debug(e);
        }
        count++;
    } while (ArraysUtil.L1(ArraysUtil.sub(oldAuth, auth)) > sigma); // Repeat until the desired precision is reached.

    ArrayList<DoubleValues> leader = new ArrayList<DoubleValues>();

    // Create the combined ranking list (average of Authority & Hubness scores).
    for (int i = 0; i < auth.length; i++) {
        if (g.in[i] != null || g.out[i] != null) {
            leader.add(new DoubleValues(i, (auth[i] + hub[i]) / 2.0d));
        }
    }

    Collections.sort(leader);

    logger.trace(((System.currentTimeMillis() - time) / 1000d) + "s\t" + count);
    return leader;
}

From source file:it.stilo.g.algo.HubnessAuthority.java

public static ArrayList<ArrayList<DoubleValues>> compute(final WeightedGraph g, double sigma, int runner) {
    long time = System.currentTimeMillis();
    double[] hub = new double[g.size];
    double[] auth = new double[g.size];
    double[] oldAuth;

    double iV = 1; // Hubness & Authority scores are initialized by default to 1.

    for (int i = 0; i < g.size; i++) {
        if (g.in[i] != null || g.out[i] != null) {
            auth[i] = iV;
            hub[i] = iV;
        }
    }

    // Shared accumulators for the normalization phase.
    AtomicDouble SAuth = new AtomicDouble(0.0);
    AtomicDouble SHub = new AtomicDouble(0.0);
    int count = 0;
    do {
        SAuth.set(0.0);
        SHub.set(0.0);
        CountDownLatch authority = new CountDownLatch(runner);
        CountDownLatch hubness = new CountDownLatch(runner);
        CountDownLatch normalizationStep = new CountDownLatch(runner);

        oldAuth = Arrays.copyOf(auth, auth.length);

        Thread[] workers = new Thread[runner];
        for (int i = 0; i < runner; i++) {
            workers[i] = new Thread(new HubnessAuthority(g, authority, hubness, normalizationStep, hub, auth,
                    SHub, SAuth, i, runner), "" + i);
            workers[i].start();
        }

        try {
            normalizationStep.await();
        } catch (InterruptedException e) {
            logger.debug(e);
        }
        count++;
    } while (ArraysUtil.L1(ArraysUtil.sub(oldAuth, auth)) > sigma); // Repeat until the desired precision is reached.

    ArrayList<DoubleValues> listAuth = new ArrayList<DoubleValues>();
    ArrayList<DoubleValues> listHub = new ArrayList<DoubleValues>();

    //Create ranking lists for Hubness & Authority
    for (int i = 0; i < auth.length; i++) {
        if (g.in[i] != null || g.out[i] != null) {
            listAuth.add(new DoubleValues(i, auth[i]));
            listHub.add(new DoubleValues(i, hub[i]));
        }
    }

    Collections.sort(listAuth);
    Collections.sort(listHub);

    ArrayList<ArrayList<DoubleValues>> list = new ArrayList<ArrayList<DoubleValues>>();
    list.add(new ArrayList<>(listAuth));
    list.add(new ArrayList<>(listHub));

    logger.trace(((System.currentTimeMillis() - time) / 1000d) + "s\t" + count);
    return list;
}

From source file:com.netflix.spinnaker.kork.metrics.SpectatorMetricWriter.java

@Override
public void set(Metric<?> value) {
    if (value.getName().startsWith("histogram.")) {
        registry.distributionSummary(value.getName()).record(value.getValue().longValue());
    } else if (value.getName().startsWith("timer.")) {
        registry.timer(value.getName()).record(value.getValue().longValue(), TimeUnit.MILLISECONDS);
    } else {
        final Id id = registry.createId(value.getName());
        final AtomicDouble gauge = getGaugeStorage(id);
        gauge.set(value.getValue().doubleValue());

        registry.gauge(id, gauge);
    }
}

From source file:com.srotya.tau.nucleus.metrics.MetricsSink.java

public void publishFloatMetric(String metricName, double value) {
    AtomicDouble val = floatMetrics.get(metricName);
    if (val == null) {
        val = new AtomicDouble(0);
        floatMetrics.put(metricName, val);
    }
    val.set(value);
}

From source file:org.loklak.susi.SusiTransfer.java

/**
 * A conclusion from choices is drawn by applying a function to the choice set.
 * This may be done, e.g., by counting the number of choices or by extracting a maximum element.
 * @param choices the given set of json objects from the data object of a SusiThought
 * @return an array of json objects which are the extraction of the given choices according to the given mapping
 */
public JSONArray conclude(JSONArray choices) {
    JSONArray a = new JSONArray();
    if (this.selectionMapping != null && this.selectionMapping.size() == 1) {
        // test if this has an aggregation key: AVG, COUNT, MAX, MIN, SUM
        final String aggregator = this.selectionMapping.keySet().iterator().next();
        final String aggregator_as = this.selectionMapping.get(aggregator);
        if (aggregator.startsWith("COUNT(") && aggregator.endsWith(")")) { // TODO: there should be a special pattern for this to make it more efficient
            return a.put(new JSONObject().put(aggregator_as, choices.length()));
        }
        if (aggregator.startsWith("MAX(") && aggregator.endsWith(")")) {
            final AtomicDouble max = new AtomicDouble(Double.MIN_VALUE);
            String c = aggregator.substring(4, aggregator.length() - 1);
            choices.forEach(json -> max.set(Math.max(max.get(), ((JSONObject) json).getDouble(c))));
            return a.put(new JSONObject().put(aggregator_as, max.get()));
        }
        if (aggregator.startsWith("MIN(") && aggregator.endsWith(")")) {
            final AtomicDouble min = new AtomicDouble(Double.MAX_VALUE);
            String c = aggregator.substring(4, aggregator.length() - 1);
            choices.forEach(json -> min.set(Math.min(min.get(), ((JSONObject) json).getDouble(c))));
            return a.put(new JSONObject().put(aggregator_as, min.get()));
        }
        if (aggregator.startsWith("SUM(") && aggregator.endsWith(")")) {
            final AtomicDouble sum = new AtomicDouble(0.0d);
            String c = aggregator.substring(4, aggregator.length() - 1);
            choices.forEach(json -> sum.addAndGet(((JSONObject) json).getDouble(c)));
            return a.put(new JSONObject().put(aggregator_as, sum.get()));
        }
        if (aggregator.startsWith("AVG(") && aggregator.endsWith(")")) {
            final AtomicDouble sum = new AtomicDouble(0.0d);
            String c = aggregator.substring(4, aggregator.length() - 1);
            choices.forEach(json -> sum.addAndGet(((JSONObject) json).getDouble(c)));
            return a.put(new JSONObject().put(aggregator_as, sum.get() / choices.length()));
        }
    }
    if (this.selectionMapping != null && this.selectionMapping.size() == 2) {
        Iterator<String> ci = this.selectionMapping.keySet().iterator();
        String aggregator = ci.next();
        String column = ci.next();
        if (column.indexOf('(') >= 0) {
            String s = aggregator;
            aggregator = column;
            column = s;
        }
        final String aggregator_as = this.selectionMapping.get(aggregator);
        final String column_as = this.selectionMapping.get(column);
        final String column_final = column;
        if (aggregator.startsWith("PERCENT(") && aggregator.endsWith(")")) {
            final AtomicDouble sum = new AtomicDouble(0.0d);
            String c = aggregator.substring(8, aggregator.length() - 1);
            choices.forEach(json -> sum.addAndGet(((JSONObject) json).getDouble(c)));
            choices.forEach(json -> a.put(
                    new JSONObject().put(aggregator_as, 100.0d * ((JSONObject) json).getDouble(c) / sum.get())
                            .put(column_as, ((JSONObject) json).get(column_final))));
            return a;
        }
    }
    for (Object json : choices) {
        JSONObject extraction = this.extract((JSONObject) json);
        if (extraction.length() > 0)
            a.put(extraction);
    }
    return a;
}

From source file:ai.susi.mind.SusiTransfer.java

/**
 * A conclusion from choices is drawn by applying a function to the choice set.
 * This may be done, e.g., by counting the number of choices or by extracting a maximum element.
 * @param choices the given set of json objects from the data object of a SusiThought
 * @return an array of json objects which are the extraction of the given choices according to the given mapping
 */
public JSONArray conclude(JSONArray choices) {
    JSONArray a = new JSONArray();
    if (this.selectionMapping != null && this.selectionMapping.size() == 1) {
        // test if this has an aggregation key: AVG, COUNT, MAX, MIN, SUM
        final String aggregator = this.selectionMapping.keySet().iterator().next();
        final String aggregator_as = this.selectionMapping.get(aggregator);
        if (aggregator.startsWith("COUNT(") && aggregator.endsWith(")")) { // TODO: there should be a special pattern for this to make it more efficient
            return a.put(new JSONObject().put(aggregator_as, choices.length()));
        }
        if (aggregator.startsWith("MAX(") && aggregator.endsWith(")")) {
            final AtomicDouble max = new AtomicDouble(Double.MIN_VALUE);
            String c = aggregator.substring(4, aggregator.length() - 1);
            choices.forEach(json -> max.set(Math.max(max.get(), ((JSONObject) json).getDouble(c))));
            return a.put(new JSONObject().put(aggregator_as, max.get()));
        }
        if (aggregator.startsWith("MIN(") && aggregator.endsWith(")")) {
            final AtomicDouble min = new AtomicDouble(Double.MAX_VALUE);
            String c = aggregator.substring(4, aggregator.length() - 1);
            choices.forEach(json -> min.set(Math.min(min.get(), ((JSONObject) json).getDouble(c))));
            return a.put(new JSONObject().put(aggregator_as, min.get()));
        }
        if (aggregator.startsWith("SUM(") && aggregator.endsWith(")")) {
            final AtomicDouble sum = new AtomicDouble(0.0d);
            String c = aggregator.substring(4, aggregator.length() - 1);
            choices.forEach(json -> sum.addAndGet(((JSONObject) json).getDouble(c)));
            return a.put(new JSONObject().put(aggregator_as, sum.get()));
        }
        if (aggregator.startsWith("AVG(") && aggregator.endsWith(")")) {
            final AtomicDouble sum = new AtomicDouble(0.0d);
            String c = aggregator.substring(4, aggregator.length() - 1);
            choices.forEach(json -> sum.addAndGet(((JSONObject) json).getDouble(c)));
            return a.put(new JSONObject().put(aggregator_as, sum.get() / choices.length()));
        }
    }
    if (this.selectionMapping != null && this.selectionMapping.size() == 2) {
        Iterator<String> ci = this.selectionMapping.keySet().iterator();
        String aggregator = ci.next();
        String column = ci.next();
        if (column.indexOf('(') >= 0) {
            String s = aggregator;
            aggregator = column;
            column = s;
        }
        final String aggregator_as = this.selectionMapping.get(aggregator);
        final String column_as = this.selectionMapping.get(column);
        final String column_final = column;
        if (aggregator.startsWith("PERCENT(") && aggregator.endsWith(")")) {
            final AtomicDouble sum = new AtomicDouble(0.0d);
            String c = aggregator.substring(8, aggregator.length() - 1);
            choices.forEach(json -> sum.addAndGet(((JSONObject) json).getDouble(c)));
            choices.forEach(json -> a.put(
                    new JSONObject().put(aggregator_as, 100.0d * ((JSONObject) json).getDouble(c) / sum.get())
                            .put(column_as, ((JSONObject) json).get(column_final))));
            return a;
        }
    }
    // this.selectionMapping == null -> extract everything
    for (Object json : choices) {
        JSONObject extraction = this.extract((JSONObject) json);
        if (extraction.length() > 0)
            a.put(extraction);
    }
    return a;
}

From source file:com.globocom.grou.report.ts.opentsdb.OpenTSDBClient.java

@SuppressWarnings("unchecked")
@Override
public Map<String, Double> makeReport(Test test) {
    final TreeMap<String, Double> mapOfResult = new TreeMap<>();
    ArrayList<HashMap<String, Object>> metrics = Optional.ofNullable(metrics(test)).orElse(new ArrayList<>());
    metrics.stream().filter(metric -> Objects.nonNull(metric.get("metric"))).forEach(metric -> {
        String key = (String) metric.get("metric");
        String aggr = (String) metric.get("aggr");
        int durationTimeMillis = test.getDurationTimeMillis();
        Map<String, Double> dps = Optional.ofNullable((Map<String, Double>) metric.get("dps"))
                .orElse(Collections.emptyMap());
        final AtomicDouble reduceSum = new AtomicDouble(0.0);
        final AtomicDouble reduceMax = new AtomicDouble(0.0);
        dps.entrySet().stream().mapToDouble(Map.Entry::getValue).forEach(delta -> {
            reduceSum.addAndGet(delta);
            if (reduceMax.get() < delta)
                reduceMax.set(delta);
        });
        double value = reduceSum.get();
        double max = reduceMax.get();
        if (!Double.isNaN(value)) {
            if ("sum".equals(aggr)) {
                int durationTimeSecs = durationTimeMillis / 1000;
                double avg = value / (double) durationTimeSecs;
                mapOfResult.put(key + " (total)", formatValue(value));
                mapOfResult.put(key + " (avg tps)", formatValue(avg));
                mapOfResult.put(key + " (max tps)",
                        formatValue(max / Math.max(1.0, (double) durationTimeSecs / (double) NUM_SAMPLES)));
            } else {
                value = value / (double) dps.size();
                mapOfResult.put(key, formatValue(value));
            }
        }
    });
    if (mapOfResult.isEmpty())
        LOGGER.error("Test {}.{}: makeReport return NULL", test.getProject(), test.getName());
    return mapOfResult;
}
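
The reduceMax update above (a get() followed by a conditional set()) is only safe because the stream it runs in is sequential. If the same accumulator were shared between threads, a compareAndSet loop keeps the running maximum correct. A small self-contained sketch, with names that are illustrative rather than taken from the project above:

import com.google.common.util.concurrent.AtomicDouble;

public class AtomicMaxSketch {
    // Raise 'max' to at least 'candidate'; retries until the CAS wins or the
    // current value is already >= candidate.
    static void setToMax(AtomicDouble max, double candidate) {
        double current;
        while (candidate > (current = max.get())) {
            if (max.compareAndSet(current, candidate)) {
                return;
            }
        }
    }

    public static void main(String[] args) {
        AtomicDouble reduceMax = new AtomicDouble(0.0);
        for (double delta : new double[] { 1.0, 3.5, 2.0 }) {
            setToMax(reduceMax, delta);
        }
        System.out.println(reduceMax.get()); // 3.5
    }
}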

From source file:pt.ua.ri.search.ProximitySearch.java

@Override
public Iterable<Result> search(String query) {

    String nquery = null;
    int dist = 1;

    Matcher m = queryPattern.matcher(query);
    if (m.matches()) {
        nquery = m.group("query");
        try {
            dist = Integer.parseInt(m.group("dist"));
        } catch (NumberFormatException ex) {
            dist = 1;
        }
    }

    if (nquery == null) {
        return super.search(query);
    }

    List<Result> ret = new ArrayList<>();
    Map<String, IndexTuple> tokenInfos = new HashMap<>();
    TObjectFloatHashMap<String> palavrasNLize = new TObjectFloatHashMap<>();
    TIntFloatMap docsnLize = new TIntFloatHashMap();
    tok.setText(query);

    List<String> palavras = new ArrayList<>();
    while (tok.hasNext()) {
        palavras.add(tok.next().getString());
    }

    final AtomicDouble queryLength = new AtomicDouble(0.0);

    for (String palavra : palavras) {
        index.get(palavra).ifPresent(postingList -> {
            tokenInfos.put(palavra, postingList);
            int df = postingList.getDocumentFrequency();
            float idf = (float) (Math.log(index.numberOfDocuments()) - Math.log(df));
            queryLength.addAndGet(idf * idf);
            palavrasNLize.put(palavra, idf);
        });
    }

    queryLength.set(Math.sqrt(queryLength.doubleValue()));
    palavrasNLize.transformValues(new TransformationFunction(queryLength.floatValue()));
    Iterator<String> it = palavras.iterator();

    // for each pair of consecutive words
    if (it.hasNext()) {
        String p_actual;
        String p_next = it.next();
        ProximityIndexTuple t_ac;
        ProximityIndexTuple t_nx = (ProximityIndexTuple) tokenInfos.get(p_next);
        while (it.hasNext()) {
            p_actual = p_next;
            p_next = it.next();
            t_ac = t_nx;
            t_nx = (ProximityIndexTuple) tokenInfos.get(p_next);
            // get all documents from both words
            // see documents in common

            Collection<Integer> isect = CollectionUtils.intersection(t_ac.getDocumentsID(),
                    t_nx.getDocumentsID());

            // for each document get positions of words

            for (int doc_id : isect) {
                Iterable<Integer> lp_ac = t_ac.getDocumentPositions(doc_id);
                Iterable<Integer> lp_nx = t_nx.getDocumentPositions(doc_id);

                Iterator<Integer> it_ac = lp_ac.iterator();
                Iterator<Integer> it_nx = lp_nx.iterator();

                if (!it_ac.hasNext() || !it_nx.hasNext()) {
                    break;
                }

                int pos_ac = it_ac.next(), pos_nx = it_nx.next();
                float score = docsnLize.containsKey(doc_id) ? docsnLize.get(doc_id) : 0;

                score += comparePos(pos_ac, pos_nx, dist, doc_id, palavrasNLize, p_actual, p_next, t_ac, t_nx);

                while (score <= 0.0f && (it_ac.hasNext() || it_nx.hasNext())) {
                    if (pos_ac < pos_nx) {
                        if (it_ac.hasNext()) {
                            pos_ac = it_ac.next();
                        } else {
                            pos_nx = it_nx.next();
                        }
                    } else {
                        if (it_nx.hasNext()) {
                            pos_nx = it_nx.next();
                        } else {
                            pos_ac = it_ac.next();
                        }
                    }

                    score += comparePos(pos_ac, pos_nx, dist, doc_id, palavrasNLize, p_actual, p_next, t_ac,
                            t_nx);
                }
                if (score > 0.0f) {
                    docsnLize.put(doc_id, score);
                }
            }
        }
    }

    docsnLize.forEachEntry((int doc_id, float score) -> ret.add(new SimpleResult(doc_id, score)));
    Collections.sort(ret);
    return ret;
}

From source file:blusunrize.immersiveengineering.api.ApiUtils.java

public static Connection raytraceWires(World world, Vec3d start, Vec3d end, @Nullable Connection ignored) {
    Map<BlockPos, ImmersiveNetHandler.BlockWireInfo> inDim = ImmersiveNetHandler.INSTANCE.blockWireMap
            .lookup(world.provider.getDimension());
    AtomicReference<Connection> ret = new AtomicReference<>();
    AtomicDouble minDistSq = new AtomicDouble(Double.POSITIVE_INFINITY);
    if (inDim != null) {
        Utils.rayTrace(start, end, world, (pos) -> {
            if (inDim.containsKey(pos)) {
                ImmersiveNetHandler.BlockWireInfo info = inDim.get(pos);
                for (int i = 0; i < 2; i++) {
                    Set<Triple<Connection, Vec3d, Vec3d>> conns = i == 0 ? info.in : info.near;
                    for (Triple<Connection, Vec3d, Vec3d> conn : conns) {
                        Connection c = conn.getLeft();
                        if (ignored == null || !c.hasSameConnectors(ignored)) {
                            Vec3d startRelative = start.add(-pos.getX(), -pos.getY(), -pos.getZ());
                            Vec3d across = conn.getRight().subtract(conn.getMiddle());
                            double t = Utils.getCoeffForMinDistance(startRelative, conn.getMiddle(), across);
                            t = MathHelper.clamp(0, t, 1);
                            Vec3d closest = conn.getMiddle().add(t * across.x, t * across.y, t * across.z);
                            double distSq = closest.squareDistanceTo(startRelative);
                            if (distSq < minDistSq.get()) {
                                ret.set(c);
                                minDistSq.set(distSq);
                            }
                        }
                    }
                }
            }
        });
    }

    return ret.get();
}

From source file:com.facebook.stats.QuantileDigest.java

public synchronized List<Bucket> getHistogram(List<Long> bucketUpperBounds) {
    checkArgument(Ordering.natural().isOrdered(bucketUpperBounds),
            "buckets must be sorted in increasing order");

    final ImmutableList.Builder<Bucket> builder = ImmutableList.builder();
    final PeekingIterator<Long> iterator = Iterators.peekingIterator(bucketUpperBounds.iterator());

    final AtomicDouble sum = new AtomicDouble();
    final AtomicDouble lastSum = new AtomicDouble();

    // for computing the weighted average of values in each bucket
    final AtomicDouble bucketWeightedSum = new AtomicDouble();

    final double normalizationFactor = weight(TimeUnit.MILLISECONDS.toSeconds(clock.getMillis()));

    postOrderTraversal(root, new Callback() {
        public boolean process(Node node) {

            while (iterator.hasNext() && iterator.peek() <= node.getUpperBound()) {
                double bucketCount = sum.get() - lastSum.get();

                Bucket bucket = new Bucket(bucketCount / normalizationFactor,
                        bucketWeightedSum.get() / bucketCount);

                builder.add(bucket);
                lastSum.set(sum.get());
                bucketWeightedSum.set(0);
                iterator.next();
            }

            bucketWeightedSum.addAndGet(node.getMiddle() * node.weightedCount);
            sum.addAndGet(node.weightedCount);
            return iterator.hasNext();
        }
    });

    while (iterator.hasNext()) {
        double bucketCount = sum.get() - lastSum.get();
        Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);

        builder.add(bucket);

        iterator.next();
    }

    return builder.build();
}