Example usage for org.apache.commons.math.stat.descriptive SummaryStatistics getN

List of usage examples for org.apache.commons.math.stat.descriptive SummaryStatistics getN

Introduction

On this page you can find example usage of org.apache.commons.math.stat.descriptive SummaryStatistics getN.

Prototype

public long getN() 

Source Link

Document

Returns the number of available values

Usage

From source file:com.yahoo.ycsb.measurements.OneMeasurementStatistics.java

/**
 * Calculates the 95% confidence interval half-width. If fewer than 30
 * measurements were recorded, 0.0 is returned, because the normal
 * approximation (z = 1.960) is unreliable for small samples.
 *
 * @param summaryStatistics The statistics object holding the recorded values
 * @return The 95% confidence interval width, or 0.0 if summaryStatistics.getN()
 *         is less than 30
 */
public static double get95ConfidenceIntervalWidth(SummaryStatistics summaryStatistics) {
    // Enforce the documented contract: the original code skipped this check,
    // contradicting its own javadoc and producing NaN when N == 0
    // (division by Math.sqrt(0)).
    if (summaryStatistics.getN() < 30) {
        return 0.0;
    }
    double a = 1.960; // z-value for a 95% confidence level (normal distribution)
    return a * summaryStatistics.getStandardDeviation() / Math.sqrt(summaryStatistics.getN());
}

From source file:de.escidoc.core.om.performance.Statistics.java

/**
 * Looks up the statistics recorded under the given key, creating a fresh
 * (empty) SummaryStatistics when none exists yet or when the current one
 * has already accumulated at least {@code maxValues} samples.
 *
 * @param key the name of package.class.method
 * @return the Statistics of the method
 */
private SummaryStatistics getStatistics(final String key) {
    SummaryStatistics current = statisticsMap.get(key);
    final boolean missing = current == null;
    if (missing || current.getN() >= (long) this.maxValues) {
        // Roll over to a new accumulator; the old one (if any) is replaced.
        current = new SummaryStatistics();
        statisticsMap.put(key, current);
    }
    return current;
}

From source file:de.escidoc.core.om.performance.Statistics.java

/**
 * @return the statistics of all measured methods.
 */
@ManagedAttribute(description = "Get all currently available statistics")
public String getKeys() {
    final StringBuilder out = new StringBuilder();
    for (final String key : this.statisticsMap.keySet()) {
        final SummaryStatistics stats = getStatistics(key);
        if (stats != null) {
            // One line per method: count, min, max, mean, stddev, total.
            out.append(key);
            out.append(", #:").append(stats.getN());
            out.append(", min (ms):").append((long) stats.getMin());
            out.append(", max (ms):").append((long) stats.getMax());
            out.append(", mean (ms):").append((long) stats.getMean());
            out.append(", stddev (ms):").append((long) stats.getStandardDeviation());
            out.append(", total (ms):").append((long) stats.getSum());
            out.append('\n');
        }
    }
    // NOTE(review): explicit GC requests are normally discouraged; kept here
    // to preserve the original behavior of this management operation.
    System.gc();
    return out.toString();
}

From source file:boa.aggregators.ConfidenceIntervalAggregator.java

/** {@inheritDoc} */
@Override
public void finish() throws IOException, InterruptedException {
    if (this.isCombining()) {
        // Combiner phase: serialize the histogram as "key:count;" pairs so the
        // reducer can merge partial maps. StringBuilder avoids the O(n^2) cost
        // of repeated String concatenation in a loop.
        final StringBuilder s = new StringBuilder();
        for (final Long key : map.keySet())
            s.append(key).append(':').append(map.get(key)).append(';');
        this.collect(s.toString(), null);
        return;
    }

    try {
        // Expand the histogram back into individual samples.
        final SummaryStatistics summaryStatistics = new SummaryStatistics();

        for (final Long key : map.keySet())
            for (int i = 0; i < map.get(key); i++)
                summaryStatistics.addValue(key);

        // Student's t critical value with N-1 degrees of freedom.
        // n presumably encodes the requested confidence level (1 - n/200 tail)
        // — TODO confirm against the aggregator's constructor.
        final double a = new TDistributionImpl(summaryStatistics.getN() - 1)
                .inverseCumulativeProbability(1.0 - n / 200.0);

        // Half-width of the confidence interval: t * stddev / sqrt(N).
        this.collect(a * summaryStatistics.getStandardDeviation() / Math.sqrt(summaryStatistics.getN()));
    } catch (final MathException e) {
        // NOTE(review): the exception is silently swallowed and no value is
        // collected; consider logging or rethrowing so failures are visible.
    }
}

From source file:boa.aggregators.StatisticsAggregator.java

/** {@inheritDoc} */
@Override
public void finish() throws IOException, InterruptedException {
    // Combiner phase: serialize the value histogram as "key:count;" pairs
    // for the reducer to merge.
    if (this.isCombining()) {
        String s = "";
        for (final Long key : map.keySet())
            s += key + ":" + map.get(key) + ";";
        this.collect(s, null);
        return;
    }

    // Median: walk the histogram (keys are presumably iterated in sorted
    // order — TODO confirm map is a SortedMap) until the cumulative count
    // crosses the midpoint.
    float median = 0;

    long medianPos = count / 2L;
    long curPos = 0;
    long prevPos = 0;
    long prevKey = 0;

    for (final Long key : map.keySet()) {
        curPos = prevPos + map.get(key);

        if (prevPos <= medianPos && medianPos < curPos) {
            // Midpoint falls exactly on a bucket boundary of an even total:
            // average the two middle values; otherwise this bucket's key is
            // the median.
            if (curPos % 2 == 0 && prevPos == medianPos)
                median = (float) (key + prevKey) / 2.0f;
            else
                median = key;
            break;
        }

        prevKey = key;
        prevPos = curPos;
    }

    // Raw power sums over all samples, weighted by occurrence count:
    // s1 = sum(x), s2 = sum(x^2), s3 = sum(x^3), s4 = sum(x^4).
    double s1 = 0;
    double s2 = 0;
    double s3 = 0;
    double s4 = 0;

    final SummaryStatistics summaryStatistics = new SummaryStatistics();

    for (final Long key : map.keySet()) {
        s1 += key * map.get(key);
        s2 += key * key * map.get(key);
        s3 += key * key * key * map.get(key);
        s4 += key * key * key * key * map.get(key);
        // Also feed the samples into SummaryStatistics for the CI below.
        for (int i = 0; i < map.get(key); i++)
            summaryStatistics.addValue(key);
    }

    // Moment-based estimates from the power sums: sample mean, unbiased
    // sample variance, then skewness and kurtosis.
    final double mean = s1 / (double) count;
    final double var = s2 / (double) (count - 1) - s1 * s1 / (double) (count * (count - 1));
    final double stdev = Math.sqrt(var);
    final double skewness = (s3 - 3 * s1 * s2 / (double) count + s1 * s1 * s1 * 2 / (count * count))
            / (count * stdev * var);
    final double kurtosis = (s4 - s3 * s1 * 4 / count + s2 * s1 * s1 * 6 / (double) (count * count)
            - s1 * s1 * s1 * s1 * 3 / (double) (count * count * count)) / (count * var * var);

    // Confidence interval half-width via Student's t distribution with
    // N-1 degrees of freedom (0.025 tail): t * stddev / sqrt(N).
    double ci = 0.0;
    try {
        final TDistributionImpl tDist = new TDistributionImpl(summaryStatistics.getN() - 1);
        final double a = tDist.inverseCumulativeProbability(1.0 - 0.025);
        ci = a * summaryStatistics.getStandardDeviation() / Math.sqrt(summaryStatistics.getN());
    } catch (final MathException e) {
        // NOTE(review): failure is silently ignored and ci stays 0.0.
    }

    this.collect(s1 + ", " + mean + ", " + median + ", " + stdev + ", " + var + ", " + kurtosis + ", "
            + skewness + ", " + ci);
}

From source file:com.netflix.curator.framework.recipes.atomic.TestDistributedAtomicLong.java

@Test
public void testSimulation() throws Exception {
    final int threadCount = 20;
    final int executionsPerThread = 50;

    // Shared counters updated concurrently by all simulation workers.
    final AtomicInteger optimisticTries = new AtomicInteger();
    final AtomicInteger promotedLockTries = new AtomicInteger();
    final AtomicInteger failures = new AtomicInteger();
    final AtomicInteger errors = new AtomicInteger();

    // Thread-safe statistics collector for per-operation timings.
    final SummaryStatistics timingStats = new SynchronizedSummaryStatistics();
    ExecutorService pool = Executors.newFixedThreadPool(threadCount);
    List<Future<Void>> futures = Lists.newArrayList();
    for (int i = 0; i < threadCount; ++i) {
        futures.add(pool.submit(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                doSimulation(executionsPerThread, timingStats, optimisticTries, promotedLockTries, failures,
                        errors);
                return null;
            }
        }));
    }

    // Wait for every worker; get() propagates any exception thrown inside.
    for (Future<Void> future : futures) {
        future.get();
    }

    System.out.println("OptimisticTries: " + optimisticTries.get());
    System.out.println("PromotedLockTries: " + promotedLockTries.get());
    System.out.println("Failures: " + failures.get());
    System.out.println("Errors: " + errors.get());
    System.out.println();

    System.out.println("Avg time: " + timingStats.getMean());
    System.out.println("Max time: " + timingStats.getMax());
    System.out.println("Min time: " + timingStats.getMin());
    System.out.println("Qty: " + timingStats.getN());

    Assert.assertEquals(errors.get(), 0);
    Assert.assertTrue(optimisticTries.get() > 0);
    Assert.assertTrue(promotedLockTries.get() > 0);
}

From source file:org.apache.hadoop.hive.ql.exec.tez.TestHostAffinitySplitLocationProvider.java

/**
 * Exercises HostAffinitySplitLocationProvider.determineLocation over the given
 * splits with a mocked location list and returns the coefficient of variation
 * of the per-location hit counts; increments errorCount when the distribution
 * looks skewed.
 */
private double testHashDistribution(int locs, final int missCount, FileSplit[] splits,
        AtomicInteger errorCount) {
    // This relies heavily on what method determineSplits ... calls and doesn't.
    // We could do a wrapper with only size() and get() methods instead of List, to be sure.
    @SuppressWarnings("unchecked")
    List<String> partLocs = (List<String>) Mockito.mock(List.class);
    Mockito.when(partLocs.size()).thenReturn(locs);
    final AtomicInteger state = new AtomicInteger(0);
    // The mocked get() yields a non-null location only on the missCount-th
    // call, simulating missCount null lookups before a "hit".
    Mockito.when(partLocs.get(Mockito.anyInt())).thenAnswer(new Answer<String>() {
        @Override
        public String answer(InvocationOnMock invocation) throws Throwable {
            return (state.getAndIncrement() == missCount) ? "not-null" : null;
        }
    });
    int[] hitCounts = new int[locs];
    for (int splitIx = 0; splitIx < splits.length; ++splitIx) {
        state.set(0); // reset the mock's call counter for each split
        int index = HostAffinitySplitLocationProvider.determineLocation(partLocs,
                splits[splitIx].getPath().toString(), splits[splitIx].getStart(), null);
        ++hitCounts[index];
    }
    SummaryStatistics ss = new SummaryStatistics();
    for (int hitCount : hitCounts) {
        ss.addValue(hitCount);
    }
    // All of this is completely bogus and mostly captures the following function:
    // f(output) = I-eyeballed-the(output) == they-look-ok.
    // It's pretty much a golden file...
    // The fact that stdev doesn't increase with increasing missCount is captured outside.
    double avg = ss.getSum() / ss.getN(), stdev = ss.getStandardDeviation(), cv = stdev / avg;
    double allowedMin = avg - 2.5 * stdev, allowedMax = avg + 2.5 * stdev;
    if (allowedMin > ss.getMin() || allowedMax < ss.getMax() || cv > 0.22) {
        LOG.info("The distribution for " + locs + " locations, " + missCount + " misses isn't to "
                + "our liking: avg " + avg + ", stdev " + stdev + ", cv " + cv + ", min " + ss.getMin()
                + ", max " + ss.getMax());
        errorCount.incrementAndGet();
    }
    return cv;
}

From source file:org.apache.mahout.freqtermsets.fpgrowth.FPGrowth.java

/**
 * Computes the half-width of a two-sided confidence interval for the mean
 * using Student's t distribution: t * stddev / sqrt(N).
 *
 * @param summaryStatistics the accumulated samples
 * @param significance significance level alpha (e.g. 0.05 for a 95% interval)
 * @return the half-width of the (1 - significance) confidence interval
 * @throws MathException if the inverse CDF cannot be evaluated
 */
private double getConfidenceIntervalHalfWidth(SummaryStatistics summaryStatistics, double significance)
        throws MathException {
    final long sampleCount = summaryStatistics.getN();
    final TDistributionImpl tDistribution = new TDistributionImpl(sampleCount - 1);
    final double criticalValue = tDistribution.inverseCumulativeProbability(1.0 - significance / 2);
    return criticalValue * summaryStatistics.getStandardDeviation() / Math.sqrt(sampleCount);
}

From source file:org.apache.sling.engine.impl.RequestProcessorMBeanImplTest.java

/**
 * Asserts that the simple standard deviation algorithm used by the
 * RequestProcessorMBeanImpl is equivalent to the Commons Math
 * SummaryStatistics implementation.
 *
 * It also tests that the resetStatistics method actually resets all the statistics.
 *
 * @throws NotCompliantMBeanException not expected
 */
@Test
public void test_statistics() throws NotCompliantMBeanException {
    final SummaryStatistics durationStats = new SummaryStatistics();
    final SummaryStatistics servletCallCountStats = new SummaryStatistics();
    final SummaryStatistics peakRecursionDepthStats = new SummaryStatistics();
    final RequestProcessorMBeanImpl bean = new RequestProcessorMBeanImpl();

    // A fresh bean must report the "no data yet" sentinels for everything.
    assertEquals(0l, bean.getRequestsCount());
    assertEquals(Long.MAX_VALUE, bean.getMinRequestDurationMsec());
    assertEquals(0l, bean.getMaxRequestDurationMsec());
    assertEquals(0.0, bean.getMeanRequestDurationMsec(), 0);
    assertEquals(0.0, bean.getStandardDeviationDurationMsec(), 0);

    assertEquals(Integer.MAX_VALUE, bean.getMinServletCallCount());
    assertEquals(0l, bean.getMaxServletCallCount());
    assertEquals(0.0, bean.getMeanServletCallCount(), 0);
    assertEquals(0.0, bean.getStandardDeviationServletCallCount(), 0);

    assertEquals(Integer.MAX_VALUE, bean.getMinPeakRecursionDepth());
    assertEquals(0l, bean.getMaxPeakRecursionDepth());
    assertEquals(0.0, bean.getMeanPeakRecursionDepth(), 0);
    assertEquals(0.0, bean.getStandardDeviationPeakRecursionDepth(), 0);

    // Feed identical random samples into both the reference statistics and
    // the bean under test.
    final Random random = new Random(System.currentTimeMillis() / 17);
    final int num = 10000;
    final int min = 85;
    final int max = 250;
    for (int i = 0; i < num; i++) {
        final long durationValue = min + random.nextInt(max - min);
        final int callCountValue = min + random.nextInt(max - min);
        final int peakRecursionDepthValue = min + random.nextInt(max - min);
        durationStats.addValue(durationValue);
        servletCallCountStats.addValue(callCountValue);
        peakRecursionDepthStats.addValue(peakRecursionDepthValue);

        final RequestData requestData = context.mock(RequestData.class, "requestData" + i);
        context.checking(new Expectations() {
            {
                one(requestData).getElapsedTimeMsec();
                will(returnValue(durationValue));

                one(requestData).getServletCallCount();
                will(returnValue(callCountValue));

                one(requestData).getPeakRecusionDepth();
                will(returnValue(peakRecursionDepthValue));
            }
        });

        bean.addRequestData(requestData);
    }

    assertEquals("Number of points must be the same", durationStats.getN(), bean.getRequestsCount());

    assertEquals("Min Duration must be equal", (long) durationStats.getMin(), bean.getMinRequestDurationMsec());
    assertEquals("Max Duration must be equal", (long) durationStats.getMax(), bean.getMaxRequestDurationMsec());
    assertAlmostEqual("Mean Duration", durationStats.getMean(), bean.getMeanRequestDurationMsec(), num);
    assertAlmostEqual("Standard Deviation Duration", durationStats.getStandardDeviation(),
            bean.getStandardDeviationDurationMsec(), num);

    assertEquals("Min Servlet Call Count must be equal", (long) servletCallCountStats.getMin(),
            bean.getMinServletCallCount());
    assertEquals("Max Servlet Call Count must be equal", (long) servletCallCountStats.getMax(),
            bean.getMaxServletCallCount());
    assertAlmostEqual("Mean Servlet Call Count", servletCallCountStats.getMean(),
            bean.getMeanServletCallCount(), num);
    assertAlmostEqual("Standard Deviation Servlet Call Count", servletCallCountStats.getStandardDeviation(),
            bean.getStandardDeviationServletCallCount(), num);

    assertEquals("Min Peak Recursion Depth must be equal", (long) peakRecursionDepthStats.getMin(),
            bean.getMinPeakRecursionDepth());
    assertEquals("Max Peak Recursion Depth must be equal", (long) peakRecursionDepthStats.getMax(),
            bean.getMaxPeakRecursionDepth());
    assertAlmostEqual("Mean Peak Recursion Depth", peakRecursionDepthStats.getMean(),
            bean.getMeanPeakRecursionDepth(), num);
    assertAlmostEqual("Standard Deviation Peak Recursion Depth", peakRecursionDepthStats.getStandardDeviation(),
            bean.getStandardDeviationPeakRecursionDepth(), num);

    //check method resetStatistics
    //In the previous test, some requests have been processed, now we reset the statistics so every statistic is reinitialized
    bean.resetStatistics();

    //Simulate a single request
    final long durationValue = min + random.nextInt(max - min);
    final int callCountValue = min + random.nextInt(max - min);
    final int peakRecursionDepthValue = min + random.nextInt(max - min);

    final RequestData requestData = context.mock(RequestData.class, "requestDataAfterReset");
    context.checking(new Expectations() {
        {
            one(requestData).getElapsedTimeMsec();
            will(returnValue(durationValue));

            one(requestData).getServletCallCount();
            will(returnValue(callCountValue));

            one(requestData).getPeakRecusionDepth();
            will(returnValue(peakRecursionDepthValue));
        }
    });

    bean.addRequestData(requestData);

    //As only one request has been simulated since resetStatistics: min, max and mean statistics should be equal to the request data
    assertEquals("After resetStatistics Number of requests must be one", 1, bean.getRequestsCount());
    assertEquals("After resetStatistics Min Duration must be equal", bean.getMinRequestDurationMsec(),
            (long) durationValue);
    assertEquals("After resetStatistics Max Duration must be equal", bean.getMaxRequestDurationMsec(),
            (long) durationValue);
    assertEquals("After resetStatistics Mean Duration must be equal", bean.getMeanRequestDurationMsec(),
            (double) durationValue, 0d);

    assertEquals("After resetStatistics Min Servlet Call Count must be equal", bean.getMinServletCallCount(),
            callCountValue);
    assertEquals("After resetStatistics Max Servlet Call Count must be equal", bean.getMaxServletCallCount(),
            callCountValue);
    assertEquals("After resetStatistics Mean Servlet Call Count", bean.getMeanServletCallCount(),
            (double) callCountValue, 0d);

    assertEquals("After resetStatistics Min Peak Recursion Depth must be equal",
            bean.getMinPeakRecursionDepth(), peakRecursionDepthValue);
    // FIX: this assertion previously re-checked getMinPeakRecursionDepth(),
    // leaving the post-reset maximum untested; it must use the Max accessor.
    assertEquals("After resetStatistics Max Peak Recursion Depth must be equal",
            bean.getMaxPeakRecursionDepth(), peakRecursionDepthValue);
    assertEquals("After resetStatistics Mean Peak Recursion Depth", bean.getMeanPeakRecursionDepth(),
            (double) peakRecursionDepthValue, 0d);
}

From source file:se.sics.gvod.simulator.vod.VodSimulator.java

private void logStatistics() {
    //        snapshot.generateGraphVizReport();
    SummaryStatistics downloadTimeNotFree = new SummaryStatistics();
    SummaryStatistics downloadTime99NotFree = new SummaryStatistics();
    SummaryStatistics nbBufferingNotFree = new SummaryStatistics();
    SummaryStatistics nbNBufferingNotFree = new SummaryStatistics();
    SummaryStatistics statsWaitingNotFree = new SummaryStatistics();
    SummaryStatistics downloadTimeFree = new SummaryStatistics();
    SummaryStatistics downloadTime99Free = new SummaryStatistics();
    SummaryStatistics nbBufferingFree = new SummaryStatistics();
    SummaryStatistics nbNBufferingFree = new SummaryStatistics();
    SummaryStatistics statsWaitingFree = new SummaryStatistics();
    SummaryStatistics nbWaiting = new SummaryStatistics();
    SummaryStatistics nbMisConnect = new SummaryStatistics();

    Map<Long, SummaryStatistics> downloadNotFree = new HashMap<Long, SummaryStatistics>();
    Map<Long, SummaryStatistics> download99NotFree = new HashMap<Long, SummaryStatistics>();
    Map<Long, ArrayList<Long>> list99NotFree = new HashMap<Long, ArrayList<Long>>();
    Map<Long, SummaryStatistics> totalUploadUse = new HashMap<Long, SummaryStatistics>();
    Map<Long, SummaryStatistics> waitingNotFree = new HashMap<Long, SummaryStatistics>();
    Map<Long, SummaryStatistics> bufferingNotFree = new HashMap<Long, SummaryStatistics>();
    Map<Long, SummaryStatistics> notBufferingNotFree = new HashMap<Long, SummaryStatistics>();
    Map<Long, SummaryStatistics> downloadFree = new HashMap<Long, SummaryStatistics>();
    Map<Long, SummaryStatistics> download99Free = new HashMap<Long, SummaryStatistics>();
    Map<Long, ArrayList<Long>> list99Free = new HashMap<Long, ArrayList<Long>>();
    Map<Long, SummaryStatistics> waitingFree = new HashMap<Long, SummaryStatistics>();
    Map<Long, SummaryStatistics> bufferingFree = new HashMap<Long, SummaryStatistics>();
    Map<Long, SummaryStatistics> notBufferingFree = new HashMap<Long, SummaryStatistics>();

    ArrayList<Long> temp = new ArrayList<Long>();
    long totalDownload = 0;
    long totalUpload = 0;
    for (Integer node : nodeDownloadTimeNotFree.keySet()) {
        downloadTimeNotFree.addValue(nodeDownloadTimeNotFree.get(node));
        temp.add(nodeDownloadTimeNotFree.get(node));
        if (!downloadNotFree.containsKey(downloadLink.get(node).getCapacity())) {
            downloadNotFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            download99NotFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            list99NotFree.put(downloadLink.get(node).getCapacity(), new ArrayList<Long>());
            totalUploadUse.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            waitingNotFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            bufferingNotFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            notBufferingNotFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
        }/* ww  w.  ja  v  a 2s.  com*/
        downloadNotFree.get(downloadLink.get(node).getCapacity()).addValue(nodeDownloadTimeNotFree.get(node));
        list99NotFree.get(downloadLink.get(node).getCapacity()).add(nodeDownloadTimeNotFree.get(node));
        totalUploadUse.get(downloadLink.get(node).getCapacity())
                .addValue(uploadLink.get(node).getBwUsePs() / 1024);
        waitingNotFree.get(downloadLink.get(node).getCapacity()).addValue(nodeWaitingTimeNotFree.get(node));
        bufferingNotFree.get(downloadLink.get(node).getCapacity()).addValue(nodeNbBufferingNotFree.get(node));
        if (nodeNbBufferingNotFree.get(node) == 0) {
            notBufferingNotFree.get(downloadLink.get(node).getCapacity()).addValue(1);
        } else {
            notBufferingNotFree.get(downloadLink.get(node).getCapacity()).addValue(0);
        }
        totalDownload += downloadLink.get(node).getBwUse();
        totalUpload += uploadLink.get(node).getBwUse();
    }

    Collections.sort(temp);
    int i = temp.size() * 5 / 100;
    while (i > 0) {
        temp.remove(temp.size() - 1);
        i--;
    }
    for (Long val : temp) {
        downloadTime99NotFree.addValue(val);
    }

    for (Long key : list99NotFree.keySet()) {
        Collections.sort(list99NotFree.get(key));
        i = list99NotFree.get(key).size() / 100;
        while (i > 0) {
            long toRemove = list99NotFree.get(key).size() - 1;
            list99NotFree.get(key).remove(toRemove);
            i--;
        }
        for (Long val : list99NotFree.get(key)) {
            download99NotFree.get(key).addValue(val);
        }
    }

    if (downloadLink.get(seedId) != null) {
        totalDownload += downloadLink.get(seedId).getBwUse();
        totalUpload += uploadLink.get(seedId).getBwUse();

        if (totalUploadUse.get(downloadLink.get(seedId).getCapacity()) == null) {
            totalUploadUse.put(downloadLink.get(seedId).getCapacity(), new SummaryStatistics());
        }
        totalUploadUse.get(downloadLink.get(seedId).getCapacity())
                .addValue(uploadLink.get(seedId).getBwUsePs() / 1280);
    }
    for (int bufferingNb : nodeNbBufferingNotFree.values()) {
        nbBufferingNotFree.addValue(bufferingNb);
        if (bufferingNb == 0) {
            nbNBufferingNotFree.addValue(1);
        } else {
            nbNBufferingNotFree.addValue(0);
        }
    }
    for (int waitingTime : nodeNbWaiting.values()) {
        nbWaiting.addValue(waitingTime);
    }

    for (int misConnect : nodeNbMisConnect.values()) {
        nbMisConnect.addValue(misConnect);
    }

    for (long waitingTime : nodeWaitingTimeNotFree.values()) {
        statsWaitingNotFree.addValue(waitingTime);
    }
    int messages = 0;
    long traffic = 0;
    for (ReceivedMessage rm : messageHistogram.values()) {
        messages += rm.getTotalCount();
        traffic += rm.getTotalSize();
    }

    temp = new ArrayList<Long>();
    for (Integer node : nodeDownloadTimeFree.keySet()) {
        downloadTimeFree.addValue(nodeDownloadTimeFree.get(node));
        temp.add(nodeDownloadTimeFree.get(node));
        if (!downloadFree.containsKey(downloadLink.get(node).getCapacity())) {
            downloadFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            download99Free.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            list99Free.put(downloadLink.get(node).getCapacity(), new ArrayList<Long>());
            totalUploadUse.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            waitingFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            bufferingFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
            notBufferingFree.put(downloadLink.get(node).getCapacity(), new SummaryStatistics());
        }
        downloadFree.get(downloadLink.get(node).getCapacity()).addValue(nodeDownloadTimeFree.get(node));
        list99Free.get(downloadLink.get(node).getCapacity()).add(nodeDownloadTimeFree.get(node));
        totalUploadUse.get(downloadLink.get(node).getCapacity())
                .addValue(uploadLink.get(node).getBwUsePs() / 1024);
        waitingFree.get(downloadLink.get(node).getCapacity()).addValue(nodeWaitingTimeFree.get(node));
        bufferingFree.get(downloadLink.get(node).getCapacity()).addValue(nodeNbBufferingFree.get(node));
        if (nodeNbBufferingFree.get(node) == 0) {
            notBufferingFree.get(downloadLink.get(node).getCapacity()).addValue(1);
        } else {
            notBufferingFree.get(downloadLink.get(node).getCapacity()).addValue(0);
        }
        totalDownload += downloadLink.get(node).getBwUse();
        totalUpload += uploadLink.get(node).getBwUse();
    }

    Collections.sort(temp);
    i = temp.size() * 5 / 100;
    while (i > 0) {
        temp.remove(temp.size() - 1);
        i--;
    }
    for (Long val : temp) {
        downloadTime99Free.addValue(val);
    }

    for (Long key : list99Free.keySet()) {
        Collections.sort(list99Free.get(key));
        i = list99Free.get(key).size() / 100;
        while (i > 0) {
            long toRemove = list99Free.get(key).size() - 1;
            list99Free.get(key).remove(toRemove);
            i--;
        }
        for (Long val : list99Free.get(key)) {
            download99Free.get(key).addValue(val);
        }
    }

    for (int bufferingNb : nodeNbBufferingFree.values()) {
        nbBufferingFree.addValue(bufferingNb);
        if (bufferingNb == 0) {
            nbNBufferingFree.addValue(1);
        } else {
            nbNBufferingFree.addValue(0);
        }
    }

    for (long waitingTime : nodeWaitingTimeFree.values()) {
        statsWaitingFree.addValue(waitingTime);
    }

    logger.info("=================================================");
    logger.info("Total Upload : {}", totalUpload);
    logger.info("Total Download : {}", totalDownload);
    logger.info("diff : {}", Math.abs(totalUpload - totalDownload));
    logger.info("=================================================");
    logger.info("NOT FREE");
    logger.info("Number of nodes: {}", downloadTimeNotFree.getN());
    logger.info("Min download time:  {} ms ({})", downloadTimeNotFree.getMin(),
            durationToString(Math.round(downloadTimeNotFree.getMin())));
    logger.info("Max download time:  {} ms ({})", downloadTimeNotFree.getMax(),
            durationToString(Math.round(downloadTimeNotFree.getMax())));
    logger.info("Avg download time:  {} ms ({})", downloadTimeNotFree.getMean(),
            durationToString(Math.round(downloadTimeNotFree.getMean())));
    logger.info("Std download time:  {} ms ({})", downloadTimeNotFree.getStandardDeviation(),
            durationToString(Math.round(downloadTimeNotFree.getStandardDeviation())));
    logger.info("=================================================");
    logger.info("FREE");
    logger.info("Number of nodes: {}", downloadTimeFree.getN());
    logger.info("Min download time:  {} ms ({})", downloadTimeFree.getMin(),
            durationToString(Math.round(downloadTimeFree.getMin())));
    logger.info("Max download time:  {} ms ({})", downloadTimeFree.getMax(),
            durationToString(Math.round(downloadTimeFree.getMax())));
    logger.info("Avg download time:  {} ms ({})", downloadTimeFree.getMean(),
            durationToString(Math.round(downloadTimeFree.getMean())));
    logger.info("Std download time:  {} ms ({})", downloadTimeFree.getStandardDeviation(),
            durationToString(Math.round(downloadTimeFree.getStandardDeviation())));

    int nbNodes = nodeDownloadTimeNotFree.size() + nodeDownloadTimeFree.size();
    try {
        FileWriter writer = new FileWriter("gvod" + VodConfig.PERCENTAGE_FREERIDERS // arg
                + "NotFree_superseed.out", true);
        String text = seed + "\t" + nbNodes + "\t" + downloadTimeNotFree.getMean() + "\t"
                + downloadTimeNotFree.getMax() + "\t" + downloadTimeNotFree.getMin() + "\t"
                + downloadTime99NotFree.getMax() + "\t" + statsWaitingNotFree.getMean() + "\t"
                + nbBufferingNotFree.getMean() + "\t" + nbNBufferingNotFree.getMean() + "\n";
        writer.write(text, 0, text.length());
        writer.close();
    } catch (Exception e) {
        e.printStackTrace();
    }

    try {
        FileWriter writer = new FileWriter("gvod" + VodConfig.PERCENTAGE_FREERIDERS + "Free_superseed.out",
                true);
        String text = seed + "\t" + nbNodes + "\t" + downloadTimeFree.getMean() + "\t"
                + downloadTimeFree.getMax() + "\t" + downloadTimeFree.getMin() + "\t"
                + downloadTime99Free.getMax() + "\t" + statsWaitingFree.getMean() + "\t"
                + nbBufferingFree.getMean() + "\t" + nbNBufferingFree.getMean() + "\n";
        writer.write(text, 0, text.length());
        writer.close();
    } catch (Exception e) {
        e.printStackTrace();
    }

    for (long bW : downloadNotFree.keySet()) {
        logger.info("=================================================");

        SummaryStatistics stats = downloadNotFree.get(bW);
        SummaryStatistics stats99 = download99NotFree.get(bW);
        SummaryStatistics statsUpTotal = totalUploadUse.get(bW);
        SummaryStatistics statsDown = downloadUse.get(bW);
        SummaryStatistics statsUp = uploadUse.get(bW);
        SummaryStatistics statsWait = waitingNotFree.get(bW);
        SummaryStatistics statsBuf = bufferingNotFree.get(bW);
        SummaryStatistics statsNBuf = notBufferingNotFree.get(bW);

        try {
            FileWriter writer = new FileWriter(
                    bW + "gvod" + VodConfig.PERCENTAGE_FREERIDERS + "NotFree_superseed.out", true);
            String text = nbNodes + "\t" + stats.getMean() + "\t" + stats.getMax() + "\t" + stats.getMin()
                    + "\t" + stats99.getMax() + "\t" + statsWait.getMean() + "\t" + statsBuf.getMean() + "\t"
                    + statsNBuf.getMean() + "\n";
            writer.write(text, 0, text.length());
            writer.close();
        } catch (Exception e) {
            e.printStackTrace();
        }

        Map<Integer, SummaryStatistics> map = utilitySetSize.get(bW);

        logger.info(
                "BandWith down {}KB/S, nb of nodes {} ("
                        + ((float) stats.getN() / downloadTimeNotFree.getN()) * 100 + "%)",
                bW / 1024, stats.getN());
        logger.info("Number of nodes: {}", stats.getN());
        logger.info("Min download time:  {} ms ({})", stats.getMin(),
                durationToString(Math.round(stats.getMin())));
        logger.info("Max download time:  {} ms ({})", stats.getMax(),
                durationToString(Math.round(stats.getMax())));
        logger.info("Avg download time:  {} ms ({})", stats.getMean(),
                durationToString(Math.round(stats.getMean())));
        logger.info("Std download time:  {} ms ({})", stats.getStandardDeviation(),
                durationToString(Math.round(stats.getStandardDeviation())));
        logger.info("Avg upload Use Total: {} KBytes/s", statsUpTotal.getMean());
        logger.info("Avg upload Use during download: {} KBytes/s", statsUp.getMean());
        logger.info("Max upload Use during download: {} KBytes/s", statsUp.getMax());
        logger.info("Avg download Use Total during downloag: {} KBytes/s", statsDown.getMean());
        logger.info("Min download Use Total during downloag: {} KBytes/s", statsDown.getMin());
        logger.info("-----------------------------------------------");
        logger.info("Avg buffering Time: {} ms ({})", statsWait.getMean(),
                durationToString(Math.round(statsWait.getMean())));
        logger.info("Avg number of buffering : {}", statsBuf.getMean());
    }

    for (long bW : downloadFree.keySet()) {
        logger.info("=================================================");

        SummaryStatistics stats = downloadFree.get(bW);
        SummaryStatistics stats99 = download99Free.get(bW);
        SummaryStatistics statsUpTotal = totalUploadUse.get(bW);
        SummaryStatistics statsDown = downloadUse.get(bW);
        SummaryStatistics statsUp = uploadUse.get(bW);
        SummaryStatistics statsWait = waitingFree.get(bW);
        SummaryStatistics statsBuf = bufferingFree.get(bW);
        SummaryStatistics statsNBuf = notBufferingFree.get(bW);

        try {
            FileWriter writer = new FileWriter(bW + "gvod" //                        + arg + "Free_superseed.out"
                    , true);
            String text = nbNodes
                    //                        + "\t" + gvodConfiguration.getArg()
                    + "\t" + stats.getMean() + "\t" + stats.getMax() + "\t" + stats.getMin() + "\t"
                    + stats99.getMax() + "\t" + statsWait.getMean() + "\t" + statsBuf.getMean() + "\t"
                    + statsNBuf.getMean() + "\n";
            writer.write(text, 0, text.length());
            writer.close();
        } catch (Exception e) {
            e.printStackTrace();
        }

        Map<Integer, SummaryStatistics> map = utilitySetSize.get(bW);

        //            logger.info("BandWith down {}KB/S, nb of nodes {} (" +
        //                    ((float) stats.getN() / downloadTimeNotFree.getN()) * 100 + "%)", bW / 1024, stats.getN());
        //            logger.info("Number of nodes: {}", stats.getN());
        //            logger.info("Min download time:  {} ms ({})", stats.getMin(),
        //                    durationToString(Math.round(stats.getMin())));
        //            logger.info("Max download time:  {} ms ({})", stats.getMax(),
        //                    durationToString(Math.round(stats.getMax())));
        //            logger.info("Avg download time:  {} ms ({})", stats.getMean(),
        //                    durationToString(Math.round(stats.getMean())));
        //            logger.info("Std download time:  {} ms ({})",
        //                    stats.getStandardDeviation(),
        //                    durationToString(Math.round(stats.getStandardDeviation())));
        //            logger.info("Avg upload Use Total: {} KBytes/s", statsUpTotal.getMean());
        //            logger.info("Avg upload Use during download: {} KBytes/s", statsUp.getMean());
        //            logger.info("Max upload Use during download: {} KBytes/s", statsUp.getMax());
        //            logger.info("Avg download Use Total during downloag: {} KBytes/s", statsDown.getMean());
        //            logger.info("Min download Use Total during downloag: {} KBytes/s", statsDown.getMin());
        //            logger.info("-----------------------------------------------");
        //            logger.info("Avg buffering Time: {} ms ({})", statsWait.getMean(),
        //                    durationToString(Math.round(statsWait.getMean())));
        //            logger.info("Avg number of buffering : {}", statsBuf.getMean());
        //            logger.info("do not buffer : {}%", donotbuffer * 100);
        //            for (int size : map.keySet()) {
        //                logger.info("UtilitySet of Size " + size + " during {}ms ({})", map.get(size).getMean(),
        //                        durationToString(Math.round(map.get(size).getMean())));
        //            }
    }

    logger.info("=================================================");
    logger.info("Min nb of buffering: {}", nbBufferingNotFree.getMin());
    logger.info("Max nb of buffering: {}", nbBufferingNotFree.getMax());
    logger.info("Avg nb of buffering:  {}", nbBufferingNotFree.getMean());
    logger.info("percent of nonbuffering:  {}", nbNBufferingNotFree.getMean());
    logger.info("Std nb of buffering:  {}", nbBufferingNotFree.getStandardDeviation());
    logger.info("=================================================");
    logger.info("Min waiting: {} ms ({})", statsWaitingNotFree.getMin(),
            durationToString(Math.round(statsWaitingNotFree.getMin())));
    logger.info("Max waiting: {} ms ({})", statsWaitingNotFree.getMax(),
            durationToString(Math.round(statsWaitingNotFree.getMax())));
    logger.info("Avg waiting:  {} ms ({})", statsWaitingNotFree.getMean(),
            durationToString(Math.round(statsWaitingNotFree.getMean())));
    logger.info("Avg waiting (free):  {} ms ({})", statsWaitingFree.getMean(),
            durationToString(Math.round(statsWaitingFree.getMean())));
    logger.info("Std of waiting:  {} ms ({})", statsWaitingNotFree.getStandardDeviation(),
            durationToString(Math.round(statsWaitingNotFree.getStandardDeviation())));
    logger.info("=================================================");
    logger.info("Min jumpTime : {} ms ({})", jumpForwardStats.getMin(),
            durationToString(Math.round(jumpForwardStats.getMin())));
    logger.info("Max jumpTime : {} ms ({})", jumpForwardStats.getMax(),
            durationToString(Math.round(jumpForwardStats.getMax())));
    logger.info("Avg jumpTime : {} ms ({})", jumpForwardStats.getMean(),
            durationToString(Math.round(jumpForwardStats.getMean())));
    //        logger.info("=================================================");
    //        logger.info("Min nb of waiting: {}", nbWaiting.getMin());
    //        logger.info("Max nb of waiting: {}", nbWaiting.getMax());
    //        logger.info("Avg nb of waiting:  {}", nbWaiting.getMean());
    //        logger.info("Std nb of waiting:  {}",
    //                nbWaiting.getStandardDeviation());
    //        logger.info("=================================================");
    //        logger.info("Min nb of MisConnect: {}", nbMisConnect.getMin());
    //        logger.info("Max nb of MisConnect: {}", nbMisConnect.getMax());
    //        logger.info("Avg nb of MisConnect:  {}", nbMisConnect.getMean());
    //        logger.info("Std nb of MisConnect:  {}",
    //                nbMisConnect.getStandardDeviation());
    //        logger.info("Total nb of MisConnect:  {}",
    //                nbMisConnect.getN());
    logger.info("=================================================");
    logger.info("Total number of messages: {}", messages);
    logger.info("Total amount of traffic:  {} bytes", traffic);
    for (Map.Entry<Class<? extends RewriteableMsg>, ReceivedMessage> entry : messageHistogram.entrySet()) {
        logger.info("{}: #={}  \t bytes={}", new Object[] { String.format("%22s", entry.getKey().getName()),
                entry.getValue().getTotalCount(), entry.getValue().getTotalSize() });
    }
    logger.info("=================================================");

}