Example usage for org.apache.commons.math3.distribution BinomialDistribution inverseCumulativeProbability

Introduction

On this page you can find example usages of org.apache.commons.math3.distribution BinomialDistribution inverseCumulativeProbability.

Prototype

public int inverseCumulativeProbability(final double p) throws OutOfRangeException 

Document

The default implementation returns:
  • getSupportLowerBound() for p = 0,
  • getSupportUpperBound() for p = 1, and
  • solveInverseCumulativeProbability(double, int, int) for 0 < p < 1.
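
Before the quoted project sources, here is a minimal, self-contained sketch (not drawn from any of the projects listed under Usage) of the pattern most of them use: pass a small tail probability and its complement to inverseCumulativeProbability to build a two-sided acceptance interval for a random count. The class name InverseCdfSketch and the chosen parameters are illustrative assumptions.

import org.apache.commons.math3.distribution.BinomialDistribution;

public class InverseCdfSketch {
    public static void main(String[] args) {
        // 1000 independent trials, each succeeding with probability 2/3
        BinomialDistribution binomial = new BinomialDistribution(1000, 2.0 / 3.0);

        // inverseCumulativeProbability(p) returns the smallest k with P(X <= k) >= p.
        // Using p and 1 - p yields a two-sided interval that an observed count
        // should fall into with probability of roughly 1 - 2p.
        int lowerBound = binomial.inverseCumulativeProbability(0.000001);
        int upperBound = binomial.inverseCumulativeProbability(0.999999);
        System.out.println("Expected range: [" + lowerBound + ", " + upperBound + "]");

        // Arguments outside [0, 1] throw OutOfRangeException, per the prototype above.
    }
}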

Usage

From source file:com.facebook.presto.execution.resourceGroups.TestWeightedFairQueue.java

@Test
public void testMultipleWinners() {
    WeightedFairQueue<String> queue = new WeightedFairQueue<>();
    String item1 = "1";
    String item2 = "2";
    queue.addOrUpdate(item1, new Usage(2, 0));
    queue.addOrUpdate(item2, new Usage(1, 0));

    int count1 = 0;
    int count2 = 0;
    for (int i = 0; i < 1000; i++) {
        if (queue.poll().equals(item1)) {
            queue.addOrUpdate(item1, new Usage(2, 0));
            count1++;
        } else {
            queue.addOrUpdate(item2, new Usage(1, 0));
            count2++;
        }
    }

    BinomialDistribution binomial = new BinomialDistribution(1000, 2.0 / 3.0);
    int lowerBound = binomial.inverseCumulativeProbability(0.000001);
    int upperBound = binomial.inverseCumulativeProbability(0.999999);

    assertBetweenInclusive(count1, lowerBound, upperBound);
    assertBetweenInclusive((1000 - count2), lowerBound, upperBound);
}

From source file:com.facebook.presto.execution.resourceGroups.TestStochasticPriorityQueue.java

@Test
public void testPollDistribution() {
    StochasticPriorityQueue<String> queue = new StochasticPriorityQueue<>();
    for (int i = 0; i < 100; i++) {
        assertTrue(queue.addOrUpdate("foo" + i, 1));
    }
    for (int i = 0; i < 100; i++) {
        assertTrue(queue.addOrUpdate("bar" + i, 1));
    }
    int foo = 0;
    for (int i = 0; i < 1000; i++) {
        String value = queue.poll();
        if (value.startsWith("foo")) {
            foo++;
        }
        assertTrue(queue.addOrUpdate(value, 1));
    }
    BinomialDistribution binomial = new BinomialDistribution(1000, 0.5);
    int lowerBound = binomial.inverseCumulativeProbability(0.000001);
    int upperBound = binomial.inverseCumulativeProbability(0.999999);
    assertLessThan(foo, upperBound);
    assertGreaterThan(foo, lowerBound);

    // Update foo weights to 2:1 distribution
    for (int i = 0; i < 100; i++) {
        assertFalse(queue.addOrUpdate("foo" + i, 2));
    }
    foo = 0;
    for (int i = 0; i < 1000; i++) {
        String value = queue.poll();
        if (value.startsWith("foo")) {
            foo++;
            assertTrue(queue.addOrUpdate(value, 2));
        } else {
            assertTrue(queue.addOrUpdate(value, 1));
        }
    }
    binomial = new BinomialDistribution(1000, 2.0 / 3.0);
    lowerBound = binomial.inverseCumulativeProbability(0.000001);
    upperBound = binomial.inverseCumulativeProbability(0.999999);
    assertLessThan(foo, upperBound);
    assertGreaterThan(foo, lowerBound);
}

From source file:com.facebook.presto.operator.aggregation.AbstractTestApproximateAggregationFunction.java

private void testCorrectnessOfErrorFunction(List<Number> inputList) throws Exception {
    int inRange = 0;
    int numberOfRuns = 1000;
    double sampleRatio = 1 / (double) WEIGHT;
    double actual = getExpectedValue(inputList);
    Random rand = new Random(1);

    for (int i = 0; i < numberOfRuns; i++) {
        //Compute Sampled Value using sampledList (numberOfRuns times)
        ImmutableList.Builder<Number> sampledList = ImmutableList.builder();
        for (Number x : inputList) {
            if (rand.nextDouble() < sampleRatio) {
                sampledList.add(x);
            }
        }

        ImmutableList<Number> list = sampledList.build();
        BlockBuilder builder = getType().createBlockBuilder(new BlockBuilderStatus(), list.size());
        for (Number sample : list) {
            if (getType().equals(BIGINT)) {
                BIGINT.writeLong(builder, sample.longValue());
            } else if (getType().equals(DOUBLE)) {
                DOUBLE.writeDouble(builder, sample.doubleValue());
            } else {
                throw new AssertionError("Can only handle longs and doubles");
            }
        }
        Page page = new Page(builder.build());
        page = OperatorAssertion.appendSampleWeight(ImmutableList.of(page), WEIGHT).get(0);
        Accumulator accumulator = getFunction().bind(ImmutableList.of(0), Optional.empty(),
                Optional.of(page.getChannelCount() - 1), getConfidence()).createAccumulator();

        accumulator.addInput(page);
        Block result = getFinalBlock(accumulator);

        String approxValue = BlockAssertions.toValues(accumulator.getFinalType(), result).get(0).toString();
        double approx = Double.parseDouble(approxValue.split(" ")[0]);
        double error = Double.parseDouble(approxValue.split(" ")[2]);

        //Check if actual answer lies within [approxAnswer - error, approxAnswer + error]
        if (Math.abs(approx - actual) <= error) {
            inRange++;
        }
    }

    BinomialDistribution binomial = new BinomialDistribution(numberOfRuns, getConfidence());
    int lowerBound = binomial.inverseCumulativeProbability(0.01);
    int upperBound = binomial.inverseCumulativeProbability(0.99);
    assertTrue(lowerBound < inRange && inRange < upperBound, String
            .format("%d out of %d passed. Expected [%d, %d]", inRange, numberOfRuns, lowerBound, upperBound));
}

From source file:com.facebook.presto.execution.resourceGroups.TestResourceGroups.java

@Test(timeOut = 10_000)
public void testWeightedScheduling() {
    RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> {
    }, directExecutor());
    root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    root.setMaxQueuedQueries(4);
    // Start with zero capacity, so that nothing starts running until we've added all the queries
    root.setMaxRunningQueries(0);
    root.setSchedulingPolicy(WEIGHTED);
    InternalResourceGroup group1 = root.getOrCreateSubGroup("1");
    group1.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group1.setMaxQueuedQueries(2);
    group1.setMaxRunningQueries(2);
    InternalResourceGroup group2 = root.getOrCreateSubGroup("2");
    group2.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group2.setMaxQueuedQueries(2);
    group2.setMaxRunningQueries(2);
    group2.setSchedulingWeight(2);

    Set<MockQueryExecution> group1Queries = fillGroupTo(group1, ImmutableSet.of(), 2);
    Set<MockQueryExecution> group2Queries = fillGroupTo(group2, ImmutableSet.of(), 2);
    root.setMaxRunningQueries(1);

    int group2Ran = 0;
    for (int i = 0; i < 1000; i++) {
        for (Iterator<MockQueryExecution> iterator = group1Queries.iterator(); iterator.hasNext();) {
            MockQueryExecution query = iterator.next();
            if (query.getState() == RUNNING) {
                query.complete();
                iterator.remove();
            }
        }
        for (Iterator<MockQueryExecution> iterator = group2Queries.iterator(); iterator.hasNext();) {
            MockQueryExecution query = iterator.next();
            if (query.getState() == RUNNING) {
                query.complete();
                iterator.remove();
                group2Ran++;
            }
        }
        root.processQueuedQueries();
        group1Queries = fillGroupTo(group1, group1Queries, 2);
        group2Queries = fillGroupTo(group2, group2Queries, 2);
    }

    // group1 has a weight of 1 and group2 has a weight of 2, so group2 should account for (2 / (1 + 2)) of the queries.
    // since this is stochastic, we check that the result of 1000 trials are 2/3 with 99.9999% confidence
    BinomialDistribution binomial = new BinomialDistribution(1000, 2.0 / 3.0);
    int lowerBound = binomial.inverseCumulativeProbability(0.000001);
    int upperBound = binomial.inverseCumulativeProbability(0.999999);
    assertLessThan(group2Ran, upperBound);
    assertGreaterThan(group2Ran, lowerBound);
}

From source file:io.prestosql.execution.resourceGroups.TestResourceGroups.java

@Test(timeOut = 10_000)
public void testWeightedScheduling() {
    RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> {
    }, directExecutor());
    root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    root.setMaxQueuedQueries(4);
    // Start with zero capacity, so that nothing starts running until we've added all the queries
    root.setHardConcurrencyLimit(0);
    root.setSchedulingPolicy(WEIGHTED);
    InternalResourceGroup group1 = root.getOrCreateSubGroup("1");
    group1.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group1.setMaxQueuedQueries(2);
    group1.setHardConcurrencyLimit(2);
    group1.setSoftConcurrencyLimit(2);
    InternalResourceGroup group2 = root.getOrCreateSubGroup("2");
    group2.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group2.setMaxQueuedQueries(2);
    group2.setHardConcurrencyLimit(2);
    group2.setSoftConcurrencyLimit(2);
    group2.setSchedulingWeight(2);

    Set<MockQueryExecution> group1Queries = fillGroupTo(group1, ImmutableSet.of(), 2);
    Set<MockQueryExecution> group2Queries = fillGroupTo(group2, ImmutableSet.of(), 2);
    root.setHardConcurrencyLimit(1);

    int group2Ran = 0;
    for (int i = 0; i < 1000; i++) {
        for (Iterator<MockQueryExecution> iterator = group1Queries.iterator(); iterator.hasNext();) {
            MockQueryExecution query = iterator.next();
            if (query.getState() == RUNNING) {
                query.complete();
                iterator.remove();
            }
        }
        group2Ran += completeGroupQueries(group2Queries);
        root.processQueuedQueries();
        group1Queries = fillGroupTo(group1, group1Queries, 2);
        group2Queries = fillGroupTo(group2, group2Queries, 2);
    }

    // group1 has a weight of 1 and group2 has a weight of 2, so group2 should account for (2 / (1 + 2)) of the queries.
    // since this is stochastic, we check that the result of 1000 trials are 2/3 with 99.9999% confidence
    BinomialDistribution binomial = new BinomialDistribution(1000, 2.0 / 3.0);
    int lowerBound = binomial.inverseCumulativeProbability(0.000001);
    int upperBound = binomial.inverseCumulativeProbability(0.999999);
    assertLessThan(group2Ran, upperBound);
    assertGreaterThan(group2Ran, lowerBound);
}

From source file:io.prestosql.execution.resourceGroups.TestResourceGroups.java

@Test(timeOut = 10_000)
public void testWeightedFairSchedulingEqualWeights() {
    RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> {
    }, directExecutor());
    root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    root.setMaxQueuedQueries(50);
    // Start with zero capacity, so that nothing starts running until we've added all the queries
    root.setHardConcurrencyLimit(0);
    root.setSchedulingPolicy(WEIGHTED_FAIR);

    InternalResourceGroup group1 = root.getOrCreateSubGroup("1");
    group1.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group1.setMaxQueuedQueries(50);
    group1.setHardConcurrencyLimit(2);
    group1.setSoftConcurrencyLimit(2);
    group1.setSchedulingWeight(1);

    InternalResourceGroup group2 = root.getOrCreateSubGroup("2");
    group2.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group2.setMaxQueuedQueries(50);
    group2.setHardConcurrencyLimit(2);
    group2.setSoftConcurrencyLimit(2);
    group2.setSchedulingWeight(1);

    InternalResourceGroup group3 = root.getOrCreateSubGroup("3");
    group3.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group3.setMaxQueuedQueries(50);
    group3.setHardConcurrencyLimit(2);
    group3.setSoftConcurrencyLimit(2);
    group3.setSchedulingWeight(2);

    Set<MockQueryExecution> group1Queries = fillGroupTo(group1, ImmutableSet.of(), 4);
    Set<MockQueryExecution> group2Queries = fillGroupTo(group2, ImmutableSet.of(), 4);
    Set<MockQueryExecution> group3Queries = fillGroupTo(group3, ImmutableSet.of(), 4);
    root.setHardConcurrencyLimit(4);

    int group1Ran = 0;
    int group2Ran = 0;
    int group3Ran = 0;
    for (int i = 0; i < 1000; i++) {
        group1Ran += completeGroupQueries(group1Queries);
        group2Ran += completeGroupQueries(group2Queries);
        group3Ran += completeGroupQueries(group3Queries);
        root.processQueuedQueries();
        group1Queries = fillGroupTo(group1, group1Queries, 4);
        group2Queries = fillGroupTo(group2, group2Queries, 4);
        group3Queries = fillGroupTo(group3, group3Queries, 4);
    }

    // group 3 should run approximately 2x the number of queries of 1 and 2
    BinomialDistribution binomial = new BinomialDistribution(4000, 1.0 / 4.0);
    int lowerBound = binomial.inverseCumulativeProbability(0.000001);
    int upperBound = binomial.inverseCumulativeProbability(0.999999);

    assertBetweenInclusive(group1Ran, lowerBound, upperBound);
    assertBetweenInclusive(group2Ran, lowerBound, upperBound);
    assertBetweenInclusive(group3Ran, 2 * lowerBound, 2 * upperBound);
}

From source file:com.facebook.presto.operator.aggregation.TestBootstrappedAggregation.java

@Test
public void testErrorBound() throws Exception {
    int trials = 20;
    BinomialDistribution binomial = new BinomialDistribution(trials, 0.5);

    int successes = 0;
    Random rand = new Random(0);
    for (int i = 0; i < trials; i++) {
        int sum = 1_000;
        PageBuilder builder = new PageBuilder(ImmutableList.of(BIGINT, BIGINT));
        for (int j = 0; j < sum; j++) {
            if (rand.nextDouble() < 0.5) {
                builder.getBlockBuilder(0).appendLong(1);
                builder.getBlockBuilder(1).appendLong(2);
            }
        }

        AggregationFunction function = new DeterministicBootstrappedAggregation(
                createTestingBlockEncodingManager(), LONG_SUM);

        successes += approximateAggregationWithinErrorBound(function, 1, 0.5, (double) sum, builder.build()) ? 1
                : 0;
    }

    // Since we used a confidence of 0.5, successes should have a binomial distribution B(n=20, p=0.5)
    assertTrue(binomial.inverseCumulativeProbability(0.01) < successes
            && successes < binomial.inverseCumulativeProbability(0.99));
}

From source file:org.wso2.extension.siddhi.execution.var.backtest.BacktestDaily.java

private void runStandardCoverageTest() {

    BinomialDistribution dist = new BinomialDistribution(VAR_PER_SAMPLE, 1 - VAR_CI);
    double leftEnd = dist.inverseCumulativeProbability(BACKTEST_CI / 2);
    double rightEnd = dist.inverseCumulativeProbability(1 - (BACKTEST_CI / 2));

    System.out.println("Left End :" + leftEnd);
    System.out.println("Right End :" + rightEnd);

    int numberOfExceptions;
    int successCount = 0;
    for (int j = 0; j < SAMPLE_SIZE; j++) {
        numberOfExceptions = 0;
        for (int i = j * VAR_PER_SAMPLE; i < (j + 1) * VAR_PER_SAMPLE; i++) {
            //System.out.println(actualVarList.get(i) + " " + calculatedVarList.get(i));
            if (actualVarList.get(i) <= calculatedVarList.get(i)) {
                numberOfExceptions++;
            }
        }
        System.out.println("Sample Set : " + (j + 1) + " Exceptions : " + numberOfExceptions);
        if (rightEnd >= numberOfExceptions && leftEnd <= numberOfExceptions) {
            successCount++;
        }
    }
    System.out.println("Success Percentage : " + (((double) successCount) / SAMPLE_SIZE) * 100);
}

From source file:org.wso2.extension.siddhi.execution.var.backtest.BacktestIncrementalTest.java

private void runStandardCoverageTest() {

    BinomialDistribution dist = new BinomialDistribution(VAR_PER_SAMPLE, 1 - VAR_CI);
    double leftEnd = dist.inverseCumulativeProbability(BACKTEST_CI / 2);
    double rightEnd = dist.inverseCumulativeProbability(1 - (BACKTEST_CI / 2));

    System.out.println("Left End :" + leftEnd);
    System.out.println("Right End :" + rightEnd);

    NUMBER_OF_SAMPLES = lossList.size() / VAR_PER_SAMPLE;

    int numberOfExceptions;
    int failCount = 0;
    for (int j = 0; j < NUMBER_OF_SAMPLES; j++) {
        numberOfExceptions = 0;
        for (int i = j * VAR_PER_SAMPLE; i < (j + 1) * VAR_PER_SAMPLE; i++) {
            if (lossList.get(i + 1) < 0) {
                if (lossList.get(i + 1) < varList.get(i)) {
                    numberOfExceptions++;
                }
            }
        }
        System.out.println("Sample Set : " + (j + 1) + " Exceptions : " + numberOfExceptions);

        if (numberOfExceptions < leftEnd || rightEnd < numberOfExceptions) {
            failCount++;
        }
    }
    System.out.println(
            "Success Rate : " + (((double) NUMBER_OF_SAMPLES - failCount) / (NUMBER_OF_SAMPLES)) * 100 + " %");
}

From source file:org.wso2.extension.siddhi.execution.var.backtest.BacktestRealTime.java

private void runStandardCoverageTest() {

    BinomialDistribution dist = new BinomialDistribution(VAR_PER_SAMPLE, 1 - VAR_CI);
    double leftEnd = dist.inverseCumulativeProbability(BACKTEST_CI / 2);
    double rightEnd = dist.inverseCumulativeProbability(1 - (BACKTEST_CI / 2));

    System.out.println("Left End :" + leftEnd);
    System.out.println("Right End :" + rightEnd);

    int numberOfExceptions = 0;
    //        int successCount = 0;
    for (int j = 0; j < SAMPLE_SIZE * NUMBER_OF_ASSETS; j++) {
        for (int i = j * VAR_PER_SAMPLE; i < (j + 1) * VAR_PER_SAMPLE; i++) {
            //System.out.println(actualVarList.get(i) + " " + calculatedVarList.get(i));
            if (actualVarList.get(i) <= calculatedVarList.get(i)) {
                numberOfExceptions++;
            }
        }
        System.out.println("Sample Set : " + (j + 1) + " Exceptions : " + numberOfExceptions);

        //            if (rightEnd >= numberOfExceptions && leftEnd <= numberOfExceptions) {
        //                successCount++;
        //            }
    }
    System.out.println("Failure Rate : " + (((double) numberOfExceptions) / (VAR_PER_SAMPLE)) * 100);

}