Example usage for org.apache.commons.math.util MathUtils factorial

Introduction

On this page you can find example usages of org.apache.commons.math.util MathUtils factorial.

Prototype

public static long factorial(final int n) 

Document

Returns n!.
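
A minimal standalone sketch of calling the method (the class name FactorialDemo is ours; the exception behavior assumed here is the documented commons-math 2.x contract: a negative argument is rejected with IllegalArgumentException, and any n > 20, whose factorial exceeds Long.MAX_VALUE, raises ArithmeticException):

import org.apache.commons.math.util.MathUtils;

public class FactorialDemo {
    public static void main(String[] args) {
        System.out.println(MathUtils.factorial(5));  // 120
        System.out.println(MathUtils.factorial(20)); // 2432902008176640000, the largest factorial that fits in a long

        // Beyond 20 the method throws instead of silently overflowing.
        try {
            MathUtils.factorial(21);
        } catch (ArithmeticException e) {
            System.out.println("21! does not fit in a long");
        }
    }
}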

Usage

From source file:com.opengamma.analytics.financial.riskfactor.TaylorExpansionMultiplierCalculator.java

public static double getMultiplier(final Underlying underlying) {
    Validate.notNull(underlying, "underlying");
    if (underlying instanceof NthOrderUnderlying) {
        final NthOrderUnderlying nthOrder = (NthOrderUnderlying) underlying;
        final int n = nthOrder.getOrder();
        if (n == 0) {
            return 1;
        }
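        // The multiplier on an nth-order sensitivity is the Taylor series coefficient 1/n!.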
        return 1. / MathUtils.factorial(n);
    } else if (underlying instanceof MixedOrderUnderlying) {
        final MixedOrderUnderlying mixedOrder = (MixedOrderUnderlying) underlying;
        double result = 1;
        for (final NthOrderUnderlying underlyingOrder : mixedOrder.getUnderlyingOrders()) {
            result *= getMultiplier(underlyingOrder);
        }
        return result;
    }
    throw new IllegalArgumentException(
            "Order was neither NthOrderUnderlying nor MixedOrderUnderlying: have " + underlying.getClass());
}

From source file:cooperativegametheory.solutionfunctions.ShapleyValue.java

public ShapleyValue(CoalitionFunction c) {
    cf = c;
    int n = cf.getPlayers().size();
    long nFac = MathUtils.factorial(n);
    payoffs = new HashMap<Integer, Float>();
    Set<Coalition> coalitions = cf.getPlayers().getAllSubsets();
    for (int i : cf.getPlayers()) {
        float payoff = 0;
        for (Coalition coalition : coalitions) {
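            // Shapley formula: weight (n - |S|)! * (|S| - 1)! / n! on player i's marginal contribution v(S) - v(S \ {i}).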
            payoff += ((((double) MathUtils.factorial(n - coalition.size())
                    * MathUtils.factorial(coalition.size() - 1)) / nFac)
                    * (c.getValue(coalition) - c.getValue(coalition.removeFluidCopy(i))));
        }
        payoffs.put(i, payoff);
    }
}

From source file:com.linkedin.pinot.core.startree.TestOffheapStarTreeBuilder.java

private void testSimpleCore(int numDimensions, int numMetrics, int numSkipMaterializationDimensions)
        throws Exception {
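    // Dimension i below takes values row % (numDimensions - i), i.e. (numDimensions - i) distinct values,
    // so numDimensions! is exactly the product of the per-dimension cardinalities.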
    int ROWS = (int) MathUtils.factorial(numDimensions);
    StarTreeBuilderConfig builderConfig = new StarTreeBuilderConfig();
    Schema schema = new Schema();
    builderConfig.dimensionsSplitOrder = new ArrayList<>();
    builderConfig.setSkipMaterializationForDimensions(new HashSet<String>());
    Set<String> skipMaterializationForDimensions = builderConfig.getSkipMaterializationForDimensions();
    for (int i = 0; i < numDimensions; i++) {
        String dimName = "d" + (i + 1);
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, DataType.STRING, true);
        schema.addField(dimensionFieldSpec);

        if (i < (numDimensions - numSkipMaterializationDimensions)) {
            builderConfig.dimensionsSplitOrder.add(dimName);
        } else {
            builderConfig.getSkipMaterializationForDimensions().add(dimName);
        }
    }

    schema.setTimeFieldSpec(new TimeFieldSpec("daysSinceEpoch", DataType.INT, TimeUnit.DAYS));
    for (int i = 0; i < numMetrics; i++) {
        String metricName = "m" + (i + 1);
        MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, DataType.INT);
        schema.addField(metricFieldSpec);
    }
    builderConfig.maxLeafRecords = 10;
    builderConfig.schema = schema;
    builderConfig.outDir = new File("/tmp/startree");
    OffHeapStarTreeBuilder builder = new OffHeapStarTreeBuilder();
    builder.init(builderConfig);
    HashMap<String, Object> map = new HashMap<>();
    for (int row = 0; row < ROWS; row++) {
        for (int i = 0; i < numDimensions; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + row % (numDimensions - i));
        }
        //time
        map.put("daysSinceEpoch", 1);
        for (int i = 0; i < numMetrics; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            map.put(metName, 1);
        }
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        builder.append(genericRow);
    }
    builder.build();
    int totalDocs = builder.getTotalRawDocumentCount() + builder.getTotalAggregateDocumentCount();
    Iterator<GenericRow> iterator = builder.iterator(0, totalDocs);
    while (iterator.hasNext()) {
        GenericRow row = iterator.next();
        System.out.println(row);
    }

    iterator = builder.iterator(builder.getTotalRawDocumentCount(), totalDocs);
    while (iterator.hasNext()) {
        GenericRow row = iterator.next();
        for (String skipDimension : skipMaterializationForDimensions) {
            String rowValue = (String) row.getValue(skipDimension);
            assert (rowValue.equals("ALL"));
        }
    }

    FileUtils.deleteDirectory(builderConfig.outDir);
}

From source file:com.linkedin.pinot.core.startree.OffHeapStarTreeBuilderTest.java

private void testSimpleCore(int numDimensions, int numMetrics, int numSkipMaterializationDimensions)
        throws Exception {
    int ROWS = (int) MathUtils.factorial(numDimensions);
    Schema schema = new Schema();
    List<String> dimensionsSplitOrder = new ArrayList<>();
    Set<String> skipMaterializationDimensions = new HashSet<>();
    for (int i = 0; i < numDimensions; i++) {
        String dimName = "d" + (i + 1);
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, DataType.STRING, true);
        schema.addField(dimensionFieldSpec);

        if (i < (numDimensions - numSkipMaterializationDimensions)) {
            dimensionsSplitOrder.add(dimName);
        } else {
            skipMaterializationDimensions.add(dimName);
        }
    }

    schema.addField(new TimeFieldSpec("daysSinceEpoch", DataType.INT, TimeUnit.DAYS));
    for (int i = 0; i < numMetrics; i++) {
        String metricName = "m" + (i + 1);
        MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, DataType.INT);
        schema.addField(metricFieldSpec);
    }
    StarTreeBuilderConfig builderConfig = new StarTreeBuilderConfig();
    builderConfig.setOutDir(TEMP_DIR);
    builderConfig.setSchema(schema);
    builderConfig.setDimensionsSplitOrder(dimensionsSplitOrder);
    builderConfig.setSkipMaterializationDimensions(skipMaterializationDimensions);
    builderConfig.setMaxNumLeafRecords(10);

    OffHeapStarTreeBuilder builder = new OffHeapStarTreeBuilder();
    builder.init(builderConfig);
    HashMap<String, Object> map = new HashMap<>();
    for (int row = 0; row < ROWS; row++) {
        for (int i = 0; i < numDimensions; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + row % (numDimensions - i));
        }
        //time
        map.put("daysSinceEpoch", 1);
        for (int i = 0; i < numMetrics; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            map.put(metName, 1);
        }
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        builder.append(genericRow);
    }
    builder.build();
    int totalDocs = builder.getTotalRawDocumentCount() + builder.getTotalAggregateDocumentCount();
    Iterator<GenericRow> iterator = builder.iterator(builder.getTotalRawDocumentCount(), totalDocs);
    while (iterator.hasNext()) {
        GenericRow row = iterator.next();
        for (String skipMaterializationDimension : skipMaterializationDimensions) {
            String rowValue = (String) row.getValue(skipMaterializationDimension);
            Assert.assertEquals(rowValue, "null");
        }
    }

    FileUtils.deleteDirectory(TEMP_DIR);
}

From source file:com.linkedin.pinot.core.startree.StarTreeIndexTestSegmentHelper.java

private static Schema buildSegment(String segmentDirName, String segmentName, HllConfig hllConfig,
        boolean enableOffHeapFormat) throws Exception {
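    // NUM_DIMENSIONS! covers the cycling patterns of the first NUM_DIMENSIONS / 2 dimensions;
    // the factor of 100 adds row volume for the random high-cardinality dimensions used in the HLL test below.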
    final int rows = (int) MathUtils.factorial(NUM_DIMENSIONS) * 100;
    Schema schema = new Schema();

    for (int i = 0; i < NUM_DIMENSIONS; i++) {
        String dimName = "d" + (i + 1);
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, FieldSpec.DataType.STRING,
                true);
        schema.addField(dimName, dimensionFieldSpec);
    }

    schema.setTimeFieldSpec(new TimeFieldSpec(TIME_COLUMN_NAME, FieldSpec.DataType.INT, TimeUnit.DAYS));
    for (int i = 0; i < NUM_METRICS; i++) {
        String metricName = "m" + (i + 1);
        MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, FieldSpec.DataType.INT);
        schema.addField(metricName, metricFieldSpec);
    }

    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setEnableStarTreeIndex(true);
    config.setOutDir(segmentDirName);
    config.setFormat(FileFormat.AVRO);
    config.setSegmentName(segmentName);
    config.setHllConfig(hllConfig);
    config.setStarTreeIndexSpec(buildStarTreeIndexSpec(enableOffHeapFormat));

    Random random = new Random(RANDOM_SEED);
    final List<GenericRow> data = new ArrayList<>();
    for (int row = 0; row < rows; row++) {
        HashMap<String, Object> map = new HashMap<>();
        // Dim columns.
        for (int i = 0; i < NUM_DIMENSIONS / 2; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + row % (NUM_DIMENSIONS - i));
        }
        // Random values make cardinality of d3, d4 column values larger to better test hll
        for (int i = NUM_DIMENSIONS / 2; i < NUM_DIMENSIONS; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + random.nextInt(i * 100));
        }

        // Metric columns.
        for (int i = 0; i < NUM_METRICS; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            map.put(metName, random.nextInt(METRIC_MAX_VALUE));
        }

        // Time column.
        map.put(TIME_COLUMN_NAME, row % 7);

        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        data.add(genericRow);
    }

    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    RecordReader reader = createReader(schema, data);
    driver.init(config, reader);
    driver.build();

    LOGGER.info("Built segment {} at {}", segmentName, segmentDirName);
    return schema;
}

From source file:com.linkedin.pinot.core.startree.TestStarTreeIntegrationTest.java

@Test
public void testSimple() throws Exception {
    int numDimensions = 4;
    int numMetrics = 2;
    int ROWS = (int) MathUtils.factorial(numDimensions);
    final Schema schema = new Schema();
    for (int i = 0; i < numDimensions; i++) {
        String dimName = "d" + (i + 1);
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, DataType.STRING, true);
        schema.addField(dimName, dimensionFieldSpec);
    }
    schema.setTimeFieldSpec(new TimeFieldSpec("daysSinceEpoch", DataType.INT, TimeUnit.DAYS));
    for (int i = 0; i < numMetrics; i++) {
        String metricName = "m" + (i + 1);
        MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, DataType.INT);
        schema.addField(metricName, metricFieldSpec);
    }

    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setEnableStarTreeIndex(true);
    String tempOutputDir = "/tmp/star-tree-index";
    config.setOutDir(tempOutputDir);
    config.setFormat(FileFormat.AVRO);
    config.setSegmentName("testSimple");
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    final List<GenericRow> data = new ArrayList<>();
    for (int row = 0; row < ROWS; row++) {
        HashMap<String, Object> map = new HashMap<>();
        for (int i = 0; i < numDimensions; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + row % (numDimensions - i));
        }
        //time
        map.put("daysSinceEpoch", 1);
        for (int i = 0; i < numMetrics; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            map.put(metName, 1);
        }
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        data.add(genericRow);
    }
    RecordReader reader = createReader(schema, data);
    driver.init(config, reader);
    driver.build();

    ReadMode mode = ReadMode.heap;
    //query to test
    String[] metricNames = new String[] { "m1" };
    String query = "select sum(m1) from T";
    Pql2Compiler compiler = new Pql2Compiler();
    BrokerRequest brokerRequest = compiler.compileToBrokerRequest(query);

    IndexSegment segment = Loaders.IndexSegment.load(new File(tempOutputDir, driver.getSegmentName()), mode);

    FilterPlanNode planNode = new FilterPlanNode(segment, brokerRequest);
    Operator rawOperator = planNode.run();
    BlockDocIdIterator rawDocIdIterator = rawOperator.nextBlock().getBlockDocIdSet().iterator();
    double[] expectedSums = computeSum(segment, rawDocIdIterator, metricNames);
    System.out.println("expectedSums=" + Arrays.toString(expectedSums));
    //dump contents
    Iterator<GenericRow> rowIterator = ((IndexSegmentImpl) segment).iterator(0,
            segment.getSegmentMetadata().getTotalDocs());
    int counter = 0;
    while (rowIterator.hasNext()) {

        GenericRow genericRow = rowIterator.next();
        StringBuilder sb = new StringBuilder().append(counter++).append(": \t");
        for (String dimName : schema.getDimensionNames()) {
            sb.append(dimName).append(":").append(genericRow.getValue(dimName)).append(", ");
        }
        if (schema.getTimeColumnName() != null) {
            sb.append(schema.getTimeColumnName()).append(":")
                    .append(genericRow.getValue(schema.getTimeColumnName())).append(", ");
        }
        for (String metName : schema.getMetricNames()) {
            sb.append(metName).append(":").append(genericRow.getValue(metName)).append(", ");
        }
        System.out.println(sb);
    }

    StarTreeIndexOperator starTreeOperator = new StarTreeIndexOperator(segment, brokerRequest);
    starTreeOperator.open();
    BlockDocIdIterator starTreeDocIdIterator = starTreeOperator.nextBlock().getBlockDocIdSet().iterator();

    double[] actualSums = computeSum(segment, starTreeDocIdIterator, metricNames);
    System.out.println("actualSums=" + Arrays.toString(actualSums));
}

From source file:com.linkedin.pinot.core.startree.BaseStarTreeIndexTest.java

/**
 * Helper method to build the segment.
 *
 * @param segmentDirName
 * @param segmentName
 * @throws Exception
 */
Schema buildSegment(String segmentDirName, String segmentName) throws Exception {
    int ROWS = (int) MathUtils.factorial(NUM_DIMENSIONS);
    Schema schema = new Schema();

    for (int i = 0; i < NUM_DIMENSIONS; i++) {
        String dimName = "d" + (i + 1);
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, FieldSpec.DataType.STRING,
                true);
        schema.addField(dimName, dimensionFieldSpec);
    }

    schema.setTimeFieldSpec(new TimeFieldSpec(TIME_COLUMN_NAME, FieldSpec.DataType.INT, TimeUnit.DAYS));
    for (int i = 0; i < NUM_METRICS; i++) {
        String metricName = "m" + (i + 1);
        MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, FieldSpec.DataType.INT);
        schema.addField(metricName, metricFieldSpec);
    }

    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setEnableStarTreeIndex(true);
    config.setOutDir(segmentDirName);
    config.setFormat(FileFormat.AVRO);
    config.setSegmentName(segmentName);

    final List<GenericRow> data = new ArrayList<>();
    for (int row = 0; row < ROWS; row++) {
        HashMap<String, Object> map = new HashMap<>();
        for (int i = 0; i < NUM_DIMENSIONS; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + row % (NUM_DIMENSIONS - i));
        }

        Random random = new Random(_randomSeed);
        for (int i = 0; i < NUM_METRICS; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            map.put(metName, random.nextInt(METRIC_MAX_VALUE));
        }

        // Time column.
        map.put("daysSinceEpoch", row % 7);

        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        data.add(genericRow);
    }

    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    RecordReader reader = createReader(schema, data);
    driver.init(config, reader);
    driver.build();

    LOGGER.info("Built segment {} at {}", segmentName, segmentDirName);
    return schema;
}

From source file:com.xiantrimble.combinatorics.CombMathUtilsImpl.java

private long p(int k, DistinctM dm) {
    long result = 0;

    // create a stack for the calculation.
    FastList<PartialCombinationCount> stack = stackFactory.object();

    // add the initial partial combination.
    stack.addFirst(pccFactory.object().init(k, dm, dm.m, 0, 1, 1));

    while (!stack.isEmpty()) {
        // get the next combination to expand.
        PartialCombinationCount pc = stack.removeFirst();

        //System.out.println(pc);

        // Start the expansion of this partial combination.
        // pc.k = the number of elements that still need to be added to the combination.
        // pc.dm = the next distinct m to consider.
        // pc.dmk = the size of the next combination of elements to add.
        // pc.ldm = the number of distinct unused elements to the left of mdi minus the number of distinct used elements at mdi.
        // pc.size = the number of combinations already in the solution (in k - pc.k)
        // pc.pd = the permutation count denominator.

        // get the current distinct m
        DistinctM cdm = pc.dm;
        //System.out.println(cdm);

        // if there could never be an answer, then bail out.
        if (pc.k > (cdm.count + pc.ldm) * pc.dmk + cdm.rn) {
            //System.out.println("OPTIMIZED DUE TO LACK OF ELEMENTS.");
            pccFactory.recycle(pc);
            continue;
        }

        // for each number of pc.dmk sized sets that we can create, add new partial combinations.
        for (int e = 0; e <= pc.dm.count + pc.ldm && e * pc.dmk <= pc.k; e++) {
            int nextK = pc.k - (e * pc.dmk);
            int nextDmk = pc.dmk - 1;
            long nextSize = pc.size * MathUtils.binomialCoefficient(pc.dm.count + pc.ldm, e);
            long nextPd = pc.pd * MathUtils.pow(MathUtils.factorial(pc.dmk), e);

            //System.out.println("e:"+e+", nextK:"+nextK+", nextDmk:"+nextDmk+", nextDmi:"+nextDmi+", nextSize:"+nextSize);

            // if nextK is zero, then this set of combinations is complete.
            if (nextK == 0) {
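                // k! / nextPd counts the distinct permutations of the assembled multiset:
                // k! divided by the product of the factorials of the block sizes accumulated in pd.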
                result += (nextSize * (MathUtils.factorial(k) / nextPd));
                continue;
            }

            // if nextDmk is zero, then we have run out of items to place into k.
            else if (nextDmk == 0)
                continue;

            // if we are on the last distinct m, or the next distinct m is not big enough, stay at dmi.
            else if (pc.dm.next == null || pc.dm.next.m < nextDmk) {
                int nextLdm = pc.ldm - e;
                stack.addFirst(pccFactory.object().init(nextK, pc.dm, nextDmk, nextLdm, nextSize, nextPd));
            }

            // we need to advance to the next dmi.
            else {
                int nextLdm = pc.ldm - e + cdm.count;
                stack.addFirst(pccFactory.object().init(nextK, pc.dm.next, nextDmk, nextLdm, nextSize, nextPd));
            }
        }
        pccFactory.recycle(pc);
    }

    stackFactory.recycle(stack);

    //System.out.println("Result: "+result);
    return result;
}

From source file:org.netxilia.functions.MathFunctions.java

public long COMBIN(int count1, int count2) {
    if (count2 > count1) {
        throw new IllegalArgumentException("Second argument should not be larger than the first argument");
    }
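    // C(n, k) = n! / (k! * (n - k)!). Since MathUtils.factorial throws once n > 20,
    // MathUtils.binomialCoefficient(count1, count2) would be the more robust choice here.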
    return MathUtils.factorial(count1) / (MathUtils.factorial(count2) * MathUtils.factorial(count1 - count2));
}

From source file:org.netxilia.functions.MathFunctions.java

public long FACT(int number) {
    return MathUtils.factorial(number);
}