Example usage for org.apache.commons.math3.distribution NormalDistribution NormalDistribution

Introduction

On this page you can find example usage for the org.apache.commons.math3.distribution.NormalDistribution constructor NormalDistribution().

Prototype

public NormalDistribution() 

Document

Create a normal distribution with mean equal to zero and standard deviation equal to one.
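The examples below all rely on this constructor. As a quick orientation, here is a minimal, self-contained sketch (not taken from any of the source files below) that creates the resulting standard normal distribution and exercises its most common methods; the printed values are approximate.

import org.apache.commons.math3.distribution.NormalDistribution;

public class StandardNormalDemo {
    public static void main(String[] args) {
        NormalDistribution standardNormal = new NormalDistribution();

        System.out.println(standardNormal.getMean());                         // 0.0
        System.out.println(standardNormal.getStandardDeviation());            // 1.0
        System.out.println(standardNormal.density(0.0));                      // ~0.3989, i.e. 1 / sqrt(2 * pi)
        System.out.println(standardNormal.cumulativeProbability(1.96));       // ~0.975
        System.out.println(standardNormal.inverseCumulativeProbability(0.5)); // 0.0
    }
}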

Usage

From source file:org.nmdp.ngs.tools.GenerateReadsTest.java

@Before
public void setUp() throws Exception {
    random = new JDKRandomGenerator();
    length = new NormalDistribution();
    quality = new RealDistributionQualityStrategy(new NormalDistribution());
    coverage = GenerateReads.DEFAULT_COVERAGE;
    mutationRate = 0.0d;
    mutation = GenerateReads.DEFAULT_MUTATION;
}

From source file:org.wso2.carbon.analytics.apim.spark.udf.APIManagerAnalyticsUDF.java

/**
 * This method calculates the value at a given percentile of a normal distribution
 * with the supplied mean and standard deviation.
 *
 * @param mean         mean of the distribution
 * @param stdDeviation standard deviation of the distribution
 * @param percentile   percentile to evaluate, in the range 0 to 1
 * @return the value at the given percentile
 * @throws APIManagerAnalyticsUDFException if an argument is null or the percentile is out of range
 */
public Double getpercentileValue(Double mean, Double stdDeviation, Double percentile)
        throws APIManagerAnalyticsUDFException {
    if (mean == null || stdDeviation == null || percentile == null) {
        throw new APIManagerAnalyticsUDFException("One or more arguments provided for the method is/are null");
    }

    if (percentile < 0 || percentile > 1) {
        throw new APIManagerAnalyticsUDFException("percentile should in 0 < percentile < 1 range");
    }
    double zValue = new NormalDistribution().inverseCumulativeProbability(percentile);
    return mean + zValue * stdDeviation;
}
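The method above maps a percentile to a value of an arbitrary normal distribution by rescaling the standard normal quantile: value = mean + z * stdDeviation, with z = inverseCumulativeProbability(percentile). A minimal sketch of the same idea; the mean and standard deviation here are made-up illustration values, not part of the original source.

NormalDistribution standard = new NormalDistribution();
double mean = 100.0;          // assumed value, for illustration only
double stdDeviation = 15.0;   // assumed value, for illustration only
double z = standard.inverseCumulativeProbability(0.95); // ~1.6449
double percentileValue = mean + z * stdDeviation;       // ~124.67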

From source file:org.wso2.extension.siddhi.execution.reorder.AlphaKSlackExtension.java

@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
        StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
    ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
    lock.lock();
    NormalDistribution actualDistribution = new NormalDistribution();

    double criticalValue = Math.abs(actualDistribution.inverseCumulativeProbability((1 - confidenceLevel) / 2));
    WindowCoverage obj = new WindowCoverage(errorThreshold);
    try {
        while (streamEventChunk.hasNext()) {
            StreamEvent event = streamEventChunk.next();

            if (event.getType() != ComplexEvent.Type.TIMER) {
                streamEventChunk.remove();
                long timestamp = (Long) timestampExecutor.execute(event);
                timestampList.add(timestamp);
                double correlationField = (Double) correlationFieldExecutor.execute(event);
                dataItemList.add(correlationField);
                if (discardFlag) {
                    if (timestamp < lastSentTimestamp) {
                        continue;
                    }
                }

                if (timerFlag) {
                    timerFlag = false;
                    lastScheduledTimestamp = lastScheduledTimestamp + timerDuration;
                    scheduler.notifyAt(lastScheduledTimestamp);
                }

                List<StreamEvent> eventList = primaryTreeMap.get(timestamp);
                if (eventList == null) {
                    eventList = new ArrayList<StreamEvent>();
                    primaryTreeMap.put(timestamp, eventList);
                }
                eventList.add(event);
                counter += 1;
                if (counter > batchSize) {
                    long adjustedBatchsize = Math.round(batchSize * 0.75);
                    alpha = calculateAlpha(obj.calculateWindowCoverageThreshold(criticalValue, dataItemList),
                            obj.calculateRuntimeWindowCoverage(timestampList, adjustedBatchsize));
                    counter = 0;
                    timestampList = new ArrayList<Long>();
                    dataItemList = new ArrayList<Double>();
                }
                if (timestamp > largestTimestamp) {
                    largestTimestamp = timestamp;
                    long minTimestamp = primaryTreeMap.firstKey();
                    long timeDifference = largestTimestamp - minTimestamp;
                    if (timeDifference > k) {
                        if (timeDifference < maxK) {
                            k = Math.round(timeDifference * alpha);
                        } else {
                            k = maxK;
                        }
                    }

                    Iterator<Map.Entry<Long, List<StreamEvent>>> entryIterator = primaryTreeMap.entrySet()
                            .iterator();
                    while (entryIterator.hasNext()) {
                        Map.Entry<Long, List<StreamEvent>> entry = entryIterator.next();
                        List<StreamEvent> list = secondaryTreeMap.get(entry.getKey());
                        if (list != null) {
                            list.addAll(entry.getValue());
                        } else {
                            secondaryTreeMap.put(entry.getKey(), new ArrayList<StreamEvent>(entry.getValue()));
                        }
                    }
                    primaryTreeMap = new TreeMap<Long, List<StreamEvent>>();
                    entryIterator = secondaryTreeMap.entrySet().iterator();
                    while (entryIterator.hasNext()) {
                        Map.Entry<Long, List<StreamEvent>> entry = entryIterator.next();
                        if (entry.getKey() + k <= largestTimestamp) {
                            entryIterator.remove();
                            List<StreamEvent> timeEventList = entry.getValue();
                            lastSentTimestamp = entry.getKey();

                            for (StreamEvent aTimeEventList : timeEventList) {
                                complexEventChunk.add(aTimeEventList);
                            }
                        }
                    }
                }
            } else {
                if (secondaryTreeMap.size() > 0) {
                    for (Map.Entry<Long, List<StreamEvent>> longListEntry : secondaryTreeMap.entrySet()) {
                        List<StreamEvent> timeEventList = longListEntry.getValue();

                        for (StreamEvent aTimeEventList : timeEventList) {
                            complexEventChunk.add(aTimeEventList);
                        }
                    }

                    secondaryTreeMap = new TreeMap<Long, List<StreamEvent>>();

                }

                if (primaryTreeMap.size() > 0) {
                    for (Map.Entry<Long, List<StreamEvent>> longListEntry : primaryTreeMap.entrySet()) {
                        List<StreamEvent> timeEventList = longListEntry.getValue();

                        for (StreamEvent aTimeEventList : timeEventList) {
                            complexEventChunk.add(aTimeEventList);
                        }
                    }

                    primaryTreeMap = new TreeMap<Long, List<StreamEvent>>();
                }

                timerFlag = true;
            }
        }
    } catch (ArrayIndexOutOfBoundsException ec) {
        //This happens due to user specifying an invalid field index.
        throw new ExecutionPlanCreationException("The very first parameter must be an "
                + "Integer with a valid field index (0 to (fieldsLength-1)).");
    } finally {
        // release the lock even when an exception escapes the try block
        lock.unlock();
    }
    nextProcessor.process(complexEventChunk);
}
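The only NormalDistribution call in the method above derives a two-sided critical value from the configured confidence level. In isolation, and assuming a confidence level of 0.95 (the actual value comes from the extension's configuration), that computation looks like this:

double confidenceLevel = 0.95; // assumed value, for illustration only
NormalDistribution actualDistribution = new NormalDistribution();
double criticalValue = Math.abs(
        actualDistribution.inverseCumulativeProbability((1 - confidenceLevel) / 2)); // ~1.96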

From source file:org.wso2.extension.siddhi.execution.var.models.montecarlo.MonteCarloStandardSimulation.java

/**
 * Gets the normal distribution instance, creating a standard normal
 * distribution lazily if one is not already available.
 *
 * @return the normal distribution
 */
public NormalDistribution getDistribution() {
    if (!(distribution instanceof NormalDistribution)) {
        this.distribution = new NormalDistribution();
    }
    return this.distribution;
}

From source file:outlineDescriptor.CellArray.java

/**
 * @param ip              corresponding ImageProcessor
 * @param columnDimension Array resolution
 * @param majorAxes       voting kernel parameters
 * @param minorAxes       voting kernel parameters
 * @param ev1             main eigenvalue
 * @param ev2             secondary eigenvalue
 */
public CellArray(ImageProcessor ip, int columnDimension, int majorAxes, int minorAxes, double ev1, double ev2) {

    this.directions = OutlineDescriptor_.getDirections();
    this.kernelGenerator = new EllipticalKernelGenerator(majorAxes, minorAxes, ev1, ev2, directions);
    this.imgData = ip.getIntArray();
    this.columnDimension = columnDimension;
    this.array = splitToCells(ip);
    this.distribution = new NormalDistribution();

}

From source file:pyromaniac.Algorithm.MultinomialOneSidedTest.java

public void runTest() throws Exception {
    //not significant if no observations below and no observations above
    if (observationsBelowMode == 0 && observationsAboveMode == 0) {
        this.p = 1;
    } //significant if the number of observations above mode is equal to that at the mode. 
    /*         else if(N <= 5 && observationsAtMode != N)
    {
       this.significant = true;
       this.p = 0;
    }*/
    else if (alpha > 0
            && (observationsAboveMode == observationsAtMode || observationsBelowMode == observationsAtMode)) {
        this.significantAbove = true;
        this.significantBelow = true;
        //this.significantCombined = true;
        this.p = 0;
    } else {

        int indexBelowMode = 1;
        int indexAboveMode = 2;

        double[] X = new double[] { this.observationsAtMode, this.observationsBelowMode,
                this.observationsAboveMode };
        double[] xDivN = new double[] { (double) this.observationsAtMode / (double) this.N,
                (double) this.observationsBelowMode / (double) this.N,
                (double) this.observationsAboveMode / (double) this.N };
        HashSet<Integer> gamma = new HashSet<Integer>();

        if (verbose) {
            for (int i = 0; i < X.length; i++) {
                logger.writeLog("X[" + i + "] = " + X[i], AcaciaLogger.LOG_DEBUG);
                logger.writeLog("XDivN[" + i + "] = " + xDivN[i], AcaciaLogger.LOG_DEBUG);
                logger.writeLog("P[" + i + "] = " + P[i], AcaciaLogger.LOG_DEBUG);
            }
        }

        for (int i = 1; i < xDivN.length; i++) {
            if (xDivN[i] >= P[i]) {
                gamma.add(i);
            }
        }

        double sumX = 0;
        double sumP = 0;

        for (int index : gamma) {
            sumX += X[index];
            sumP += P[index];
        }

        while (gamma.size() < 2) {
            boolean added = false;

            for (int i = 1; i < xDivN.length; i++) {
                double adjP = X[i] * (1 - sumP) / (this.N - sumX);

                if (adjP > P[i] && !gamma.contains(i)) {
                    gamma.add(i);
                    added = true;
                }
            }

            if (!added) {
                break;
            }

            sumP = 0;
            sumX = 0;

            for (int index : gamma) {
                sumX += X[index];
                sumP += P[index];
            }
        }

        NormalDistribution norm = new NormalDistribution();

        double w2Num = Math.pow(((N - sumX) - N * (1 - sumP)), 2);
        double w2Denom = (N * (1 - sumP));
        double resSum = 0;
        for (int index : gamma) {
            resSum += Math.pow(X[index] - N * P[index], 2) / (N * P[index]);
        }

        double w2 = (w2Num / w2Denom) + resSum;
        double m = Math
                .sqrt(P[indexBelowMode] * P[indexAboveMode] / (1 - P[indexBelowMode] - P[indexAboveMode]));
        double calcP = (0.25 + (Math.atan(m) / (2 * Math.PI))) * Math.exp(-w2 / 2)
                + (1 - norm.cumulativeProbability(Math.sqrt(w2)));

        this.p = calcP;

        if (this.alpha > 0) {
            boolean sig = (this.p <= this.alpha);
            this.significantAbove = sig;
            this.significantBelow = sig;
            //   this.significantCombined = sig;
        } else //alpha == zero
        {
        }
    }
}
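The final term of the p-value above, 1 - norm.cumulativeProbability(Math.sqrt(w2)), is the upper-tail probability of the standard normal; by symmetry it equals the lower-tail probability at the negated point. A quick sanity-check sketch, using an arbitrary test point:

NormalDistribution norm = new NormalDistribution();
double z = 1.5;                                        // arbitrary test point
double upperTail = 1 - norm.cumulativeProbability(z);  // ~0.0668
double bySymmetry = norm.cumulativeProbability(-z);    // same value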

From source file:ro.hasna.ts.math.distribution.NormalDistributionDivider.java

@Override
public double[] getBreakpoints(int areas) {
    if (areas < 2) {
        throw new NumberIsTooSmallException(areas, 2, true);
    }

    NormalDistribution normalDistribution = new NormalDistribution();
    int len = areas - 1;
    double[] result = new double[len];
    double searchArea = 1.0 / areas;
    for (int i = 0; i < len; i++) {
        result[i] = normalDistribution.inverseCumulativeProbability(searchArea * (i + 1));
    }

    return result;
}
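The breakpoints returned above are the quantiles that split the standard normal distribution into equal-probability regions. For example, with areas = 4 the loop evaluates the quartiles of N(0, 1), roughly -0.674, 0.0 and 0.674. An equivalent standalone computation (the divider class itself is not instantiated here):

NormalDistribution normalDistribution = new NormalDistribution();
double q1 = normalDistribution.inverseCumulativeProbability(0.25); // ~-0.6745
double q2 = normalDistribution.inverseCumulativeProbability(0.50); //  0.0
double q3 = normalDistribution.inverseCumulativeProbability(0.75); // ~ 0.6745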

From source file:roemetz.core.CalcGenRoeMetz.java

/**
 * Numerically integrates a one dimensional gaussian pdf times two normal
 * cdfs
 * 
 * @param u Contains experiment means
 * @param scale Contains one 1-D gaussian pdf and two 2-D normal cdfs
 * @param numSamples Number of samples for numerical integration
 * @return Integrated product moment
 */
public static double prodMoment1(double[] u, double[] scale, int numSamples) {
    NormalDistribution gauss = new NormalDistribution();

    double scale1 = scale[0];
    double scale20 = scale[1];
    double scale21 = scale[2];

    double lx = 10 * Math.sqrt(scale1);
    double dx = lx / (double) numSamples;
    double[] x = new double[numSamples];
    for (int i = 0; i < numSamples; i++) {
        x[i] = ((double) i * dx) - (0.5 * lx);
    }

    double f[] = new double[numSamples];
    for (int i = 0; i < numSamples; i++) {
        f[i] = Math.exp((-(x[i] * x[i])) / 2.0 / scale1);
    }

    for (int i = 0; i < numSamples; i++) {
        f[i] = f[i] / Math.sqrt(Math.PI * 2.0 * scale1);
    }

    double[] phi = new double[numSamples];
    for (int i = 0; i < numSamples; i++) {
        phi[i] = gauss.cumulativeProbability((u[0] + x[i]) / Math.sqrt(scale20))
                * gauss.cumulativeProbability((u[1] + x[i]) / Math.sqrt(scale21));
    }

    double[] toTotal = new double[numSamples];
    for (int i = 0; i < numSamples; i++) {
        toTotal[i] = dx * f[i] * phi[i];
    }
    return Matrix.total(toTotal);
}

From source file:roemetz.core.CalcGenRoeMetz.java

/**
 * Numerically integrates a two dimensional gaussian pdf times a gaussian
 * cdf
 * 
 * @param u Contains experiment means.
 * @param scale Contains 2-D gaussian pdf and cdf
 * @param numSamples Number of samples for numerical integration
 * @return Integrated product moment
 */
public static double prodMoment(double[] u, double[] scale, int numSamples) {
    NormalDistribution gauss = new NormalDistribution();
    double scaleFixed = scale[0];
    double scaleIndependentA = scale[1] + scale[3];
    double scaleIndependentB = scale[2] + scale[4];

    double lx = 10.0;
    double dx = lx / (double) numSamples;
    double[] x = new double[numSamples];
    double Integral = 0.0;

    for (int i = 0; i < numSamples; i++) {
        x[i] = ((double) i * dx) - (0.5 * lx);
    }

    double[] phi_x = new double[numSamples];
    double[] cdf_A = new double[numSamples];
    double[] cdf_B = new double[numSamples];

    for (int i = 0; i < numSamples; i++) {
        phi_x[i] = Math.exp(-(x[i] * x[i]) / 2.0) / Math.sqrt(Math.PI * 2.0);
        cdf_A[i] = gauss
                .cumulativeProbability((u[0] + x[i] * Math.sqrt(scaleFixed)) / Math.sqrt(scaleIndependentA));
        cdf_B[i] = gauss
                .cumulativeProbability((u[1] + x[i] * Math.sqrt(scaleFixed)) / Math.sqrt(scaleIndependentB));
        Integral = Integral + dx * phi_x[i] * cdf_A[i] * cdf_B[i];
    }

    return Integral;
}

From source file:roemetz.core.CalcGenRoeMetz.java

/**
 * Calculates AUC components of variance for given experiment parameters via
 * numerical integration
 * 
 * @param u Contains experiment means. Has 2 elements.
 * @param var_t Contains variance components. Has 18 elements.
 * @param Nreader Number of readers
 * @param Nnormal Number of normal (non-diseased) cases
 * @param Ndisease Number of diseased cases
 */
public static void genRoeMetz(double[] u, double[] var_t, int Nreader, int Nnormal, int Ndisease) {
    NormalDistribution gauss = new NormalDistribution();

    // number of samples for numerical integration, can change
    final int numSamples = 256;

    double v_AR0 = var_t[0];
    double v_AC0 = var_t[1];
    double v_ARC0 = var_t[2];
    double v_AR1 = var_t[3];
    double v_AC1 = var_t[4];
    double v_ARC1 = var_t[5];
    double v_BR0 = var_t[6];
    double v_BC0 = var_t[7];
    double v_BRC0 = var_t[8];
    double v_BR1 = var_t[9];
    double v_BC1 = var_t[10];
    double v_BRC1 = var_t[11];
    double v_R0 = var_t[12];
    double v_C0 = var_t[13];
    double v_RC0 = var_t[14];
    double v_R1 = var_t[15];
    double v_C1 = var_t[16];
    double v_RC1 = var_t[17];

    m = new double[2][2][9];

    // AUC
    double scale1 = v_R0 + v_C0 + v_RC0 + v_R1 + v_C1 + v_RC1;
    double scale20 = v_AR0 + v_AC0 + v_ARC0 + v_AR1 + v_AC1 + v_ARC1;
    double scale21 = v_BR0 + v_BC0 + v_BRC0 + v_BR1 + v_BC1 + v_BRC1;
    m[0][0][0] = gauss.cumulativeProbability(u[0] / Math.sqrt(scale1 + scale20));
    m[1][1][0] = gauss.cumulativeProbability(u[1] / Math.sqrt(scale1 + scale21));
    m[1][0][0] = m[0][0][0] - m[1][1][0];
    m[0][1][0] = -m[1][0][0];

    // M1
    double[] scaleM1 = { scale1, scale20, scale21 };
    m[0][0][1] = m[0][0][0];
    m[1][1][1] = m[1][1][0];
    m[1][0][1] = prodMoment1(u, scaleM1, numSamples);
    m[0][1][1] = m[1][0][1];

    // M2
    double scale30 = v_C0 + v_RC0 + v_AC0 + v_ARC0;
    double scale31 = v_C0 + v_RC0 + v_BC0 + v_BRC0;
    scale20 = v_AR1 + v_AC1 + v_ARC1 + v_AR0;
    scale21 = v_BR1 + v_BC1 + v_BRC1 + v_BR0;
    scale1 = v_R1 + v_C1 + v_RC1 + v_R0;

    scaleM1[0] = scale1 + scale20;
    scaleM1[1] = scale30;
    scaleM1[2] = scale30;
    m[0][0][2] = prodMoment1(new double[] { u[0], u[0] }, scaleM1, numSamples);

    scaleM1[0] = scale1 + scale21;
    scaleM1[1] = scale31;
    scaleM1[2] = scale31;
    m[1][1][2] = prodMoment1(new double[] { u[1], u[1] }, scaleM1, numSamples);

    double[] scaleM = { scale1, scale20, scale21, scale30, scale31 };
    m[1][0][2] = prodMoment(new double[] { u[0], u[1] }, scaleM, numSamples);
    m[0][1][2] = m[1][0][2];

    // M3
    scale30 = v_C1 + v_RC1 + v_AC1 + v_ARC1;
    scale31 = v_C1 + v_RC1 + v_BC1 + v_BRC1;
    scale20 = v_AR1 + v_AR0 + v_AC0 + v_ARC0;
    scale21 = v_BR1 + v_BR0 + v_BC0 + v_BRC0;
    scale1 = v_R1 + v_R0 + v_C0 + v_RC0;

    scaleM1[0] = scale1 + scale20;
    scaleM1[1] = scale30;
    scaleM1[2] = scale30;
    m[0][0][3] = prodMoment1(new double[] { u[0], u[0] }, scaleM1, numSamples);

    scaleM1[0] = scale1 + scale21;
    scaleM1[1] = scale31;
    scaleM1[2] = scale31;
    m[1][1][3] = prodMoment1(new double[] { u[1], u[1] }, scaleM1, numSamples);

    scaleM[0] = scale1;
    scaleM[1] = scale20;
    scaleM[2] = scale21;
    scaleM[3] = scale30;
    scaleM[4] = scale31;
    m[1][0][3] = prodMoment(new double[] { u[0], u[1] }, scaleM, numSamples);
    m[0][1][3] = m[1][0][3];

    // M4
    scale30 = v_C1 + v_RC1 + v_AC1 + v_ARC1 + v_C0 + v_RC0 + v_AC0 + v_ARC0;
    scale31 = v_C1 + v_RC1 + v_BC1 + v_BRC1 + v_C0 + v_RC0 + v_BC0 + v_BRC0;
    scale20 = v_AR1 + v_AR0;
    scale21 = v_BR1 + v_BR0;
    scale1 = v_R1 + v_R0;

    scaleM1[0] = scale1 + scale20;
    scaleM1[1] = scale30;
    scaleM1[2] = scale30;
    m[0][0][4] = prodMoment1(new double[] { u[0], u[0] }, scaleM1, numSamples);

    scaleM1[0] = scale1 + scale21;
    scaleM1[1] = scale31;
    scaleM1[2] = scale31;
    m[1][1][4] = prodMoment1(new double[] { u[1], u[1] }, scaleM1, numSamples);

    scaleM[0] = scale1;
    scaleM[1] = scale20;
    scaleM[2] = scale21;
    scaleM[3] = scale30;
    scaleM[4] = scale31;
    m[1][0][4] = prodMoment(new double[] { u[0], u[1] }, scaleM, numSamples);
    m[0][1][4] = m[1][0][4];

    // M5
    scale30 = v_R0 + v_R1 + v_RC0 + v_RC1 + v_AR0 + v_AR1 + v_ARC0 + v_ARC1;
    scale31 = v_R0 + v_R1 + v_RC0 + v_RC1 + v_BR0 + v_BR1 + v_BRC0 + v_BRC1;
    scale20 = v_AC0 + v_AC1;
    scale21 = v_BC0 + v_BC1;
    scale1 = v_C1 + v_C0;

    scaleM1[0] = scale1 + scale20;
    scaleM1[1] = scale30;
    scaleM1[2] = scale30;
    m[0][0][5] = prodMoment1(new double[] { u[0], u[0] }, scaleM1, numSamples);

    scaleM1[0] = scale1 + scale21;
    scaleM1[1] = scale31;
    scaleM1[2] = scale31;
    m[1][1][5] = prodMoment1(new double[] { u[1], u[1] }, scaleM1, numSamples);

    scaleM[0] = scale1;
    scaleM[1] = scale20;
    scaleM[2] = scale21;
    scaleM[3] = scale30;
    scaleM[4] = scale31;
    m[1][0][5] = prodMoment(new double[] { u[0], u[1] }, scaleM, numSamples);
    m[0][1][5] = m[1][0][5];

    // M6
    scale30 = v_R0 + v_R1 + v_C0 + v_RC0 + v_RC1 + v_AR0 + v_AR1 + v_AC0 + v_ARC0 + v_ARC1;
    scale31 = v_R0 + v_R1 + v_C0 + v_RC0 + v_RC1 + v_BR0 + v_BR1 + v_BC0 + v_BRC0 + v_BRC1;
    scale20 = v_AC1;
    scale21 = v_BC1;
    scale1 = v_C1;

    scaleM1[0] = scale1 + scale20;
    scaleM1[1] = scale30;
    scaleM1[2] = scale30;
    m[0][0][6] = prodMoment1(new double[] { u[0], u[0] }, scaleM1, numSamples);

    scaleM1[0] = scale1 + scale21;
    scaleM1[1] = scale31;
    scaleM1[2] = scale31;
    m[1][1][6] = prodMoment1(new double[] { u[1], u[1] }, scaleM1, numSamples);

    scaleM[0] = scale1;
    scaleM[1] = scale20;
    scaleM[2] = scale21;
    scaleM[3] = scale30;
    scaleM[4] = scale31;
    m[1][0][6] = prodMoment(new double[] { u[0], u[1] }, scaleM, numSamples);
    m[0][1][6] = m[1][0][6];

    // M7
    scale30 = v_R0 + v_R1 + v_C1 + v_RC0 + v_RC1 + v_AR0 + v_AR1 + v_AC1 + v_ARC0 + v_ARC1;
    scale31 = v_R0 + v_R1 + v_C1 + v_RC0 + v_RC1 + v_BR0 + v_BR1 + v_BC1 + v_BRC0 + v_BRC1;
    scale20 = v_AC0;
    scale21 = v_BC0;
    scale1 = v_C0;

    scaleM1[0] = scale1 + scale20;
    scaleM1[1] = scale30;
    scaleM1[2] = scale30;
    m[0][0][7] = prodMoment1(new double[] { u[0], u[0] }, scaleM1, numSamples);

    scaleM1[0] = scale1 + scale21;
    scaleM1[1] = scale31;
    scaleM1[2] = scale31;
    m[1][1][7] = prodMoment1(new double[] { u[1], u[1] }, scaleM1, numSamples);

    scaleM[0] = scale1;
    scaleM[1] = scale20;
    scaleM[2] = scale21;
    scaleM[3] = scale30;
    scaleM[4] = scale31;
    m[1][0][7] = prodMoment(new double[] { u[0], u[1] }, scaleM, numSamples);
    m[0][1][7] = m[1][0][7];

    // M8

    m[0][0][8] = m[0][0][0] * m[0][0][0];
    m[1][1][8] = m[1][1][0] * m[1][1][0];
    m[1][0][8] = m[0][0][0] * m[1][1][0];
    m[0][1][8] = m[1][0][8];

    calcAUCsAndDecomps(Nnormal, Ndisease, Nreader);

}
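The AUC entries near the top of the method (m[0][0][0] and m[1][1][0]) follow the binormal formula: the cumulative standard normal evaluated at the mean separation divided by the square root of the total variance. A minimal sketch of that single step, with made-up numbers in place of u[0] and scale1 + scale20:

NormalDistribution gauss = new NormalDistribution();
double u0 = 1.0;             // assumed mean separation, for illustration only
double totalVariance = 2.0;  // assumed scale1 + scale20, for illustration only
double aucA = gauss.cumulativeProbability(u0 / Math.sqrt(totalVariance)); // ~0.760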