Example usage for org.apache.commons.math3.distribution NormalDistribution NormalDistribution

List of usage examples for org.apache.commons.math3.distribution NormalDistribution NormalDistribution

Introduction

On this page you can find example usage of org.apache.commons.math3.distribution NormalDistribution NormalDistribution.

Prototype

public NormalDistribution(double mean, double sd) throws NotStrictlyPositiveException 

Source Link

Document

Create a normal distribution using the given mean and standard deviation.

Usage

From source file:akori.Normal.java

/**
 * Prints the density of a N(0, 0.4) normal distribution at 50 evenly
 * spaced points in [0, 1).
 */
static public void main(String[] args) {
    NormalDistribution dist = new NormalDistribution(0, 0.4);

    double step = 1.0 / 50.0;
    for (double x = 0; x < 1; x = x + step) {
        System.out.println("i: " + x);
        System.out.println("densidad: " + dist.density(x));
    }
}

From source file:edu.upenn.egricelab.AlignerBoost.FilterSAMAlignPE.java

/**
 * Entry point: filters paired-end SAM/BAM alignments.
 *
 * <p>Pipeline: parse options, optionally load a chromosome filter and a known-SNP VCF,
 * one-pass scan to estimate the insert-fragment-size distribution (unless disabled),
 * then re-iterate the input, fix each alignment, group alignments by read name,
 * filter/score each group, and write the surviving pairs to the output file.
 *
 * <p>NOTE(review): relies on many static fields (chrFile, verbose, MAX_REPORT, ...)
 * set by parseOptions; assumed initialized before use.
 *
 * <p>Fix: the NH ("number of reported hits") computation previously inverted its
 * ternary — when MAX_REPORT was 0 ("report all") it produced min(size, 0) = 0, and
 * when MAX_REPORT was positive it ignored the cap. It now mirrors the report-loop
 * bound: size when uncapped, min(size, MAX_REPORT) when capped.
 *
 * @param args command-line arguments, forwarded to parseOptions
 */
public static void main(String[] args) {
    if (args.length == 0) {
        printUsage();
        return;
    }
    try {
        parseOptions(args);
    } catch (IllegalArgumentException e) {
        System.err.println("Error: " + e.getMessage());
        printUsage();
        return;
    }

    // Read in chrList, if specified
    if (chrFile != null) {
        chrFilter = new HashSet<String>();
        try {
            BufferedReader chrFilterIn = new BufferedReader(new FileReader(chrFile));
            String chr = null;
            while ((chr = chrFilterIn.readLine()) != null)
                chrFilter.add(chr);
            chrFilterIn.close();
            if (verbose > 0)
                System.err.println(
                        "Only looking at alignments on " + chrFilter.size() + " specified chromosomes");
        } catch (IOException e) {
            System.err.println("Error: " + e.getMessage());
            return;
        }
    }

    if (verbose > 0) {
        // Start the processMonitor
        processMonitor = new Timer();
        // Start the ProcessStatusTask
        statusTask = new ProcessStatusTask();
        // Schedule to show the status every 1 second
        processMonitor.scheduleAtFixedRate(statusTask, 0, statusFreq);
    }

    // Read in known SNP file, if specified
    if (knownSnpFile != null) {
        if (verbose > 0)
            System.err.println("Checking known SNPs from user specified VCF file");
        knownVCF = new VCFFileReader(new File(knownSnpFile));
    }

    SamReaderFactory readerFac = SamReaderFactory.makeDefault();
    SAMFileWriterFactory writerFac = new SAMFileWriterFactory();
    if (!isSilent)
        readerFac.validationStringency(ValidationStringency.LENIENT); // use LENIENT stringency
    else
        readerFac.validationStringency(ValidationStringency.SILENT); // use SILENT stringency

    SamReader in = readerFac.open(new File(inFile));
    SAMFileHeader inHeader = in.getFileHeader();
    // NOTE(review): this warns when the input IS reference-grouped and coordinate-sorted;
    // presumably this tool requires queryname grouping — confirm the intended condition.
    if (inHeader.getGroupOrder() == GroupOrder.reference && inHeader.getSortOrder() == SortOrder.coordinate)
        System.err.println("Warning: Input file '" + inFile
                + "' might be sorted by coordinate and cannot be correctly processed!");

    SAMFileHeader header = inHeader.clone(); // copy the inFile header as outFile header
    // Add new programHeader
    SAMProgramRecord progRec = new SAMProgramRecord(progName);
    progRec.setProgramName(progName);
    progRec.setProgramVersion(progVer);
    progRec.setCommandLine(StringUtils.join(" ", args));
    header.addProgramRecord(progRec);
    // reset the orders
    header.setGroupOrder(groupOrder);
    header.setSortOrder(sortOrder);

    // write SAMHeader
    String prevID = null;
    SAMRecord prevRecord = null;
    List<SAMRecord> alnList = new ArrayList<SAMRecord>();
    List<SAMRecordPair> alnPEList = null;

    // Estimate fragment length distribution by scan one-pass through the alignments
    SAMRecordIterator results = in.iterator();
    if (!NO_ESTIMATE) {
        if (verbose > 0) {
            System.err.println("Estimating insert fragment size distribution ...");
            statusTask.reset();
            statusTask.setInfo("alignments scanned");
        }
        long N = 0;
        double fragL_S = 0; // fragLen sum
        double fragL_SS = 0; // fragLen^2 sum
        while (results.hasNext()) {
            SAMRecord record = results.next();
            if (verbose > 0)
                statusTask.updateStatus();
            // count each template once: first-of-pair, primary/non-supplementary only
            if (record.getFirstOfPairFlag() && !record.isSecondaryOrSupplementary()) {
                double fragLen = Math.abs(record.getInferredInsertSize());
                if (fragLen != 0 && fragLen >= MIN_FRAG_LEN && fragLen <= MAX_FRAG_LEN) { // only consider certain alignments
                    N++;
                    fragL_S += fragLen;
                    fragL_SS += fragLen * fragLen;
                }
                // stop estimate if already enough
                if (MAX_ESTIMATE_SCAN > 0 && N >= MAX_ESTIMATE_SCAN)
                    break;
            }
        }
        if (verbose > 0)
            statusTask.finish();
        // estimate fragment size
        if (N >= MIN_ESTIMATE_BASE) { // override command line values
            MEAN_FRAG_LEN = fragL_S / N;
            // sample standard deviation via the sum / sum-of-squares identity
            SD_FRAG_LEN = Math.sqrt((N * fragL_SS - fragL_S * fragL_S) / (N * (N - 1)));
            String estStr = String.format("Estimated fragment size distribution: N(%.1f, %.1f)", MEAN_FRAG_LEN,
                    SD_FRAG_LEN);
            if (verbose > 0)
                System.err.println(estStr);
            // also add the estimation to comment
            header.addComment(estStr);
        } else {
            System.err.println(
                    "Unable to estimate the fragment size distribution due to too few observed alignments");
            System.err.println(
                    "You have to specify the '--mean-frag-len' and '--sd-frag-len' on the command line and re-run this step");
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }
        // Initiate the normal model
        normModel = new NormalDistribution(MEAN_FRAG_LEN, SD_FRAG_LEN);
        // reset the iterator, if necessary; plain SAM streams cannot be rewound,
        // so reopen the file instead
        if (in.type() == SamReader.Type.SAM_TYPE) {
            try {
                in.close();
            } catch (IOException e) {
                System.err.println(e.getMessage());
            }
            in = readerFac.open(new File(inFile));
        }
        results.close();
        results = in.iterator();
    } // end of NO_ESTIMATE

    SAMFileWriter out = OUT_IS_SAM ? writerFac.makeSAMWriter(header, false, new File(outFile))
            : writerFac.makeBAMWriter(header, false, new File(outFile));

    // check each alignment again
    if (verbose > 0) {
        System.err.println("Filtering alignments ...");
        statusTask.reset();
        statusTask.setInfo("alignments processed");
    }
    while (results.hasNext()) {
        SAMRecord record = results.next();
        if (verbose > 0)
            statusTask.updateStatus();
        String ID = record.getReadName();
        // fix read and quality string for this read, if is a secondary hit from multiple hits, used for BWA alignment
        if (ID.equals(prevID) && record.getReadLength() == 0)
            SAMAlignFixer.fixSAMRecordRead(record, prevRecord);
        if (chrFilter != null && !chrFilter.contains(record.getReferenceName())) {
            prevID = ID;
            prevRecord = record;
            continue;
        }

        // fix MD:Z string for certain aligners with invalid format (i.e. seqAlto)
        if (fixMD)
            SAMAlignFixer.fixMisStr(record);

        // fix alignment, ignore if failed (unmapped or empty)
        if (!SAMAlignFixer.fixSAMRecord(record, knownVCF, DO_1DP)) {
            prevID = ID;
            prevRecord = record;
            continue;
        }
        if (!record.getReadPairedFlag()) {
            System.err.println("Error: alignment is not from a paired-end read at\n" + record.getSAMString());
            out.close();
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }

        if (!ID.equals(prevID) && prevID != null || !results.hasNext()) { // a non-first new ID meet, or end of alignments
            // create alnPEList from filtered alnList
            alnPEList = createAlnPEListFromAlnList(alnList);
            int totalPair = alnPEList.size();
            // filter highly unlikely PEhits
            filterPEHits(alnPEList, MIN_ALIGN_RATE, MIN_IDENTITY);
            // calculate posterior mapQ for each pair
            calcPEHitPostP(alnPEList, totalPair, MAX_HIT);
            // filter hits by mapQ
            if (MIN_MAPQ > 0)
                filterPEHits(alnPEList, MIN_MAPQ);

            // sort the list first with an anonymous class of comparator, with DESCREASING order
            Collections.sort(alnPEList, Collections.reverseOrder());
            // control max-best
            if (MAX_BEST != 0 && alnPEList.size() > MAX_BEST) { // potential too much best hits
                int nBestStratum = 0;
                int bestMapQ = alnPEList.get(0).getPEMapQ(); // best mapQ from first PE
                for (SAMRecordPair pr : alnPEList)
                    if (pr.getPEMapQ() == bestMapQ)
                        nBestStratum++;
                    else
                        break; // stop searching for sorted list
                if (nBestStratum > MAX_BEST)
                    alnPEList.clear();
            }
            // filter alignments with auxiliary filters
            if (!MAX_SENSITIVITY)
                filterPEHits(alnPEList, MAX_SEED_MIS, MAX_SEED_INDEL, MAX_ALL_MIS, MAX_ALL_INDEL);

            // NH = number of hits actually reported (capped by MAX_REPORT when non-zero);
            // XN = number of hits surviving the filters. Both are invariant per read group,
            // so compute them once outside the report loop.
            // BUG FIX: the ternary branches were previously swapped, yielding NH = 0
            // whenever MAX_REPORT == 0 and an uncapped NH otherwise.
            int nReport = MAX_REPORT == 0 ? alnPEList.size() : Math.min(alnPEList.size(), MAX_REPORT);
            int nFiltered = alnPEList.size();
            // report remaining secondary alignments, up-to MAX_REPORT
            for (int i = 0; i < alnPEList.size() && (MAX_REPORT == 0 || i < MAX_REPORT); i++) {
                SAMRecordPair repPair = alnPEList.get(i);
                if (doUpdateBit)
                    repPair.setNotPrimaryAlignmentFlags(i != 0);
                if (repPair.fwdRecord != null) {
                    repPair.fwdRecord.setAttribute("NH", nReport);
                    repPair.fwdRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.fwdRecord);
                }
                if (repPair.revRecord != null) {
                    repPair.revRecord.setAttribute("NH", nReport);
                    repPair.revRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.revRecord);
                }
            }
            // reset list
            alnList.clear();
            alnPEList.clear();
        }
        // update
        if (!ID.equals(prevID)) {
            prevID = ID;
            prevRecord = record;
        }
        alnList.add(record);
    } // end while
    try {
        in.close();
        out.close();
    } catch (IOException e) {
        System.err.println(e.getMessage());
    }
    // Terminate the monitor task and monitor
    if (verbose > 0) {
        statusTask.cancel();
        statusTask.finish();
        processMonitor.cancel();
    }
}

From source file:io.yields.math.framework.distribution.Samplers.java

/**
 * Builds a sampler that draws from the standard normal distribution N(0, 1),
 * backed by a Mersenne-Twister source with a fixed seed for reproducibility.
 */
public static Sampler normal() {
    MersenneTwister seededTwister = new MersenneTwister(0);
    CommonsMathDoubleRandomSequence sequence = new CommonsMathDoubleRandomSequence(seededTwister);
    CommonsMathDistribution standardNormal = new CommonsMathDistribution(new NormalDistribution(0d, 1d));
    return new Sampler(sequence, standardNormal);
}

From source file:com.mycompany.app.VariousTest.java

/**
 * Demo: evaluates the density of N(mean = -2, sd = 2.5) at x = -3,
 * scales it by 25, and prints the result to stdout.
 */
public static void normalDistribution() {
    final NormalDistribution dist = new NormalDistribution(-2, 2.5);
    final double scaledDensity = dist.density(-3) * 25;
    System.out.println(scaledDensity);
}

From source file:com.freevariable.lancer.stat.Probability.java

/**
 * Cumulative probability P(X &lt;= a) for X ~ N(mean, variance).
 *
 * @param mean     the distribution mean
 * @param variance the distribution variance (note: not the standard deviation)
 * @param a        the evaluation point
 * @return the CDF of the normal distribution at {@code a}
 */
public static double normal(double mean, double variance, double a) {
    // XXX: sure would be smarter to memoize these
    double sd = Math.sqrt(variance);
    NormalDistribution dist = new NormalDistribution(mean, sd);
    return dist.cumulativeProbability(a);
}

From source file:net.openhft.chronicle.timeseries.Columns.java

/**
 * Fills {@code col} with a Brownian-bridge-like random walk from {@code start}
 * to {@code end} with overall standard deviation {@code sd}.
 *
 * <p>Strategy: generate coarse "trend" endpoints one per CHUNK_SIZE values,
 * linearly correct them so the walk terminates exactly at {@code end}, then
 * fill each chunk between its endpoints in parallel on the common pool.
 *
 * <p>Fixes over the previous version: the off-heap trend store is now released
 * in a finally block (it leaked on any exception); the unused local holding the
 * addAndGetDoubleNotAtomic result is dropped; raw ForkJoinTask is now ForkJoinTask&lt;?&gt;.
 *
 * @param col   destination column; its length determines the walk length
 * @param start value at the first index
 * @param end   value at the last index
 * @param sd    overall standard deviation of the generated series
 */
public static void generateBrownian(DoubleColumn col, double start, double end, double sd) {
    long length = col.length();
    // per-sample deviation so the whole walk has overall sd `sd`
    double sd2 = sd / Math.sqrt(length);
    NormalDistribution nd = new NormalDistribution(0, sd2 * CHUNK_SIZE);
    int trendLength = Math.toIntExact((length - 1) / CHUNK_SIZE + 2);
    BytesStore trend = NativeBytesStore.lazyNativeBytesStoreWithFixedCapacity(trendLength * 8L);
    try {
        // random walk over the chunk endpoints, one normal step per chunk
        double x = start;
        RandomGenerator rand = new MersenneTwister();
        for (int i = 0; i < trendLength - 1; i++) {
            float f = rand.nextFloat();
            trend.writeDouble((long) i << 3, x);
            x += nd.inverseCumulativeProbability(f);
        }
        trend.writeDouble((long) (trendLength - 1) << 3, x);
        // linear correction so the final endpoint lands exactly on `end`
        double diff = end - x;
        double gradient = diff / (trendLength - 1);
        for (int i = 0; i < trendLength; i++) {
            trend.addAndGetDoubleNotAtomic((long) i << 3, i * gradient);
        }
        // fill each chunk between its (corrected) endpoints, in parallel
        int procs = Runtime.getRuntime().availableProcessors();
        int chunksPerTask = (trendLength - 1) / procs + 1;
        ForkJoinPool fjp = ForkJoinPool.commonPool();
        List<ForkJoinTask<?>> tasks = new ArrayList<>(procs);
        for (int i = 0; i < procs; i++) {
            int si = i * chunksPerTask;
            int ei = Math.min(trendLength, si + chunksPerTask);
            tasks.add(fjp.submit(() -> {
                // thread-local distribution and RNG: NormalDistribution and
                // MersenneTwister are not safe to share across tasks
                NormalDistribution nd2 = new NormalDistribution(0, sd2);
                RandomGenerator rand2 = new MersenneTwister();
                for (int j = si; j < ei; j++) {
                    generateBrownian(col, (long) j * CHUNK_SIZE, trend.readDouble((long) j << 3),
                            trend.readDouble((long) (j + 1) << 3), nd2, rand2);
                }
            }));
        }
        for (ForkJoinTask<?> task : tasks) {
            task.join();
        }
    } finally {
        // always free the off-heap store, even if generation fails
        trend.release();
    }
}

From source file:com.freevariable.lancer.stat.Probability.java

/**
 * Quantile (inverse CDF) of N(mean, variance): the value x such that
 * P(X &lt;= x) = a.
 *
 * @param mean     the distribution mean
 * @param variance the distribution variance (note: not the standard deviation)
 * @param a        the probability, in [0, 1]
 * @return the quantile of the normal distribution at probability {@code a}
 */
public static double normalInverse(double mean, double variance, double a) {
    // XXX: sure would be smarter to memoize these
    double sd = Math.sqrt(variance);
    NormalDistribution dist = new NormalDistribution(mean, sd);
    return dist.inverseCumulativeProbability(a);
}

From source file:gedi.util.math.stat.kernel.GaussianKernel.java

/**
 * Creates a standard-normal Gaussian kernel (mean 0, sd 1) with a
 * 1E-4 cutoff passed to the superclass (presumably a tail-mass
 * truncation threshold — confirm against the base class).
 */
public GaussianKernel() {
    super(new NormalDistribution(0, 1), 1E-4);
}

From source file:gedi.util.math.stat.kernel.GaussianKernel.java

/**
 * Creates a zero-mean Gaussian kernel with the given standard deviation
 * and a 1E-4 cutoff passed to the superclass (presumably a tail-mass
 * truncation threshold — confirm against the base class).
 *
 * @param sd the kernel standard deviation (must be strictly positive)
 */
public GaussianKernel(double sd) {
    super(new NormalDistribution(0, sd), 1E-4);
}

From source file:com.itemanalysis.psychometrics.statistics.NormalDensity.java

/**
 * Creates a normal density backed by a commons-math NormalDistribution
 * with the given mean and standard deviation.
 *
 * @param mean the distribution mean
 * @param sd   the standard deviation; NormalDistribution throws
 *             NotStrictlyPositiveException if sd &lt;= 0
 */
public NormalDensity(double mean, double sd) {
    normal = new NormalDistribution(mean, sd);
}