Example usage for org.apache.commons.lang.math NumberUtils max

List of usage examples for org.apache.commons.lang.math NumberUtils max

Introduction

On this page you can find example usages of org.apache.commons.lang.math NumberUtils max.

Prototype

public static float max(float[] array) 

Source Link

Document

Returns the maximum value in an array.

Usage

From source file:MathUtilsTrial.java

public static void main(String[] args) {

    // Report whether the String consists solely of digits
    // (the decimal point makes this example print false)
    System.out.println("Is Digits >>> " + NumberUtils.isDigits("123.123"));

    // Report whether the String parses as a valid number
    System.out.println("Is Number >>> " + NumberUtils.isNumber("123.123"));

    // Find the largest element of a primitive double array
    double[] samples = { 3.33, 8.88, 1.11 };
    System.out.println("MAX >>> " + NumberUtils.max(samples));

}

From source file:edu.scripps.fl.curves.plot.GCurvePlot.java

/**
 * Samples the fitted function at {@code samples} evenly spaced x positions
 * between {@code start} and {@code end} (x is a log10 concentration — the
 * function is evaluated at 10^x) and returns the samples as an XYLine
 * scaled to its own data range.
 *
 * @param curve   the curve whose fit is being sampled
 * @param f       the fitted function to evaluate
 * @param start   first x value (inclusive), in log10 space
 * @param end     last x value (inclusive), in log10 space
 * @param samples number of sample points; must be at least 2
 * @throws IllegalArgumentException if {@code samples < 2} (the original code
 *         divided by {@code samples - 1} and produced Infinity/NaN for 1)
 */
protected static XYLine sampleFunctionToLine(Curve curve, FitFunction f, double start, double end,
        int samples) {
    if (samples < 2) {
        throw new IllegalArgumentException("samples must be >= 2, got " + samples);
    }
    double[] xValues = new double[samples];
    double[] yValues = new double[samples];

    // Evenly partition [start, end] into (samples - 1) intervals.
    double step = (end - start) / (double) (samples - 1);
    for (int i = 0; i < samples; i++) {
        double x = start + step * (double) i;
        xValues[i] = x;
        // x is log10(concentration), so evaluate the response at 10^x.
        double y = f.getResponse(curve, Math.pow(10, x));
        yValues[i] = y;
    }

    Data xData = DataUtil.scaleWithinRange(NumberUtils.min(xValues), NumberUtils.max(xValues), xValues);
    Data yData = DataUtil.scaleWithinRange(NumberUtils.min(yValues), NumberUtils.max(yValues), yValues);
    return Plots.newXYLine(xData, yData, Color.GREEN, "");
}

From source file:com.haulmont.cuba.gui.data.aggregation.NumberAggregationHelper.java

@Nullable
public Double max() {
    // An empty accumulator has no maximum to report.
    if (items.isEmpty()) {
        return null;
    }

    // Box into a Double[] and unbox to double[] for NumberUtils.max.
    Double[] boxed = items.toArray(new Double[items.size()]);
    return NumberUtils.max(ArrayUtils.toPrimitive(boxed));
}

From source file:edu.scripps.fl.curves.plot.GCurvePlot.java

/**
 * Adds an experimental curve and its fitted function to the plot: a marker
 * line for the raw (x = log10 concentration, y = response) data and a
 * sampled line for the fit, both scaled to the curve's own range.
 *
 * @param curve    the curve whose responses/concentrations are plotted
 * @param function the fitted function sampled across the curve's x range
 */
public void addCurve(Curve curve, FitFunction function) {
    double[] yValues = (double[]) ConvertUtils.convert(curve.getResponses(), double[].class);
    double curveMinY = NumberUtils.min(yValues);
    double curveMaxY = NumberUtils.max(yValues);
    this.minY = Math.min(minY, curveMinY);
    // BUG FIX: the running maximum must be widened with Math.max, not
    // Math.min — the original only ever shrank maxY.
    this.maxY = Math.max(maxY, curveMaxY);
    Data yData = DataUtil.scaleWithinRange(curveMinY, curveMaxY, yValues);

    // Concentrations are plotted on a log10 x-axis.
    double[] xValues = (double[]) ConvertUtils.convert(curve.getConcentrations(), double[].class);
    for (int ii = 0; ii < xValues.length; ii++) {
        xValues[ii] = Math.log10(xValues[ii]);
    }
    double curveMinX = NumberUtils.min(xValues);
    double curveMaxX = NumberUtils.max(xValues);
    this.minX = Math.min(minX, curveMinX);
    // BUG FIX: same min/max mix-up as maxY above.
    this.maxX = Math.max(maxX, curveMaxX);
    // curveMinX/curveMaxX already hold NumberUtils.min/max(xValues); no need
    // to recompute them for the scaling call.
    Data xData = DataUtil.scaleWithinRange(curveMinX, curveMaxX, xValues);

    // Next paint from the drawing supplier as a zero-padded 6-digit
    // lowercase hex RGB string (alpha stripped).
    String hexColor = String.format("%06x",
            ((java.awt.Color) drawingSupplier.getNextPaint()).getRGB() & 0x00ffffff);
    Color color = Color.newColor(hexColor);

    XYLine line1 = Plots.newXYLine(xData, yData, getBackgroundColor(), "");
    line1.addShapeMarkers(Shape.CIRCLE, color, 5);

    // Sample the fitted function across the observed concentration range.
    XYLine fittedLine = sampleFunctionToLine(curve, function, curveMinX, curveMaxX, 100);
    fittedLine.setColor(color);

    lines.add(line1);
    lines.add(fittedLine);
}

From source file:org.apache.accumulo.test.functional.BalanceInPresenceOfOfflineTableIT.java

/**
 * Verifies that tablet balancing proceeds even when an offline table has
 * outstanding migrations: ingests enough data to create many tablets, then
 * polls master stats until the tablet counts per tserver are roughly even.
 */
@Test
public void test() throws Exception {
    log.info("Test that balancing is not stopped by an offline table with outstanding migrations.");

    log.debug("starting test ingestion");

    // Configure ingest/verify clients, switching to Kerberos credentials
    // when SASL RPC is enabled on the cluster.
    TestIngest.Opts opts = new TestIngest.Opts();
    VerifyIngest.Opts vopts = new VerifyIngest.Opts();
    ClientConfiguration conf = cluster.getClientConfig();
    if (conf.getBoolean(ClientProperty.INSTANCE_RPC_SASL_ENABLED.getKey(), false)) {
        opts.updateKerberosCredentials(cluster.getClientConfig());
        vopts.updateKerberosCredentials(cluster.getClientConfig());
    } else {
        opts.setPrincipal("root");
        vopts.setPrincipal("root");
    }
    // Ingest 200k rows so the table splits into enough tablets to balance.
    vopts.rows = opts.rows = 200000;
    opts.setTableName(TEST_TABLE);
    TestIngest.ingest(connector, opts, new BatchWriterOpts());
    connector.tableOperations().flush(TEST_TABLE, null, null, true);
    vopts.setTableName(TEST_TABLE);
    VerifyIngest.verifyIngest(connector, vopts, new ScannerOpts());

    log.debug("waiting for balancing, up to ~5 minutes to allow for migration cleanup.");
    final long startTime = System.currentTimeMillis();
    // Exponential backoff between polls: start at 10s, double each pass.
    long currentWait = 10 * 1000;
    boolean balancingWorked = false;

    Credentials creds = new Credentials(getAdminPrincipal(), getAdminToken());
    // Poll master stats until tablets look balanced or ~5m15s elapses.
    while (!balancingWorked && (System.currentTimeMillis() - startTime) < ((5 * 60 + 15) * 1000)) {
        Thread.sleep(currentWait);
        currentWait *= 2;

        log.debug("fetch the list of tablets assigned to each tserver.");

        MasterClientService.Iface client = null;
        MasterMonitorInfo stats = null;
        try {
            Instance instance = new ZooKeeperInstance(cluster.getClientConfig());
            client = MasterClient
                    .getConnectionWithRetry(new ClientContext(instance, creds, cluster.getClientConfig()));
            stats = client.getMasterStats(Tracer.traceInfo(), creds.toThrift(instance));
        } catch (ThriftSecurityException exception) {
            throw new AccumuloSecurityException(exception);
        } catch (TException exception) {
            throw new AccumuloException(exception);
        } finally {
            // Always release the master client connection.
            if (client != null) {
                MasterClient.close(client);
            }
        }

        if (stats.getTServerInfoSize() < 2) {
            log.debug("we need >= 2 servers. sleeping for " + currentWait + "ms");
            continue;
        }
        if (stats.getUnassignedTablets() != 0) {
            log.debug("We shouldn't have unassigned tablets. sleeping for " + currentWait + "ms");
            continue;
        }

        // Total tablets hosted per tablet server, summed across all tables.
        long[] tabletsPerServer = new long[stats.getTServerInfoSize()];
        Arrays.fill(tabletsPerServer, 0l);
        for (int i = 0; i < stats.getTServerInfoSize(); i++) {
            for (Map.Entry<String, TableInfo> entry : stats.getTServerInfo().get(i).getTableMap().entrySet()) {
                tabletsPerServer[i] += entry.getValue().getTablets();
            }
        }

        if (tabletsPerServer[0] <= 10) {
            log.debug("We should have > 10 tablets. sleeping for " + currentWait + "ms");
            continue;
        }
        // Balanced enough when the least-loaded server hosts at least half as
        // many tablets as the most-loaded one.
        long min = NumberUtils.min(tabletsPerServer), max = NumberUtils.max(tabletsPerServer);
        log.debug("Min=" + min + ", Max=" + max);
        if ((min / ((double) max)) < 0.5) {
            log.debug("ratio of min to max tablets per server should be roughly even. sleeping for "
                    + currentWait + "ms");
            continue;
        }
        balancingWorked = true;
    }

    Assert.assertTrue("did not properly balance", balancingWorked);
}

From source file:org.broadinstitute.cga.tools.gatk.walkers.cancer.mutect.MuTect.java

/**
 * GATK locus-walker map() for MuTect: at each covered reference position,
 * evaluates every possible alternate allele as a candidate somatic mutation,
 * scores it against the tumor and normal read piles, applies rejection
 * filters, and writes call stats (and optionally a VCF record) for the
 * best-scoring candidate at this locus.
 *
 * Returns 0 in NOOP mode, otherwise -1 (skipped or fully processed locus).
 */
@Override
public Integer map(final RefMetaDataTracker tracker, final ReferenceContext ref,
        final AlignmentContext rawContext) {
    if (MTAC.NOOP)
        return 0;

    // Candidates keyed by initial tumor LOD so lastEntry() is the best one.
    TreeMap<Double, CandidateMutation> messageByTumorLod = new TreeMap<Double, CandidateMutation>();

    ReadBackedPileup pileup = rawContext.getBasePileup();
    int numberOfReads = pileup.depthOfCoverage();
    binReadsProcessed += numberOfReads;

    // Progress logging roughly every million reads processed.
    if (binReadsProcessed >= 1000000) {
        long time = System.currentTimeMillis();
        long elapsedTime = time - lastTime;
        lastTime = time;

        totalReadsProcessed += binReadsProcessed;
        binReadsProcessed = 0;

        logger.info(String.format("[MUTECT] Processed %d reads in %d ms", totalReadsProcessed, elapsedTime));
    }

    // an optimization to speed things up when there is no coverage
    if (!MTAC.FORCE_OUTPUT && numberOfReads == 0) {
        return -1;
    }

    // get sequence context around mutation
    String sequenceContext = SequenceUtils.createSequenceContext(ref, 3);

    // only process bases where the reference is [ACGT], because the FASTA for HG18 has N,M and R!
    final char upRef = Character.toUpperCase(ref.getBaseAsChar());
    if (upRef != 'A' && upRef != 'C' && upRef != 'G' && upRef != 'T') {
        return -1;
    }

    try {

        Map<SampleType, ReadBackedPileup> pileupMap = getPileupsBySampleType(pileup);

        final LocusReadPile tumorReadPile = new LocusReadPile(pileupMap.get(SampleType.TUMOR), upRef,
                MTAC.MIN_QSCORE, MIN_QSUM_QSCORE, false, MTAC.ARTIFACT_DETECTION_MODE,
                MTAC.ENABLE_QSCORE_OUTPUT);
        final LocusReadPile normalReadPile = new LocusReadPile(pileupMap.get(SampleType.NORMAL), upRef,
                MTAC.MIN_QSCORE, 0, this.USE_MAPQ0_IN_NORMAL_QSCORE, true, MTAC.ENABLE_QSCORE_OUTPUT);

        // Overlapping variant records from the panel-of-normals, COSMIC and
        // dbSNP tracks at this locus.
        Collection<VariantContext> panelOfNormalsVC = tracker.getValues(normalPanelRod,
                rawContext.getLocation());
        Collection<VariantContext> cosmicVC = tracker.getValues(cosmicRod, rawContext.getLocation());
        Collection<VariantContext> dbsnpVC = tracker.getValues(dbsnpRod, rawContext.getLocation());

        // remove the effect of cosmic from dbSNP
        boolean germlineAtRisk = (!dbsnpVC.isEmpty() && cosmicVC.isEmpty());

        // compute coverage flags
        int tumorCoveredDepthThreshold = 14;
        int normalCoveredDepthThreshold = (germlineAtRisk) ? 19 : 8;
        if (!hasNormalBam) {
            normalCoveredDepthThreshold = 0;
        }

        int tumorBaseCount = tumorReadPile.finalPileupReads.size();
        int normalBaseCount = normalReadPile.finalPileupReads.size();
        boolean isTumorCovered = tumorBaseCount >= tumorCoveredDepthThreshold;
        boolean isNormalCovered = normalBaseCount >= normalCoveredDepthThreshold;
        boolean isBaseCovered = isTumorCovered && isNormalCovered;
        if (!hasNormalBam) {
            isBaseCovered = isTumorCovered;
        }

        // Emit per-locus coverage/depth tracks.
        stdCovWriter.writeCoverage(rawContext, isBaseCovered);
        int tumorQ20BaseCount = tumorReadPile.getFilteredBaseCount(20);
        int normalQ20BaseCount = normalReadPile.getFilteredBaseCount(20);
        q20CovWriter.writeCoverage(rawContext, tumorQ20BaseCount >= 20 && normalQ20BaseCount >= 20);
        tumorDepthWriter.writeCoverage(rawContext, tumorBaseCount);
        normalDepthWriter.writeCoverage(rawContext, normalBaseCount);

        // calculate power
        double tumorPower = tumorPowerCalculator.cachingPowerCalculation(tumorBaseCount,
                MTAC.POWER_CONSTANT_AF);

        double normalPowerNoSNPPrior = normalNovelSitePowerCalculator.cachingPowerCalculation(normalBaseCount);
        double normalPowerWithSNPPrior = normalDbSNPSitePowerCalculator
                .cachingPowerCalculation(normalBaseCount);

        double normalPower = (germlineAtRisk) ? normalPowerWithSNPPrior : normalPowerNoSNPPrior;

        double combinedPower = tumorPower * normalPower;
        if (!hasNormalBam) {
            combinedPower = tumorPower;
        }

        powerWriter.writeCoverage(rawContext, combinedPower);

        int mapQ0Reads = tumorReadPile.qualityScoreFilteredPileup.getNumberOfMappingQualityZeroReads()
                + normalReadPile.qualityScoreFilteredPileup.getNumberOfMappingQualityZeroReads();

        int totalReads = tumorReadPile.qualityScoreFilteredPileup.depthOfCoverage()
                + normalReadPile.qualityScoreFilteredPileup.depthOfCoverage();

        // Test each of the possible alternate alleles
        for (final char altAllele : new char[] { 'A', 'C', 'G', 'T' }) {
            if (altAllele == upRef) {
                continue;
            }
            if (!MTAC.FORCE_OUTPUT && tumorReadPile.qualitySums.getCounts(altAllele) == 0) {
                continue;
            }

            // Populate the candidate with locus-level context and power stats.
            CandidateMutation candidate = new CandidateMutation(rawContext.getLocation(), upRef);
            candidate.setSequenceContext(sequenceContext);
            candidate.setTumorSampleName(MTAC.TUMOR_SAMPLE_NAME);
            candidate.setNormalSampleName(MTAC.NORMAL_SAMPLE_NAME);
            candidate.setCovered(isBaseCovered);
            candidate.setPower(combinedPower);
            candidate.setTumorPower(tumorPower);
            candidate.setNormalPower(normalPower);
            candidate.setNormalPowerWithSNPPrior(normalPowerWithSNPPrior);
            candidate.setNormalPowerNoSNPPrior(normalPowerNoSNPPrior);
            candidate.setTumorQ20Count(tumorQ20BaseCount);
            candidate.setNormalQ20Count(normalQ20BaseCount);
            candidate.setInitialTumorNonRefQualitySum(tumorReadPile.qualitySums.getOtherQualities(upRef));
            candidate.setAltAllele(altAllele);
            candidate.setMapQ0Reads(mapQ0Reads);
            candidate.setTotalReads(totalReads);
            candidate.setContaminationFraction(MTAC.FRACTION_CONTAMINATION);
            candidate.setPanelOfNormalsVC(
                    panelOfNormalsVC.isEmpty() ? null : panelOfNormalsVC.iterator().next()); // if there are multiple, we're just grabbing the first
            candidate.setCosmicSite(!cosmicVC.isEmpty());
            candidate.setDbsnpSite(!dbsnpVC.isEmpty());
            candidate.setDbsnpVC(dbsnpVC.isEmpty() ? null : dbsnpVC.iterator().next());
            candidate.setTumorF(tumorReadPile.estimateAlleleFraction(upRef, altAllele));

            // Cheap pretest: drop alleles whose tumor allele fraction is
            // below the configured threshold (unless forced output).
            if (!MTAC.FORCE_OUTPUT && candidate.getTumorF() < MTAC.TUMOR_F_PRETEST) {
                continue;
            }

            if (++candidatesInspected % 1000 == 0) {
                logger.info(String.format("[MUTECT] Inspected %d potential candidates", candidatesInspected));
            }

            candidate.setInitialTumorAltCounts(tumorReadPile.qualitySums.getCounts(altAllele));
            candidate.setInitialTumorRefCounts(tumorReadPile.qualitySums.getCounts(upRef));
            candidate.setInitialTumorAltQualitySum(tumorReadPile.qualitySums.getQualitySum(altAllele));
            candidate.setInitialTumorRefQualitySum(tumorReadPile.qualitySums.getQualitySum(upRef));

            double tumorLod = tumorReadPile.calculateAltVsRefLOD((byte) altAllele, candidate.getTumorF(), 0);
            candidate.setTumorLodFStar(tumorLod);

            candidate.setInitialTumorReadDepth(tumorReadPile.finalPileupReads.size());
            candidate.setTumorInsertionCount(tumorReadPile.getInsertionsCount());
            candidate.setTumorDeletionCount(tumorReadPile.getDeletionsCount());

            if (candidate.getTumorLodFStar() < MTAC.INITIAL_TUMOR_LOD_THRESHOLD) {
                continue;
            }

            // calculate lod of contaminant
            double contaminantF = Math.min(contaminantAlternateFraction, candidate.getTumorF());
            VariableAllelicRatioGenotypeLikelihoods contaminantLikelihoods = new VariableAllelicRatioGenotypeLikelihoods(
                    upRef, contaminantF);

            List<PileupElement> peList = new ArrayList<PileupElement>(
                    tumorReadPile.finalPileup.depthOfCoverage());
            for (PileupElement pe : tumorReadPile.finalPileup) {
                peList.add(pe);
            }

            // Keep only the top-quality alt reads that contamination could
            // plausibly explain; relabel the rest as reference.
            Collections.sort(peList, new PileupComparatorByAltRefQual((byte) altAllele));
            int readsToKeep = (int) (peList.size() * contaminantAlternateFraction);

            for (PileupElement pe : peList) {
                byte base = pe.getBase();
                if (pe.getBase() == altAllele) {
                    // if we've retained all we need, then turn the remainder of alts to ref
                    if (readsToKeep == 0) {
                        base = (byte) upRef;
                    } else {
                        readsToKeep--;
                    }
                }

                contaminantLikelihoods.add(base, pe.getQual());
            }
            double[] refHetHom = LocusReadPile.extractRefHetHom(contaminantLikelihoods, upRef, altAllele);
            double contaminantLod = refHetHom[1] - refHetHom[0];
            candidate.setContaminantLod(contaminantLod);

            final QualitySums normQs = normalReadPile.qualitySums;

            VariableAllelicRatioGenotypeLikelihoods normalGl = normalReadPile
                    .calculateLikelihoods(normalReadPile.qualityScoreFilteredPileup); // use MAPQ0 reads
            candidate.setInitialNormalBestGenotype(normalReadPile.getBestGenotype(normalGl));
            candidate.setInitialNormalLod(LocusReadPile.getRefVsAlt(normalGl, upRef, altAllele));

            double normalF = Math.max(LocusReadPile
                    .estimateAlleleFraction(normalReadPile.qualityScoreFilteredPileup, upRef, altAllele),
                    MTAC.MINIMUM_NORMAL_ALLELE_FRACTION);
            candidate.setNormalF(normalF);

            candidate.setInitialNormalAltQualitySum(normQs.getQualitySum(altAllele));
            candidate.setInitialNormalRefQualitySum(normQs.getQualitySum(upRef));

            candidate.setNormalAltQualityScores(normQs.getBaseQualityScores(altAllele));
            candidate.setNormalRefQualityScores(normQs.getBaseQualityScores(upRef));

            candidate.setInitialNormalAltCounts(normQs.getCounts(altAllele));
            candidate.setInitialNormalRefCounts(normQs.getCounts(upRef));
            candidate.setInitialNormalReadDepth(normalReadPile.finalPileupReads.size());

            // TODO: parameterize filtering Mate-Rescued Reads (if someone wants to disable this)
            final LocusReadPile t2 = filterReads(ref, tumorReadPile.finalPileup, true);

            // if there are no reads remaining, abandon this theory
            if (!MTAC.FORCE_OUTPUT && t2.finalPileupReads.size() == 0) {
                continue;
            }

            // Recompute tumor stats on the filtered pile (overwrites the
            // initial values set from the unfiltered pile above).
            candidate.setInitialTumorAltCounts(t2.qualitySums.getCounts(altAllele));
            candidate.setInitialTumorRefCounts(t2.qualitySums.getCounts(upRef));
            candidate.setInitialTumorAltQualitySum(t2.qualitySums.getQualitySum(altAllele));
            candidate.setInitialTumorRefQualitySum(t2.qualitySums.getQualitySum(upRef));

            candidate.setTumorAltQualityScores(t2.qualitySums.getBaseQualityScores(altAllele));
            candidate.setTumorRefQualityScores(t2.qualitySums.getBaseQualityScores(upRef));

            VariableAllelicRatioGenotypeLikelihoods t2Gl = t2.calculateLikelihoods(t2.finalPileup);
            candidate.setInitialTumorLod(t2.getAltVsRef(t2Gl, upRef, altAllele));
            candidate.setInitialTumorReadDepth(t2.finalPileupReads.size());

            candidate.setTumorF(t2.estimateAlleleFraction(upRef, altAllele));
            double tumorLod2 = t2.calculateAltVsRefLOD((byte) altAllele, candidate.getTumorF(), 0);
            candidate.setTumorLodFStar(tumorLod2);

            //TODO: clean up use of forward/reverse vs positive/negative (prefer the latter since GATK uses it)
            ReadBackedPileup forwardPileup = filterReads(ref, tumorReadPile.finalPileupPositiveStrand,
                    true).finalPileupPositiveStrand;
            double f2forward = LocusReadPile.estimateAlleleFraction(forwardPileup, upRef, altAllele);
            candidate.setTumorLodFStarForward(
                    t2.calculateAltVsRefLOD(forwardPileup, (byte) altAllele, f2forward, 0.0));

            ReadBackedPileup reversePileup = filterReads(ref, tumorReadPile.finalPileupNegativeStrand,
                    true).finalPileupNegativeStrand;
            double f2reverse = LocusReadPile.estimateAlleleFraction(reversePileup, upRef, altAllele);
            candidate.setTumorLodFStarReverse(
                    t2.calculateAltVsRefLOD(reversePileup, (byte) altAllele, f2reverse, 0.0));

            // calculate strand bias power
            candidate.setPowerToDetectPositiveStrandArtifact(strandArtifactPowerCalculator
                    .cachingPowerCalculation(reversePileup.depthOfCoverage(), candidate.getTumorF()));
            candidate.setPowerToDetectNegativeStrandArtifact(strandArtifactPowerCalculator
                    .cachingPowerCalculation(forwardPileup.depthOfCoverage(), candidate.getTumorF()));

            candidate.setStrandContingencyTable(SequenceUtils.getStrandContingencyTable(forwardPileup,
                    reversePileup, (byte) upRef, (byte) altAllele));

            // Partition the filtered tumor pile into reads supporting the
            // alt allele vs. the reference allele; other bases are dropped.
            ArrayList<PileupElement> mutantPileupElements = new ArrayList<PileupElement>();
            ArrayList<PileupElement> referencePileupElements = new ArrayList<PileupElement>();

            for (PileupElement p : t2.finalPileup) {
                final SAMRecord read = p.getRead();
                final int offset = p.getOffset();

                if (read.getReadString().charAt(offset) == altAllele) {
                    mutantPileupElements.add(p);
                } else if (read.getReadString().charAt(offset) == upRef) {
                    referencePileupElements.add(p);
                } else {
                    // just drop the read...
                }
            }

            ReadBackedPileup mutantPileup = new ReadBackedPileupImpl(rawContext.getLocation(),
                    mutantPileupElements);

            ReadBackedPileup referencePileup = new ReadBackedPileupImpl(rawContext.getLocation(),
                    referencePileupElements);

            // TODO: shouldn't this be refAllele here?
            final LocusReadPile mutantPile = new LocusReadPile(mutantPileup, altAllele, 0, 0,
                    MTAC.ENABLE_QSCORE_OUTPUT);
            final LocusReadPile refPile = new LocusReadPile(referencePileup, altAllele, 0, 0,
                    MTAC.ENABLE_QSCORE_OUTPUT);

            // Set the maximum observed mapping quality score for the reference and alternate alleles
            int[] rmq = referencePileup.getMappingQuals();
            candidate.setTumorRefMaxMapQ((rmq.length == 0) ? 0 : NumberUtils.max(rmq));

            int[] amq = mutantPileup.getMappingQuals();
            candidate.setTumorAltMaxMapQ((amq.length == 0) ? 0 : NumberUtils.max(amq));

            // start with just the tumor pile
            candidate.setTumorAltForwardOffsetsInRead(SequenceUtils.getForwardOffsetsInRead(mutantPileup));
            candidate.setTumorAltReverseOffsetsInRead(SequenceUtils.getReverseOffsetsInRead(mutantPileup));

            if (candidate.getTumorAltForwardOffsetsInRead().size() > 0) {
                double[] offsets = MuTectStats
                        .convertIntegersToDoubles(candidate.getTumorAltForwardOffsetsInRead());
                double median = MuTectStats.getMedian(offsets);
                candidate.setTumorForwardOffsetsInReadMedian(median);
                candidate.setTumorForwardOffsetsInReadMad(MuTectStats.calculateMAD(offsets, median));
            }

            if (candidate.getTumorAltReverseOffsetsInRead().size() > 0) {
                double[] offsets = MuTectStats
                        .convertIntegersToDoubles(candidate.getTumorAltReverseOffsetsInRead());
                double median = MuTectStats.getMedian(offsets);
                candidate.setTumorReverseOffsetsInReadMedian(median);
                candidate.setTumorReverseOffsetsInReadMad(MuTectStats.calculateMAD(offsets, median));
            }

            // test to see if the candidate should be rejected
            performRejection(candidate);

            if (MTAC.FORCE_ALLELES) {
                out.println(callStatsGenerator.generateCallStats(candidate));
            } else {
                messageByTumorLod.put(candidate.getInitialTumorLod(), candidate);
            }
        }

        // if more than one site passes the tumor lod threshold for KEEP the fail the tri_allelic Site filter
        int passingCandidates = 0;
        for (CandidateMutation c : messageByTumorLod.values()) {
            if (c.getTumorLodFStar() >= MTAC.TUMOR_LOD_THRESHOLD) {
                passingCandidates++;
            }
        }

        if (passingCandidates > 1) {
            for (CandidateMutation c : messageByTumorLod.values()) {
                c.addRejectionReason("triallelic_site");
            }
        }

        // write out the call stats for the "best" candidate
        if (!messageByTumorLod.isEmpty()) {
            CandidateMutation m = messageByTumorLod.lastEntry().getValue();

            // only output passing calls OR rejected sites if ONLY_PASSING_CALLS is not specified
            if (!m.isRejected() || (m.isRejected() && !MTAC.ONLY_PASSING_CALLS)) {

                out.println(callStatsGenerator.generateCallStats(m));
                if (vcf != null) {
                    vcf.add(VCFGenerator.generateVC(m));
                }
            }
        }

        return -1;
    } catch (Throwable t) {
        System.err.println("Error processing " + rawContext.getContig() + ":" + rawContext.getPosition());
        t.printStackTrace(System.err);

        throw new RuntimeException(t);
    }
}

From source file:org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsDomainBinder.java

/**
 * Interrogates the specified constraints looking for any constraints that would limit the
 * precision and/or scale of the property's value.  If such constraints exist, this method adjusts
 * the precision and/or scale of the column accordingly.
 *
 * @param column              the column that corresponds to the property
 * @param constrainedProperty the property's constraints
 */
protected static void bindNumericColumnConstraints(Column column, ConstrainedProperty constrainedProperty) {
    int scale = Column.DEFAULT_SCALE;
    int precision = Column.DEFAULT_PRECISION;

    if (constrainedProperty.getScale() != null) {
        scale = constrainedProperty.getScale().intValue();
        column.setScale(scale);
    }

    Comparable<?> minConstraintValue = constrainedProperty.getMin();
    Comparable<?> maxConstraintValue = constrainedProperty.getMax();

    // Required length of each bound = max(total digits in the value,
    // integer digits + scale) so the column can hold the scaled value.
    // NOTE: instanceof already returns false for null, so the original
    // explicit null checks were redundant and have been dropped.
    int minConstraintValueLength = 0;
    if (minConstraintValue instanceof Number) {
        minConstraintValueLength = Math.max(countDigits((Number) minConstraintValue),
                countDigits(((Number) minConstraintValue).longValue()) + scale);
    }
    int maxConstraintValueLength = 0;
    if (maxConstraintValue instanceof Number) {
        maxConstraintValueLength = Math.max(countDigits((Number) maxConstraintValue),
                countDigits(((Number) maxConstraintValue).longValue()) + scale);
    }

    if (minConstraintValueLength > 0 && maxConstraintValueLength > 0) {
        // If both min and max constraints are set we can use the larger
        // digit count as the precision
        precision = NumberUtils.max(new int[] { minConstraintValueLength, maxConstraintValueLength });
    } else {
        // Otherwise also keep the default precision as a lower bound
        precision = NumberUtils
                .max(new int[] { precision, minConstraintValueLength, maxConstraintValueLength });
    }

    column.setPrecision(precision);
}

From source file:org.epochx.stats.StatsUtilsTest.java

/**
 * Tests that the value returned from maxIndex is the index for the largest
 * value.
 */
public void testMaxIndexDouble() {
    final double[] values = { 0.1, 0.2, 0.3, 0.4 };

    final double expectedMax = NumberUtils.max(values);
    final double valueAtMaxIndex = values[StatsUtils.maxIndex(values)];
    assertEquals("maximum index not for the maximum value", expectedMax, valueAtMaxIndex);
}

From source file:org.epochx.stats.StatsUtilsTest.java

/**
 * Tests that the value returned from maxIndex is the index for the largest
 * value.
 */
public void testMaxIndexInt() {
    final int[] values = { 1, 2, 3, 4 };

    final int expectedMax = NumberUtils.max(values);
    final int valueAtMaxIndex = values[StatsUtils.maxIndex(values)];
    assertEquals("maximum index not for the maximum value", expectedMax, valueAtMaxIndex);
}

From source file:org.kuali.student.enrollment.class2.acal.service.impl.AcademicCalendarViewHelperServiceImpl.java

/**
 * Validates the term at the given index: checks for duplicate term names,
 * a valid start/end date range, containment within the calendar's and the
 * parent term's date ranges, key dates within the term range, and finally
 * the exam period.
 *
 * @param termWrapper list of terms in an academic calendar
 * @param beforeSortingIndex index of the term before sorting for terms happens.
 * @param afterSortingIndex index of the term after sorting for terms happens.
 * @param acal ACal dto needed to compare the start and end date
 */
public void validateTerm(List<AcademicTermWrapper> termWrapper, int beforeSortingIndex, int afterSortingIndex,
        AcademicCalendarInfo acal) {

    AcademicTermWrapper termWrapperToValidate = termWrapper.get(beforeSortingIndex);
    String termSectionName = "term_section_line" + afterSortingIndex;
    String keyDateGroupSectionName = "acal-term-keydatesgroup_line" + afterSortingIndex;

    int index2 = 0;
    //Validate duplicate term name
    for (AcademicTermWrapper wrapper : termWrapper) {
        index2++; // 1-based index of the wrapper being compared against
        if (wrapper != termWrapperToValidate) {
            if (StringUtils.equalsIgnoreCase(wrapper.getName(), termWrapperToValidate.getName())) {
                GlobalVariables.getMessageMap().putErrorForSectionId(termSectionName,
                        CalendarConstants.MessageKeys.ERROR_DUPLICATE_TERM_NAME,
                        "" + NumberUtils.min(new int[] { afterSortingIndex, index2 }),
                        "" + NumberUtils.max(new int[] { afterSortingIndex, index2 }));
            }
        }
    }

    // Term start must not be after term end.
    if (!AcalCommonUtils.isValidDateRange(termWrapperToValidate.getStartDate(),
            termWrapperToValidate.getEndDate())) {
        GlobalVariables.getMessageMap().putErrorForSectionId(termSectionName,
                CalendarConstants.MessageKeys.ERROR_INVALID_DATE_RANGE, termWrapperToValidate.getName(),
                AcalCommonUtils.formatDate(termWrapperToValidate.getStartDate()),
                AcalCommonUtils.formatDate(termWrapperToValidate.getEndDate()));
    }

    // Term dates should fall within the academic calendar's date range.
    if (!AcalCommonUtils.isDateWithinRange(acal.getStartDate(), acal.getEndDate(),
            termWrapperToValidate.getStartDate())
            || !AcalCommonUtils.isDateWithinRange(acal.getStartDate(), acal.getEndDate(),
                    termWrapperToValidate.getEndDate())) {
        GlobalVariables.getMessageMap().putWarningForSectionId(termSectionName,
                CalendarConstants.MessageKeys.ERROR_TERM_NOT_IN_ACAL_RANGE, termWrapperToValidate.getName());
    }
    // A sub-term's dates should fall within its parent term's date range.
    if (termWrapperToValidate.isSubTerm()) {
        if (termWrapperToValidate.getParentTermInfo() != null) {
            if (!AcalCommonUtils.isDateWithinRange(termWrapperToValidate.getParentTermInfo().getStartDate(),
                    termWrapperToValidate.getParentTermInfo().getEndDate(),
                    termWrapperToValidate.getStartDate())
                    || !AcalCommonUtils.isDateWithinRange(
                            termWrapperToValidate.getParentTermInfo().getStartDate(),
                            termWrapperToValidate.getParentTermInfo().getEndDate(),
                            termWrapperToValidate.getEndDate())) {
                GlobalVariables.getMessageMap().putWarningForSectionId(termSectionName,
                        CalendarConstants.MessageKeys.ERROR_TERM_NOT_IN_TERM_RANGE,
                        termWrapperToValidate.getName(), termWrapperToValidate.getParentTermInfo().getName());
            }
        } else {
            // Find term manually if calendar hasn't already been saved.
            AcademicTermWrapper parentTerm = null;
            for (AcademicTermWrapper term : termWrapper) {
                String termType = term.getTermType();
                if (StringUtils.isBlank(termType)) {
                    termType = term.getTermInfo().getTypeKey();
                }
                if (termWrapperToValidate.getParentTerm().equals(termType)) {
                    parentTerm = term;
                    break;
                }
            }

            // BUG FIX: the lookup above can fail to find a matching parent
            // term; guard against NPE instead of dereferencing null.
            if (parentTerm != null
                    && (!AcalCommonUtils.isDateWithinRange(parentTerm.getStartDate(), parentTerm.getEndDate(),
                            termWrapperToValidate.getStartDate())
                            || !AcalCommonUtils.isDateWithinRange(parentTerm.getStartDate(),
                                    parentTerm.getEndDate(), termWrapperToValidate.getEndDate()))) {
                GlobalVariables.getMessageMap().putWarningForSectionId(termSectionName,
                        CalendarConstants.MessageKeys.ERROR_TERM_NOT_IN_TERM_RANGE,
                        termWrapperToValidate.getName(), parentTerm.getName());
            }
        }
    }

    for (KeyDatesGroupWrapper keyDatesGroupWrapper : termWrapperToValidate.getKeyDatesGroupWrappers()) {
        for (KeyDateWrapper keyDateWrapper : keyDatesGroupWrapper.getKeydates()) {
            // Start and End Dates of the key date entry should be within the start and end dates of the term.
            if (!AcalCommonUtils.isDateWithinRange(termWrapperToValidate.getStartDate(),
                    termWrapperToValidate.getEndDate(), keyDateWrapper.getStartDate())
                    || !AcalCommonUtils.isDateWithinRange(termWrapperToValidate.getStartDate(),
                            termWrapperToValidate.getEndDate(), keyDateWrapper.getEndDate())) {
                String keyDatePath = "termWrapperList[" + beforeSortingIndex + "].keyDatesGroupWrappers["
                        + termWrapperToValidate.getKeyDatesGroupWrappers().indexOf(keyDatesGroupWrapper)
                        + "].keydates[" + keyDatesGroupWrapper.getKeydates().indexOf(keyDateWrapper) + "]";
                GlobalVariables.getMessageMap().putWarning(keyDatePath + ".startDate",
                        CalendarConstants.MessageKeys.ERROR_INVALID_DATERANGE_KEYDATE,
                        keyDateWrapper.getKeyDateNameUI(), termWrapperToValidate.getName());
            }
        }
    }

    //Validate exam dates
    validateExamPeriod(termWrapperToValidate, beforeSortingIndex, afterSortingIndex);
}