Example usage for java.util.concurrent ForkJoinPool getParallelism

List of usage examples for java.util.concurrent ForkJoinPool getParallelism

Introduction

On this page you can find example usage for java.util.concurrent ForkJoinPool getParallelism.

Prototype

public int getParallelism() 

Document

Returns the targeted parallelism level of this pool.
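
A minimal, self-contained sketch (not taken from the usage examples below) of what getParallelism() reports; the class name ParallelismDemo is made up for illustration. A pool constructed with an explicit parallelism level reports that level, while the common pool reports its default, usually one less than the number of available processors.

import java.util.concurrent.ForkJoinPool;

public class ParallelismDemo {
    public static void main(String[] args) {
        //Pool constructed with an explicit target parallelism of 4
        ForkJoinPool pool = new ForkJoinPool(4);
        System.out.println(pool.getParallelism()); //prints 4

        //The common pool's target, usually availableProcessors() - 1 unless configured otherwise
        System.out.println(ForkJoinPool.commonPool().getParallelism());

        pool.shutdown();
    }
}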

Usage

From source file:MSUmpire.SpectrumParser.mzXMLParser.java

private List<MzXMLthreadUnit> ParseScans(final BitSet IncludedScans) {
    List<MzXMLthreadUnit> ScanList = new ArrayList<>();
    ArrayList<ForkJoinTask<?>> futures = new ArrayList<>();
    final ForkJoinPool fjp = new ForkJoinPool(NoCPUs);
    Iterator<Entry<Integer, Long>> iter = ScanIndex.entrySet().iterator();
    Entry<Integer, Long> ent = iter.next();
    long currentIdx = ent.getValue();
    int nextScanNo = ent.getKey();
    final RandomAccessFile fileHandler;
    try {
        fileHandler = new RandomAccessFile(filename, "r");
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    }
    byte[] buffer = new byte[1 << 10];
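    //Lazily initialize the batch size to a multiple of the pool's target parallelism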
    if (step == -1)
        step = fjp.getParallelism() * 32;
    while (iter.hasNext()) {
        ent = iter.next();
        long startposition = currentIdx;
        long nexposition = ent.getValue();
        int currentScanNo = nextScanNo;
        nextScanNo = ent.getKey();
        currentIdx = nexposition;

        if (IncludedScans.get(currentScanNo)) {
            try {
                final int bufsize = (int) (nexposition - startposition);
                if (buffer.length < bufsize)
                    buffer = new byte[Math.max(bufsize, buffer.length << 1)];
                //                    byte[] buffer = new byte[bufsize];
                //                    RandomAccessFile fileHandler = new RandomAccessFile(filename, "r");
                fileHandler.seek(startposition);
                //readFully guarantees the whole scan block is read; read() may return fewer bytes
                fileHandler.readFully(buffer, 0, bufsize);
                //                    fileHandler.close();
                //                    String xmltext = new String(buffer);
                String xmltext = new String(buffer, 0, bufsize, StandardCharsets.ISO_8859_1);
                if (ent.getKey() == Integer.MAX_VALUE) {
                    xmltext = xmltext.replaceAll("</msRun>", "");
                }
                boolean ReadPeak = true;
                final MzXMLthreadUnit unit = new MzXMLthreadUnit(xmltext, parameter, datatype, ReadPeak);
                futures.add(fjp.submit(unit));
                ScanList.add(unit);

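                //Every step-th included scan, wait on an older task; if the pool has idle workers, double step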
                if ((ScanList.size() % step) == 0) {
                    futures.get(futures.size() - step).get();
                    if (iter.hasNext() && fjp.getActiveThreadCount() < fjp.getParallelism()) {
                        step *= 2;
                        //                            System.out.println("MzXMLthreadUnit: fjp.getActiveThreadCount()\t" + fjp.getActiveThreadCount()+"\t"+step);
                    }
                }
            } catch (Exception ex) {
                Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
            }
        }
    }
    try {
        fileHandler.close();
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
    fjp.shutdown();
    try {
        fjp.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException ex) {
        throw new RuntimeException(ex);
    }
    //        for (MzXMLthreadUnit unit : ScanList) {
    //            executorPool.execute(unit);
    //        }
    //        executorPool.shutdown();
    //
    //        try {
    //            executorPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    //        } catch (InterruptedException e) {
    //            Logger.getRootLogger().info("interrupted..");
    //        }
    return ScanList;
}
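
The example above throttles task submission using the pool's target parallelism: the batch size step starts at fjp.getParallelism() * 32 and is doubled whenever fjp.getActiveThreadCount() falls below fjp.getParallelism(), i.e. whenever the pool has idle workers. Below is a condensed, hypothetical sketch of that idiom, assuming a list of Runnable work items; the names AdaptiveBatchSubmit and processAll are placeholders, not part of the source above.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.TimeUnit;

public class AdaptiveBatchSubmit {
    static void processAll(List<Runnable> work, int nThreads) throws Exception {
        final ForkJoinPool fjp = new ForkJoinPool(nThreads);
        final List<ForkJoinTask<?>> futures = new ArrayList<>();
        int step = fjp.getParallelism() * 32; //initial batch size, tied to the pool size
        for (Runnable task : work) {
            futures.add(fjp.submit(task));
            if (futures.size() % step == 0) {
                //Wait on an older task to apply back-pressure to the producer
                futures.get(futures.size() - step).get();
                //Idle workers mean tasks finish faster than they are submitted: synchronize less often
                if (fjp.getActiveThreadCount() < fjp.getParallelism()) {
                    step *= 2;
                }
            }
        }
        fjp.shutdown();
        fjp.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    }
}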

From source file:MSUmpire.PeptidePeakClusterDetection.PDHandlerBase.java

protected void FindAllMzTracePeakCurves(ScanCollection scanCollection) throws IOException {
    //        final HashSet<String> IncludedHashMap = new HashSet<>();

    //        Logger.getRootLogger().info("Processing all scans to detect possible m/z peak curves....");
    Logger.getRootLogger().info("Processing all scans to detect possible m/z peak curves and");
    Logger.getRootLogger().info("Smoothing detected signals......");
    float preRT = 0f;

    //Loop for each scan in the ScanCollection
    final ArrayList<ForkJoinTask<ArrayList<PeakCurve>>> ftemp = new ArrayList<>();
    final ForkJoinPool fjp = new ForkJoinPool(NoCPUs);
    final int idx_end = scanCollection.GetScanNoArray(MSlevel).size();

    final int[] ia = new int[idx_end + 1];
    ia[0] = 0;
    for (int idx = 0; idx < idx_end; idx++) {
        final int scanNO = scanCollection.GetScanNoArray(MSlevel).get(idx);
        final ScanData sd = scanCollection.GetScan(scanNO);
        ia[idx + 1] = sd.Data.size() + ia[idx];
    }

    final boolean[] included = new boolean[ia[ia.length - 1]];
    if (step == -1)
        step = fjp.getParallelism() * 32;
    long peakCurvesCount = 0;
    for (int idx = 0; idx < idx_end; idx++) {
        int scanNO = scanCollection.GetScanNoArray(MSlevel).get(idx);
        ScanData scanData = scanCollection.GetScan(scanNO);

        //If we are doing targeted peak detection and the RT of the current scan is not in the range of the targeted list, skip to the next scan
        if (TargetedOnly && !FoundInInclusionRTList(scanData.RetentionTime)) {
            continue;
        }
        if (idx == 0) {
            preRT = scanData.RetentionTime - 0.01f;
        }
        for (int i = 0; i < scanData.PointCount(); i++) {
            XYData peak = scanData.Data.get(i);
            //If we are doing targeted peak detection and the RT and m/z of the current peak are not in the range of the targeted list, skip to the next peak
            if (TargetedOnly && !FoundInInclusionMZList(scanData.RetentionTime, peak.getX())) {
                continue;
            }

            if (peak.getX() < parameter.MinMZ) {
                continue;
            }

            //Check if the current peak has been included in previously developed peak curves
            //                if (!IncludedHashMap.contains(scanNO + "_" + peak.getX())) {//The peak hasn't been included
            final int id_scanNO_peak = int_id(ia, idx, i);
            if (!included[id_scanNO_peak]) {//The peak hasn't been included
                //The current peak will be the starting peak of a new peak curve
                //Add it to the hash table

                //                    IncludedHashMap.add(scanNO + "_" + peak.getX());
                included[id_scanNO_peak] = true;

                float startmz = peak.getX();
                float startint = peak.getY();

                //Find the maximum peak within PPM window as the starting peak
                for (int j = i + 1; j < scanData.PointCount(); j++) {
                    XYData currentpeak = scanData.Data.get(j);
                    final int id_scanNO_currentpeak = int_id(ia, idx, j);
                    if (!included[id_scanNO_currentpeak]) {
                        //                        if (!IncludedHashMap.contains(scanNO + "_" + currentpeak.getX())) {
                        if (InstrumentParameter.CalcPPM(currentpeak.getX(), startmz) <= PPM) {
                            included[id_scanNO_currentpeak] = true;
                            //                                IncludedHashMap.add(scanNO + "_" + currentpeak.getX());

                            if (currentpeak.getY() >= startint) {
                                startmz = currentpeak.getX();
                                startint = currentpeak.getY();
                            }
                        } else {
                            break;
                        }
                    }
                }

                //Initialize a new peak curve
                PeakCurve Peakcurve = new PeakCurve(parameter);
                //Add a background peak
                Peakcurve.AddPeak(preRT, startmz, scanData.background);
                //Add the starting peak
                Peakcurve.AddPeak(scanData.RetentionTime, startmz, startint);
                Peakcurve.StartScan = scanNO;

                int missedScan = 0;
                float endrt = scanData.RetentionTime;
                int endScan = scanData.ScanNum;
                float bk = 0f;

                //Starting from the next scan, find the following peaks given the starting peak
                for (int idx2 = idx + 1; idx2 < scanCollection.GetScanNoArray(MSlevel).size()
                        && (missedScan < parameter.NoMissedScan /*|| (TargetedOnly && Peakcurve.RTWidth()<parameter.MaxCurveRTRange)*/); idx2++) {
                    int scanNO2 = scanCollection.GetScanNoArray(MSlevel).get(idx2);
                    ScanData scanData2 = scanCollection.GetScan(scanNO2);

                    endrt = scanData2.RetentionTime;
                    endScan = scanData2.ScanNum;
                    bk = scanData2.background;
                    float currentmz = 0f;
                    float currentint = 0f;

                    //If the scan is empty
                    if (scanData2.PointCount() == 0) {
                        if (parameter.FillGapByBK) {
                            Peakcurve.AddPeak(scanData2.RetentionTime, Peakcurve.TargetMz,
                                    scanData2.background);
                        }
                        missedScan++;
                        continue;
                    }

                    //Find the m/z index 
                    int mzidx = scanData2.GetLowerIndexOfX(Peakcurve.TargetMz);
                    for (int pkidx = mzidx; pkidx < scanData2.Data.size(); pkidx++) {
                        XYData currentpeak = scanData2.Data.get(pkidx);
                        if (currentpeak.getX() < parameter.MinMZ) {
                            continue;
                        }
                        //Check if the peak has been included or not
                        final int int_id_scanNO2_currentpeak = int_id(ia, idx2, pkidx);
                        //                            if (!included.get(int_id_scanNO2_currentpeak)) {
                        if (!included[int_id_scanNO2_currentpeak]) {
                            if (InstrumentParameter.CalcPPM(currentpeak.getX(), Peakcurve.TargetMz) > PPM) {
                                if (currentpeak.getX() > Peakcurve.TargetMz) {
                                    break;
                                }
                            } else {
                                //////////The peak is in the ppm window, select the highest peak
                                included[int_id_scanNO2_currentpeak] = true;
                                //                                    IncludedHashMap.add(scanNO2 + "_" + currentpeak.getX());
                                if (currentint < currentpeak.getY()) {
                                    currentmz = currentpeak.getX();
                                    currentint = currentpeak.getY();
                                }
                            }
                        }
                    }

                    //No peak in the PPM window has been found
                    if (currentmz == 0f) {
                        if (parameter.FillGapByBK) {
                            Peakcurve.AddPeak(scanData2.RetentionTime, Peakcurve.TargetMz,
                                    scanData2.background);
                        }
                        missedScan++;
                    } else {
                        missedScan = 0;
                        Peakcurve.AddPeak(scanData2.RetentionTime, currentmz, currentint);
                    }
                }
                Peakcurve.AddPeak(endrt, Peakcurve.TargetMz, bk);
                Peakcurve.EndScan = endScan;

                //First check if the peak curve is in targeted list
                if (FoundInInclusionList(Peakcurve.TargetMz, Peakcurve.StartRT(), Peakcurve.EndRT())) {
                    //                        LCMSPeakBase.UnSortedPeakCurves.add(Peakcurve);
                    ++peakCurvesCount;
                    ftemp.add(fjp.submit(new PeakCurveSmoothingUnit(Peakcurve, parameter)));
                    //Then check if the peak curve passes the criteria
                } else if (Peakcurve.GetRawSNR() > LCMSPeakBase.SNR
                        && Peakcurve.GetPeakList().size() >= parameter.MinPeakPerPeakCurve + 2) {
                    //                        LCMSPeakBase.UnSortedPeakCurves.add(Peakcurve);
                    ++peakCurvesCount;
                    ftemp.add(fjp.submit(new PeakCurveSmoothingUnit(Peakcurve, parameter)));
                } else {
                    Peakcurve = null;
                }
            }
        }
        preRT = scanData.RetentionTime;
        if (ReleaseScans) {
            scanData.dispose();
        }
        //the if statement below collects the results of PeakCurveSmoothing() and ClearRawPeaks(),
        //and doubles the batch size (step) when the pool still has idle workers
        final boolean last_iter = idx + 1 == idx_end;
        if (ftemp.size() == step || last_iter) {
            final List<ForkJoinTask<ArrayList<PeakCurve>>> ftemp_sublist_view = last_iter ? ftemp
                    : ftemp.subList(0, step / 2);
            for (final Future<ArrayList<PeakCurve>> f : ftemp_sublist_view) {
                try {
                    LCMSPeakBase.UnSortedPeakCurves.addAll(f.get());
                } catch (InterruptedException | ExecutionException e) {
                    throw new RuntimeException(e);
                }
            }
            ftemp_sublist_view.clear();
            if (!last_iter && fjp.getActiveThreadCount() < fjp.getParallelism()) {
                //                    System.out.println("PeakCurveSmoothingUnit: fjp.getActiveThreadCount()\t"+fjp.getActiveThreadCount()+"\t"+step);
                step *= 2;
            }
        }
    }
    assert ftemp.isEmpty();
    //All smoothing tasks have completed; release the pool's worker threads
    fjp.shutdown();
    //System.out.print("PSM removed (PeakCurve generation):" + PSMRemoved );

    int i = 1;
    //Assign peak curve index
    for (PeakCurve peakCurve : LCMSPeakBase.UnSortedPeakCurves) {
        peakCurve.Index = i++;
    }

    System.gc();
    //        Logger.getRootLogger().info(LCMSPeakBase.UnSortedPeakCurves.size() + " Peak curves found (Memory usage:" + Math.round((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576) + "MB)");
    Logger.getRootLogger()
            .info(peakCurvesCount + " Peak curves found (Memory usage:"
                    + Math.round(
                            (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576)
                    + "MB)");
}
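
The two PDHandlerBase examples on this page also drain only the oldest half of the outstanding futures, except on the last iteration, using a subList view whose clear() removes the drained entries from the backing list, and again double the batch size when getActiveThreadCount() is below getParallelism(). A stand-alone, hedged sketch of that draining step follows; it assumes the batch is full (pending.size() >= step / 2) and uses String as a placeholder result type.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ForkJoinTask;

public class DrainOldestHalf {
    static void drain(ArrayList<ForkJoinTask<String>> pending, boolean lastIter, int step,
            List<String> results) throws Exception {
        //On the last iteration drain everything; otherwise only the oldest half of the batch
        final List<ForkJoinTask<String>> view = lastIter ? pending : pending.subList(0, step / 2);
        for (ForkJoinTask<String> f : view) {
            results.add(f.get()); //blocks until that task has finished
        }
        //Clearing the subList view also removes the drained tasks from the backing list
        view.clear();
    }
}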

From source file:MSUmpire.PeptidePeakClusterDetection.PDHandlerBase.java

protected void PeakCurveCorrClustering(XYData mzRange) throws IOException {
    Logger.getRootLogger().info("Grouping isotopic peak curves........");

    LCMSPeakBase.PeakClusters = new ArrayList<>();

    //Thread pool
    final ForkJoinPool fjp = new ForkJoinPool(NoCPUs);
    //        ArrayList<PeakCurveClusteringCorrKDtree> ResultList = new ArrayList<>();
    final ArrayList<ForkJoinTask<ArrayList<PeakCluster>>> ftemp = new ArrayList<>();
    final int end_idx = LCMSPeakBase.UnSortedPeakCurves.size();
    final ArrayList<PeakCluster> resultClusters = new ArrayList<>();
    //For each peak curve
    //        for (PeakCurve Peakcurve : LCMSPeakBase.UnSortedPeakCurves) {
    for (int i = 0; i < end_idx; ++i) {
        final PeakCurve Peakcurve = LCMSPeakBase.UnSortedPeakCurves.get(i);
        if (Peakcurve.TargetMz >= mzRange.getX() && Peakcurve.TargetMz <= mzRange.getY()) {
            //Create a thread unit for doing isotope clustering given a peak curve as the monoisotope peak
            PeakCurveClusteringCorrKDtree unit = new PeakCurveClusteringCorrKDtree(Peakcurve,
                    LCMSPeakBase.GetPeakCurveSearchTree(), parameter, IsotopePatternMap,
                    LCMSPeakBase.StartCharge, LCMSPeakBase.EndCharge, LCMSPeakBase.MaxNoPeakCluster,
                    LCMSPeakBase.MinNoPeakCluster);
            //                ResultList.add(unit);
            ftemp.add(fjp.submit(unit));
        }
        if (step_pccc == -1)
            step_pccc = fjp.getParallelism() * 32;
        final boolean last_iter = i + 1 == end_idx;
        if (ftemp.size() == step_pccc || last_iter) {
            final List<ForkJoinTask<ArrayList<PeakCluster>>> ftemp_sublist_view = last_iter ? ftemp
                    : ftemp.subList(0, step_pccc / 2);
            for (final ForkJoinTask<ArrayList<PeakCluster>> fut : ftemp_sublist_view)
                try {
                    resultClusters.addAll(fut.get());
                } catch (InterruptedException | ExecutionException ex) {
                    throw new RuntimeException(ex);
                }
            ftemp_sublist_view.clear();
            if (!last_iter && fjp.getActiveThreadCount() < fjp.getParallelism()) {
                //                    System.out.println("PeakCurveSmoothingUnit: fjp.getActiveThreadCount()\t"+fjp.getActiveThreadCount()+"\t"+step_pccc);
                step_pccc *= 2;
            }
        }
    }

    assert ftemp.isEmpty() : "temp storage for futures should be empty by end of loop";
    fjp.shutdown();

    try {
        fjp.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        Logger.getRootLogger().info("interrupted..");
    }

    for (final PeakCluster peakCluster : resultClusters) {
        //Check if the monoisotopic peak of the cluster has already been grouped into another isotope cluster; if so, drop the peak cluster
        if (!parameter.RemoveGroupedPeaks ||
        //                    !peakCluster.MonoIsotopePeak.ChargeGrouped.contains(peakCluster.Charge)
                !IonChargeHashSet.contains(peakCluster.MonoIsotopePeak.ChargeGrouped, peakCluster.Charge)) {
            peakCluster.Index = LCMSPeakBase.PeakClusters.size() + 1;
            peakCluster.GetConflictCorr();
            LCMSPeakBase.PeakClusters.add(peakCluster);
        }
    }

    System.gc();
    Logger.getRootLogger()
            .info("No of ion clusters:" + LCMSPeakBase.PeakClusters.size() + " (Memory usage:"
                    + Math.round(
                            (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576)
                    + "MB)");
}