Example usage for java.util.concurrent ForkJoinPool awaitTermination

Introduction

This page collects example usages of java.util.concurrent ForkJoinPool awaitTermination, drawn from open-source projects.

Prototype

public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException 

Document

Blocks until all tasks have completed execution after a shutdown request, or the timeout occurs, or the current thread is interrupted, whichever happens first.
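
Note that awaitTermination starts counting toward termination only after shutdown() has been requested; called on a pool that has not been shut down, it simply blocks until the timeout elapses and returns false (for the common pool, which is never shut down, it instead behaves like awaitQuiescence). A minimal sketch of the intended call order, with an illustrative pool size and timeout:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

public class AwaitTerminationSketch {
    public static void main(String[] args) throws InterruptedException {
        ForkJoinPool pool = new ForkJoinPool(4); // illustrative parallelism

        pool.execute(() -> System.out.println("task running"));

        pool.shutdown(); // request shutdown first
        // then wait up to five seconds for already-submitted tasks to finish
        if (!pool.awaitTermination(5, TimeUnit.SECONDS)) {
            System.err.println("pool did not terminate in time");
            pool.shutdownNow(); // best-effort cancellation of stragglers
        }
    }
}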

Usage

From source file:com.hygenics.parser.QualityAssurer.java

private void sendToDb(ArrayList<String> json, boolean split) {
    if (json.size() > 0)
        log.info("Records to Add: " + json.size());

    if (split) {

        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procnum * qnum))));
        ArrayList<String> l;
        int size = (int) Math.ceil((double) json.size() / qnum); // cast so the division does not truncate
        for (int conn = 0; conn < qnum; conn++) {
            l = new ArrayList<String>();
            if (((conn + 1) * size) < json.size()) {
                l.addAll(json.subList((conn * size), ((conn + 1) * size)));

            } else {
                l.addAll(json.subList((conn * size), json.size())); // exclusive bound; size() - 1 would drop the last record
                f2.execute(new SplitPost(template, l));

                break;
            }

            f2.execute(new SplitPost(template, l));
        }

        // request shutdown before awaiting termination; without a shutdown
        // request the await simply runs out the timeout
        f2.shutdown();

        try {
            f2.awaitTermination(termtime, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e1) {
            Thread.currentThread().interrupt();
            e1.printStackTrace();
        }

        int incrementor = 0;

        // isShutdown() is true as soon as shutdown is requested, so poll
        // isTerminated() to wait out any tasks that outlived the timeout
        while (!f2.isTerminated()) {
            incrementor++;
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
            log.info("Shutting Down " + incrementor);
        }

        l = null;
        f2 = null;

    } else {
        for (String j : json) {

            boolean valid = false;

            try {
                Json.read(j);
                valid = true;
            } catch (Exception e) {
                log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                System.out.println(j);
            }

            try {

                this.template.postSingleJson(j);
            } catch (Exception e) {
                log.info("Failed to Post");
                log.error(j);
                e.printStackTrace();
            }
        }
    }

}
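
The chunking arithmetic in examples like the one above is easy to get wrong: integer division can truncate the chunk size, and subList takes an exclusive upper bound. A hedged sketch of the same split with safe bounds, where the chunk count n stands in for the qnum field used above:

import java.util.ArrayList;
import java.util.List;

// Sketch only: partition a list into n roughly equal chunks. Math.min keeps
// the upper bound exclusive and in range, so the last element is never dropped.
static <T> List<List<T>> partition(List<T> items, int n) {
    List<List<T>> chunks = new ArrayList<>();
    int size = (int) Math.ceil((double) items.size() / n);
    for (int start = 0; start < items.size(); start += size) {
        chunks.add(new ArrayList<>(items.subList(start, Math.min(start + size, items.size()))));
    }
    return chunks;
}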

From source file:com.hygenics.parser.Mapper.java

private void sendToDb(List<String> json, boolean split) {
    if (json.size() > 0) {
        log.info("Records to Add: " + json.size());

        if (split) {

            ForkJoinPool f2 = new ForkJoinPool(Runtime.getRuntime().availableProcessors() * qnum);
            ArrayList<String> l;
            int size = (int) Math.ceil((double) json.size() / qnum); // cast so the division does not truncate
            for (int conn = 0; conn < qnum; conn++) {
                l = new ArrayList<String>();
                if (((conn + 1) * size) < json.size()) {
                    l.addAll(json.subList((conn * size), ((conn + 1) * size)));

                } else {
                    l.addAll(json.subList((conn * size), json.size())); // exclusive bound keeps the last record
                    f2.execute(new SplitPost(template, l));

                    break;
                }

                f2.execute(new SplitPost(template, l));
            }

            // request shutdown before awaiting termination
            f2.shutdown();

            try {
                f2.awaitTermination(termtime, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e1) {
                Thread.currentThread().interrupt();
                e1.printStackTrace();
            }

            int incrementor = 0;

            // poll isTerminated(), not isShutdown(), to wait for the tasks
            while (!f2.isTerminated()) {
                incrementor++;
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
                log.info("Shutting Down " + incrementor);
            }

            l = null;
            f2 = null;

        } else {
            for (String j : json) {

                boolean valid = false;

                try {
                    Json.read(j);
                    valid = true;
                } catch (Exception e) {
                    log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                    System.out.println(j);
                }

                try {

                    this.template.postSingleJson(j);
                } catch (Exception e) {
                    log.info("Failed to Post");
                    log.error(j);
                    e.printStackTrace();
                }
            }
        }
    }
}

From source file:com.hygenics.parser.BreakMultiple.java

/**
 * Post to db
 * 
 * @param json
 * @param split
 */
public void postToDb(ArrayList<String> json, boolean split) {
    log.info("Posting " + json.size() + " Records");

    if (split) {

        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procnum * sqlnum))));
        ArrayList<String> l;
        int size = (int) Math.ceil((double) json.size() / qnum); // cast so the division does not truncate
        for (int conn = 0; conn < qnum; conn++) {
            l = new ArrayList<String>();
            if (((conn + 1) * size) < json.size()) {
                l.addAll(json.subList((conn * size), ((conn + 1) * size)));

            } else {
                l.addAll(json.subList((conn * size), json.size())); // exclusive bound keeps the last record
                f2.execute(new SplitPost(template, l));

                break;
            }

            f2.execute(new SplitPost(template, l));
        }

        // request shutdown before awaiting termination
        f2.shutdown();

        try {
            f2.awaitTermination(termtime, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e1) {
            Thread.currentThread().interrupt();
            e1.printStackTrace();
        }

        int incrementor = 0;

        // isShutdown() is true once shutdown is requested; poll isTerminated()
        while (!f2.isTerminated()) {
            incrementor++;
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
            log.info("Shutting Down " + incrementor);
        }

        l = null;
        f2 = null;
    } else {
        log.info("Singlethread");

        this.template.postJsonDatawithTable(json);
    }

}
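
One detail worth noting about the polling loop above: isShutdown() reports only that shutdown has been requested, while isTerminated() reports that all tasks have actually finished. A minimal sketch of the distinction, with illustrative sleep durations:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

public class ShutdownStates {
    public static void main(String[] args) throws InterruptedException {
        ForkJoinPool pool = new ForkJoinPool();
        pool.execute(() -> {
            try {
                Thread.sleep(500);
            } catch (InterruptedException ignored) {
            }
        });

        pool.shutdown();
        System.out.println(pool.isShutdown());   // true immediately after the request
        System.out.println(pool.isTerminated()); // false while the task is still sleeping

        pool.awaitTermination(1, TimeUnit.SECONDS);
        System.out.println(pool.isTerminated()); // true once all tasks have completed
    }
}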

From source file:edu.usu.sdl.openstorefront.report.ExternalLinkValidationReport.java

private void checkLinks() {
    int timeOutTime = MAX_CONNECTION_TIME_MILLIS;
    if (report.getReportOption() != null) {
        if (report.getReportOption().getMaxWaitSeconds() != null) {
            timeOutTime = report.getReportOption().getMaxWaitSeconds() * 1000;
        }
    }

    ForkJoinPool forkJoinPool = new ForkJoinPool(MAX_CHECKPOOL_SIZE);

    Map<String, LinkCheckModel> linkMap = new HashMap<>();
    List<ForkJoinTask<LinkCheckModel>> tasks = new ArrayList<>();
    for (LinkCheckModel link : links) {
        linkMap.put(link.getId(), link);
        tasks.add(forkJoinPool.submit(new CheckLinkTask(link, timeOutTime)));
    }

    int completedCount = 0;
    for (ForkJoinTask<LinkCheckModel> task : tasks) {
        try {
            LinkCheckModel processed;
            try {
                processed = task.get(timeOutTime, TimeUnit.MILLISECONDS);
                if (processed != null) {
                    LinkCheckModel reportModel = linkMap.get(processed.getId());
                    reportModel.setStatus(processed.getStatus());
                    reportModel.setCheckResults(processed.getCheckResults());
                    reportModel.setHttpStatus(processed.getHttpStatus());
                } else {
                    //This shouldn't occur; if it does, at least log a message.
                    log.log(Level.WARNING, MessageFormat.format(
                            "A link check task failed to return results. Completed abnormally? {0}",
                            task.isCompletedAbnormally()));
                }
            } catch (TimeoutException e) {
                task.cancel(true);
            }

            completedCount++;
        } catch (InterruptedException | ExecutionException ex) {
            log.log(Level.WARNING, "Check task  was interrupted.  Report results may be not complete.", ex);
        }
        log.log(Level.FINE, MessageFormat.format("Complete Checking Link Count: {0} out of {1}",
                new Object[] { completedCount, links.size() }));
    }

    for (LinkCheckModel checkModel : links) {
        if (StringUtils.isBlank(checkModel.getStatus())) {
            checkModel.setStatus("Unable to verify.  Timed out while waiting.");
        }
    }

    forkJoinPool.shutdownNow();
    try {
        forkJoinPool.awaitTermination(1000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException ex) {
        log.log(Level.WARNING,
                "Check task shutdown was interrupted.  The application will recover and continue.", ex);
    }
}
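
The shutdownNow/awaitTermination pairing above is a compact form of the two-phase shutdown idiom described in the ExecutorService documentation. A sketch of the fuller pattern, with illustrative timeouts:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

// Sketch of two-phase shutdown, adapted from the pattern shown in the
// ExecutorService javadoc. Timeout values are illustrative.
static void shutdownAndAwait(ForkJoinPool pool) {
    pool.shutdown(); // disable new task submissions
    try {
        if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
            pool.shutdownNow(); // cancel currently executing tasks
            if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
                System.err.println("Pool did not terminate");
            }
        }
    } catch (InterruptedException ie) {
        pool.shutdownNow(); // re-cancel if the current thread was interrupted
        Thread.currentThread().interrupt(); // preserve interrupt status
    }
}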

From source file:com.hygenics.parser.ParseDispatcher.java

private void spl(ArrayList<String> json, boolean split) {
    if (json.size() > 0)
        log.info("Records to Add: " + json.size());

    if (split) {

        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procnum * sqlnum))));
        ArrayList<String> l;
        int size = (int) Math.ceil((double) json.size() / qnum); // cast so the division does not truncate
        for (int conn = 0; conn < qnum; conn++) {
            l = new ArrayList<String>();
            if (((conn + 1) * size) < json.size()) {
                l.addAll(json.subList((conn * size), ((conn + 1) * size)));

            } else {
                l.addAll(json.subList((conn * size), json.size())); // exclusive bound keeps the last record
                f2.execute(new SplitPost(template, l));

                break;
            }

            f2.execute(new SplitPost(template, l));
        }

        // request shutdown before awaiting termination
        f2.shutdown();

        try {
            f2.awaitTermination(termtime, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e1) {
            Thread.currentThread().interrupt();
            e1.printStackTrace();
        }

        int incrementor = 0;

        // poll isTerminated(), not isShutdown(), to wait for the tasks
        while (!f2.isTerminated()) {
            incrementor++;
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
            log.info("Shutting Down " + incrementor);
        }

        l = null;
        f2 = null;

    } else {
        for (String j : json) {

            boolean valid = false;

            try {
                Json.read(j);
                valid = true;
            } catch (Exception e) {
                log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                System.out.println(j);
            }

            try {

                this.template.postSingleJson(j);
            } catch (Exception e) {
                log.info("Failed to Post");
                log.error(j);
                e.printStackTrace();
            }
        }
    }

}

From source file:com.hygenics.parser.ParseDispatcher.java

private void sendToDb(ArrayList<String> json, boolean split) {
    if (json.size() > 0)
        log.info("Records to Add: " + json.size());

    if (split) {

        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procnum * sqlnum))));
        ArrayList<String> l;
        int size = (int) Math.ceil((double) json.size() / qnum); // cast so the division does not truncate
        for (int conn = 0; conn < qnum; conn++) {
            l = new ArrayList<String>();
            if (((conn + 1) * size) < json.size()) {
                l.addAll(json.subList((conn * size), ((conn + 1) * size)));

            } else {
                l.addAll(json.subList((conn * size), json.size())); // exclusive bound keeps the last record
                f2.execute(new SplitPost(template, l));

                break;
            }

            f2.execute(new SplitPost(template, l));
        }

        // request shutdown before awaiting termination
        f2.shutdown();

        try {
            f2.awaitTermination(termtime, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e1) {
            Thread.currentThread().interrupt();
            e1.printStackTrace();
        }

        int incrementor = 0;

        // poll isTerminated(), not isShutdown(), to wait for the tasks
        while (!f2.isTerminated()) {
            incrementor++;
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
            log.info("Shutting Down " + incrementor);
        }

        l = null;
        f2 = null;

    } else {
        for (String j : json) {

            boolean valid = false;

            try {
                Json.read(j);
                valid = true;
            } catch (Exception e) {
                log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                System.out.println(j);
            }

            try {

                this.template.postSingleJson(j);
            } catch (Exception e) {
                log.info("Failed to Post");
                log.error(j);
                e.printStackTrace();
            }
        }
    }

}

From source file:MSUmpire.PeptidePeakClusterDetection.PDHandlerBase.java

protected void PeakCurveCorrClustering(XYData mzRange) throws IOException {
    Logger.getRootLogger().info("Grouping isotopic peak curves........");

    LCMSPeakBase.PeakClusters = new ArrayList<>();

    //Thread pool
    final ForkJoinPool fjp = new ForkJoinPool(NoCPUs);
    //        ArrayList<PeakCurveClusteringCorrKDtree> ResultList = new ArrayList<>();
    final ArrayList<ForkJoinTask<ArrayList<PeakCluster>>> ftemp = new ArrayList<>();
    final int end_idx = LCMSPeakBase.UnSortedPeakCurves.size();
    final ArrayList<PeakCluster> resultClusters = new ArrayList<>();
    //For each peak curve
    for (int i = 0; i < end_idx; ++i) {
        final PeakCurve Peakcurve = LCMSPeakBase.UnSortedPeakCurves.get(i);
        if (Peakcurve.TargetMz >= mzRange.getX() && Peakcurve.TargetMz <= mzRange.getY()) {
            //Create a thread unit for doing isotope clustering given a peak curve as the monoisotope peak
            PeakCurveClusteringCorrKDtree unit = new PeakCurveClusteringCorrKDtree(Peakcurve,
                    LCMSPeakBase.GetPeakCurveSearchTree(), parameter, IsotopePatternMap,
                    LCMSPeakBase.StartCharge, LCMSPeakBase.EndCharge, LCMSPeakBase.MaxNoPeakCluster,
                    LCMSPeakBase.MinNoPeakCluster);
            ftemp.add(fjp.submit(unit));
        }
        if (step_pccc == -1)
            step_pccc = fjp.getParallelism() * 32;
        final boolean last_iter = i + 1 == end_idx;
        if (ftemp.size() == step_pccc || last_iter) {
            final List<ForkJoinTask<ArrayList<PeakCluster>>> ftemp_sublist_view = last_iter ? ftemp
                    : ftemp.subList(0, step_pccc / 2);
            for (final ForkJoinTask<ArrayList<PeakCluster>> fut : ftemp_sublist_view)
                try {
                    resultClusters.addAll(fut.get());
                } catch (InterruptedException | ExecutionException ex) {
                    throw new RuntimeException(ex);
                }
            ftemp_sublist_view.clear();
            if (!last_iter && fjp.getActiveThreadCount() < fjp.getParallelism()) {
                step_pccc *= 2;
            }
        }
    }

    assert ftemp.isEmpty() : "temp storage for futures should be empty by end of loop";
    fjp.shutdown();

    try {
        fjp.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve the interrupt status
        Logger.getRootLogger().info("interrupted..");
    }

    for (final PeakCluster peakCluster : resultClusters) {
        //Check if the monoistope peak of cluster has been grouped in other isotope cluster, if yes, remove the peak cluster
        if (!parameter.RemoveGroupedPeaks ||
                !IonChargeHashSet.contains(peakCluster.MonoIsotopePeak.ChargeGrouped, peakCluster.Charge)) {
            peakCluster.Index = LCMSPeakBase.PeakClusters.size() + 1;
            peakCluster.GetConflictCorr();
            LCMSPeakBase.PeakClusters.add(peakCluster);
        }
    }

    System.gc();
    Logger.getRootLogger()
            .info("No of ion clusters:" + LCMSPeakBase.PeakClusters.size() + " (Memory usage:"
                    + Math.round(
                            (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576)
                    + "MB)");
}
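
Because every task here is submitted before the shutdown request, awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS) serves as an effectively unbounded wait. When tasks fork subtasks and the pool should remain usable afterwards, ForkJoinPool also offers awaitQuiescence, which waits for the pool to go idle without a shutdown request; a brief sketch with an illustrative timeout:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

public class QuiescenceSketch {
    public static void main(String[] args) {
        ForkJoinPool pool = new ForkJoinPool();
        pool.execute(() -> System.out.println("fork/join work"));

        // wait until no tasks are running or queued; the pool remains usable
        boolean quiet = pool.awaitQuiescence(30, TimeUnit.SECONDS);
        if (!quiet) {
            System.err.println("tasks were still active when the timeout elapsed");
        }
        pool.shutdown(); // terminate once the pool is no longer needed
    }
}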

From source file:MSUmpire.SpectrumParser.mzXMLParser.java

private List<MzXMLthreadUnit> ParseScans(final BitSet IncludedScans) {
    List<MzXMLthreadUnit> ScanList = new ArrayList<>();
    ArrayList<ForkJoinTask<?>> futures = new ArrayList<>();
    final ForkJoinPool fjp = new ForkJoinPool(NoCPUs);
    Iterator<Entry<Integer, Long>> iter = ScanIndex.entrySet().iterator();
    Entry<Integer, Long> ent = iter.next();
    long currentIdx = ent.getValue();
    int nextScanNo = ent.getKey();
    final RandomAccessFile fileHandler;
    try {
        fileHandler = new RandomAccessFile(filename, "r");
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    }
    byte[] buffer = new byte[1 << 10];
    if (step == -1)
        step = fjp.getParallelism() * 32;
    while (iter.hasNext()) {
        ent = iter.next();
        long startposition = currentIdx;
        long nexposition = ent.getValue();
        int currentScanNo = nextScanNo;
        nextScanNo = ent.getKey();
        currentIdx = nexposition;

        if (IncludedScans.get(currentScanNo)) {
            try {
                final int bufsize = (int) (nexposition - startposition);
                if (buffer.length < bufsize)
                    buffer = new byte[Math.max(bufsize, buffer.length << 1)];
                fileHandler.seek(startposition);
                // readFully loops until bufsize bytes are read; a bare read()
                // may return fewer bytes and silently truncate the scan
                fileHandler.readFully(buffer, 0, bufsize);
                String xmltext = new String(buffer, 0, bufsize, StandardCharsets.ISO_8859_1);
                if (ent.getKey() == Integer.MAX_VALUE) {
                    xmltext = xmltext.replaceAll("</msRun>", "");
                }
                boolean ReadPeak = true;
                final MzXMLthreadUnit unit = new MzXMLthreadUnit(xmltext, parameter, datatype, ReadPeak);
                futures.add(fjp.submit(unit));
                ScanList.add(unit);

                if ((ScanList.size() % step) == 0) {
                    futures.get(futures.size() - step).get();
                    if (iter.hasNext() && fjp.getActiveThreadCount() < fjp.getParallelism()) {
                        step *= 2;
                    }
                }
            } catch (Exception ex) {
                Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
            }
        }
    }
    try {
        fileHandler.close();
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
    fjp.shutdown();
    try {
        fjp.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException ex) {
        throw new RuntimeException(ex);
    }
    return ScanList;
}
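
The step bookkeeping above throttles submission by joining an older future before queuing more work, so parsing never races too far ahead of the pool. A simplified sketch of the same idea; the window size mirrors the parallelism * 32 sizing used above, and the Runnable list is a stand-in for the scan units:

import java.util.ArrayDeque;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.TimeUnit;

// Sketch: bound the number of in-flight tasks to a fixed window.
static void runThrottled(List<Runnable> workItems) throws InterruptedException {
    ForkJoinPool pool = new ForkJoinPool();
    int windowSize = pool.getParallelism() * 32; // mirrors the step sizing above
    ArrayDeque<ForkJoinTask<?>> inFlight = new ArrayDeque<>();
    for (Runnable work : workItems) {
        if (inFlight.size() == windowSize) {
            inFlight.poll().join(); // block on the oldest task before adding more
        }
        inFlight.add(pool.submit(work));
    }
    pool.shutdown();
    pool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); // effectively unbounded
}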

From source file:com.hygenics.parser.SpecifiedDump.java

/**
 * Runs the Dump
 */
public void run() {

    if (archive) {
        if (tables.keySet().size() > 0) {

            Archiver zip = new Archiver();
            String basefile = tables.keySet().iterator().next().split("\\|")[1];

            if (basefile.trim().length() > 0) {
                zip.setBasedirectory(basefile);
                zip.setZipDirectory(basefile + "archive.zip");
                zip.setAvoidanceString(".zip|archive");
                zip.setDelFiles(true);
                zip.run();
            }
        }
    }

    int dumped = 0;
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    boolean checkedTables = (this.tablesMustHave == null);
    for (String tf : tables.keySet()) {
        String[] split = tf.split("\\|");
        log.info("Dumping for " + split[0]);
        String schema = null;
        try {
            schema = split[0].split("\\.")[0];

            if (!checkedTables) {
                ArrayList<String> mustHaveTemp = (ArrayList<String>) this.tablesMustHave.clone();
                ArrayList<String> existingTables = this.template.getJsonData(
                        "SELECT table_name FROM information_schema.tables WHERE table_schema ILIKE '%" + schema
                                + "%'");
                for (String tdict : existingTables) {
                    String table = JsonObject.readFrom(tdict).get("table_name").asString();
                    if (mustHaveTemp.contains(table)) {
                        mustHaveTemp.remove(table);

                        // get count
                        if (this.template.getCount(schema + "." + table) == 0) {
                            try {
                                throw new MissingData(
                                        "Data Missing from Required Table: " + schema + "." + table);
                            } catch (MissingData e) {
                                e.printStackTrace();
                            }
                        }
                    }
                }

                if (mustHaveTemp.size() > 0) {
                    log.error("Drop Schema " + schema + "  is missing the following tables:\n");
                    for (String table : mustHaveTemp) {
                        log.error(table + "\n");
                    }

                    try {
                        throw new TableMissingException();
                    } catch (TableMissingException e) {
                        e.printStackTrace();
                        System.exit(-1);
                    }
                }

            }

        } catch (IndexOutOfBoundsException e) {
            try {
                throw new SQLMalformedException("FATAL ERROR: Table name " + split[0] + " malformed");
            } catch (SQLMalformedException e2) {
                e2.printStackTrace();
                System.exit(-1);
            }
        }

        log.info("Checking  table: " + split[0] + "&& schema: " + schema);
        if (template.checkTable(split[0], schema)) {
            if (template.getCount(schema + "." + split[0].replace(schema + ".", "")) > 0) {
                Set<String> keys = tables.get(tf).keySet();
                String sql;
                String select = "SELECT ";
                String distinct = null;
                String attrs = null;
                String where = null;
                String group = null;
                String order = null;

                /**
                 * SET THE ATTRIBUTES WHICH CAN BE SPECIFIED WITH:
                 * distinct - for concatenating the distinct part of the query
                 * not0 - for specifying that the length must be greater than 0
                 * in the WHERE clause
                 * group - for grouping the attribute
                 * notnull - for specifying that the attr cannot be null
                 * orderby - for specifying our one order attr
                 */
                for (String k : keys) {
                    if (k.toLowerCase().contains("distinct")) {
                        distinct = (distinct == null)
                                ? "distinct on(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : distinct + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("group")) {
                        group = (group == null) ? "GROUP BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : group + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("not0")) {
                        if (k.contains("not0OR")) {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "OR length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        } else {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "AND length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        }
                    }

                    if (k.toLowerCase().contains("notnull")) {
                        if (k.toLowerCase().contains("notnullor")) {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " OR " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        } else {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " AND " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        }
                    }

                    if (k.toLowerCase().contains("order")) {
                        if (k.toLowerCase().contains("orderdesc")) {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " ASC"
                                    : order;
                        } else {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " DESC"
                                    : order;
                        }
                    }

                    String field = tables.get(tf).get(k);
                    if (k.toLowerCase().contains("attr")) {
                        if (unicoderemove == true) {
                            field = "trim(replace(regexp_replace(" + field
                                    + ",'[^\\u0020-\\u007e,\\(\\);\\-\\[\\]]+',' '),'" + this.delimiter + "','"
                                    + this.replacedel + "')) as " + field;
                        } else {
                            field = "trim(replace(" + field + ",'" + this.delimiter + "','" + this.replacedel
                                    + "'))";
                        }

                        attrs = (attrs == null) ? field : attrs + "," + field;
                    }
                }

                select = (distinct == null) ? select : select.trim() + " " + distinct.trim() + ")";
                select += " " + attrs.trim();
                select += " FROM " + split[0].trim();
                select = (where == null) ? select : select.trim() + " " + where.trim();
                select = (group == null) ? select : select.trim() + " " + group.trim();
                select = (order == null) ? select : select.trim() + " " + order.trim();

                if (extracondition != null) {
                    select += (select.contains(" WHERE ") == true) ? " AND" + extracondition
                            : " WHERE " + extracondition;
                }

                select = select.trim();

                log.info("Dump Select Command: " + select);

                sql = "COPY  (" + select + ") TO STDOUT WITH DELIMITER '" + delimiter.trim()
                        + "' NULL as '' CSV HEADER";
                fjp.execute(new ToFile(sql, split[1].trim()));

                select = "SELECT ";
                distinct = null;
                attrs = null;
                where = null;
                group = null;
                order = null;
                dumped += 1;
            } else {
                try {
                    throw new NoDataException("No Data found in " + split[0]);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        } else {
            try {
                throw new SQLMalformedException("WARNING: Table " + split[0] + " is missing");
            } catch (SQLMalformedException e) {
                e.printStackTrace();
            }
        }
    }

    // shut down before awaiting; awaitTermination only completes early
    // after a shutdown request
    fjp.shutdown();
    try {
        fjp.awaitTermination(60000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }

    if (dumped == 0) {
        log.error("No Date found in any tables");
        System.exit(-1);
    }

}

From source file:com.hygenics.parser.SpecDumpWithReference.java

/**
 * Runs the Dump
 */
public void run() {

    if (archive) {
        Archiver zip = new Archiver();
        String[] barr = baseFile.split("\\/");
        String basefile = "";
        for (int i = 0; i > barr.length - 1; i++) {
            basefile += (i == 0) ? barr[i] : "/" + barr[i];
        }
        if (basefile.trim().length() > 0) {
            zip.setBasedirectory(basefile);
            zip.setZipDirectory(basefile + "archive.zip");
            zip.setAvoidanceString(".zip|archive");
            zip.setDelFiles(true);
            zip.run();
        }
    }

    int dumped = 0;
    log.info("Tables Found: " + tables.size());
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    boolean checkedTables = (this.tablesMustHave == null);
    for (String tf : tables.keySet()) {
        String[] split = (this.baseschema + "." + tf + "|" + this.baseFile + tf).split("\\|");
        log.info("Dumping for " + split[0]);
        String schema = null;
        try {
            schema = split[0].split("\\.")[0];

            if (!checkedTables) {
                ArrayList<String> mustHaveTemp = (ArrayList<String>) this.tablesMustHave.clone();
                ArrayList<String> existingTables = this.template.getJsonData(
                        "SELECT table_name FROM information_schema.tables WHERE table_schema ILIKE '%" + schema
                                + "%'");
                for (String tdict : existingTables) {

                    String table = Json.parse(tdict).asObject().get("table_name").asString();
                    if (mustHaveTemp.contains(table)) {
                        mustHaveTemp.remove(table);

                        // get count
                        if (this.template.getCount(schema + "." + table) == 0) {
                            try {
                                throw new MissingData(
                                        "Data Missing from Required Table: " + schema + "." + table);
                            } catch (MissingData e) {
                                e.printStackTrace();
                                if (tablesMustHave.contains(table)) {
                                    log.error("Critical Table Missing Data! Terminating!");
                                    System.exit(-1);
                                }
                            }
                        }

                    }
                }

                if (mustHaveTemp.size() > 0) {
                    log.error("Drop Schema " + schema + "  is missing the following tables:\n");
                    for (String table : mustHaveTemp) {
                        log.error(table + "\n");
                    }

                    try {
                        throw new TableMissingException();
                    } catch (TableMissingException e) {
                        e.printStackTrace();
                        System.exit(-1);
                    }
                }
            }

        } catch (IndexOutOfBoundsException e) {
            try {
                throw new SQLMalformedException("FATAL ERROR: Table name " + split[0] + " malformed");
            } catch (SQLMalformedException e2) {
                e2.printStackTrace();
                System.exit(-1);
            }
        }

        log.info("Checking  table: " + split[0] + "&& schema: " + schema);

        if (template.checkTable(split[0], schema)) {
            // check if there are records

            if (template.getCount(schema + "." + split[0].replace(schema + ".", "")) > 0) {
                dumped += 1;
                Set<String> keys = tables.get(tf).keySet();
                String sql;
                String select = "SELECT ";
                String distinct = null;
                String attrs = null;
                String where = null;
                String group = null;
                String order = null;

                /**
                 * SET THE ATTRIBUTES WHICH CAN BE SPECIFIED WITH:
                 * distinct - for concatenating the distinct part of the query
                 * not0 - for specifying that the length must be greater than 0
                 * in the WHERE clause
                 * group - for grouping the attribute
                 * notnull - for specifying that the attr cannot be null
                 * orderby - for specifying our one order attr
                 */
                for (String k : keys) {
                    if (k.toLowerCase().contains("distinct")) {
                        distinct = (distinct == null)
                                ? "distinct on(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : distinct + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("group")) {
                        group = (group == null) ? "GROUP BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : group + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("not0")) {
                        if (k.contains("not0OR")) {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "OR length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        } else {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "AND length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        }
                    }

                    if (k.toLowerCase().contains("notnull")) {
                        if (k.toLowerCase().contains("notnullor")) {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " OR " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        } else {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " AND " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        }
                    }

                    if (k.toLowerCase().contains("order")) {
                        if (k.toLowerCase().contains("orderdesc")) {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " ASC"
                                    : order;
                        } else {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " DESC"
                                    : order;
                        }
                    }

                    String field = tables.get(tf).get(k);
                    if (k.toLowerCase().contains("attr")) {
                        if (unicoderemove == true) {
                            field = "regexp_replace(trim(replace(regexp_replace(cast(" + field + " as text)"
                                    + ",'[^\\u0020-\\u007e,\\(\\);\\-\\[\\]]+',' '),'" + this.delimiter + "','"
                                    + this.replacedel + "')),'[\\r|\\n]+','   ','gm') as " + field;
                        } else {
                            field = "regexp_replace(trim(replace(cast(" + field + " as text),'" + this.delimiter
                                    + "','" + this.replacedel + "')),'[\\r|\\n]+','   ','gm')";
                        }

                        attrs = (attrs == null) ? field : attrs + "," + field;
                    }
                }

                select = (distinct == null) ? select : select.trim() + " " + distinct.trim() + ")";
                select += " " + attrs.trim();
                select += " FROM " + split[0].trim();
                select = (where == null) ? select : select.trim() + " " + where.trim();
                select = (group == null) ? select : select.trim() + " " + group.trim();
                select = (order == null) ? select : select.trim() + " " + order.trim();

                if (extracondition != null) {
                    select += (select.contains(" WHERE ") == true) ? " AND" + extracondition
                            : " WHERE " + extracondition;
                }

                select = select.trim();

                log.info("Dump Select Command: " + select);

                sql = "COPY  (" + select + ") TO STDOUT WITH DELIMITER '" + delimiter.trim()
                        + "' NULL as '' CSV HEADER";
                fjp.execute(new ToFile(sql, split[1].trim()));

                select = "SELECT ";
                distinct = null;
                attrs = null;
                where = null;
                group = null;
                order = null;
            } else {
                try {

                    throw new NoDataException("WARNING: Table " + split[0] + " has no Data");

                } catch (NoDataException e) {
                    e.printStackTrace();
                    if (tablesMustHave != null && tablesMustHave.contains(split[0])) {
                        log.error("Table is a Must Have Table by has not Data. Terminating!");
                        System.exit(-1);
                    }
                }
            }
        } else {
            try {
                throw new SQLMalformedException("WARNING: Table " + split[0] + " is missing");
            } catch (SQLMalformedException e) {
                e.printStackTrace();
            }
        }
    }

    // shut down before awaiting; awaitTermination only completes early
    // after a shutdown request
    fjp.shutdown();
    try {
        fjp.awaitTermination(60000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }

    if (dumped == 0) {
        log.info("No Data Found in any Table");
        System.exit(-1);
    }

}