Example usage for java.util.concurrent ForkJoinPool execute

List of usage examples for java.util.concurrent ForkJoinPool execute

Introduction

On this page you can find example usages of the java.util.concurrent ForkJoinPool execute method.

Prototype

public void execute(Runnable task) 

Source Link

Usage

From source file:com.chingo247.structureapi.plan.io.StructurePlanReader.java

/**
 * Reads every "*.xml" structure-plan file under the given directory (recursively)
 * and parses them in parallel on the supplied pool.
 *
 * @param structurePlanDirectory directory scanned recursively for XML plan files
 * @param printstuff             not used by this method; kept for interface compatibility
 * @param pool                   pool on which the per-file processors are executed
 * @return the successfully parsed plans; a file that fails to parse is logged
 *         and skipped instead of aborting the whole read
 */
public List<IStructurePlan> readDirectory(File structurePlanDirectory, boolean printstuff, ForkJoinPool pool) {
    Iterator<File> fit = FileUtils.iterateFiles(structurePlanDirectory, new String[] { "xml" }, true);
    SchematicManager sdm = SchematicManager.getInstance();
    sdm.load(structurePlanDirectory);

    // Submit one processor per plan file; results are collected below.
    List<StructurePlanProcessor> processors = new ArrayList<>();
    while (fit.hasNext()) {
        StructurePlanProcessor spp = new StructurePlanProcessor(fit.next());
        processors.add(spp);
        pool.execute(spp);
    }

    List<IStructurePlan> plans = new ArrayList<>(processors.size());
    for (StructurePlanProcessor spp : processors) {
        try {
            IStructurePlan plan = spp.get();
            if (plan != null) {
                plans.add(plan);
            }
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt(); // restore interrupt status
            break; // stop waiting for the remaining results
        } catch (Exception ex) {
            // Per-plan catch: the original wrapped the WHOLE loop in one
            // try/catch, so a single bad plan discarded all remaining plans.
            java.util.logging.Logger.getLogger(getClass().getName()).log(java.util.logging.Level.SEVERE,
                    ex.getMessage(), ex);
        }
    }

    return plans;
}

From source file:com.hygenics.parser.Mapper.java

/**
 * Posts JSON records to the database, either fanned out over a pool of
 * {@code SplitPost} workers or one record at a time on the calling thread.
 *
 * @param json  records to post; nothing happens when empty
 * @param split true to split the work across {@code qnum} parallel posts
 */
private void sendToDb(List<String> json, boolean split) {
    if (json.isEmpty()) {
        return; // nothing to post
    }
    log.info("Records to Add: " + json.size());

    if (split) {
        ForkJoinPool f2 = new ForkJoinPool(Runtime.getRuntime().availableProcessors() * qnum);
        // Floating-point division: the original divided two ints before
        // Math.ceil, truncating the chunk size.
        int size = (int) Math.ceil((double) json.size() / qnum);
        for (int conn = 0; conn < qnum; conn++) {
            int from = conn * size;
            int to = Math.min((conn + 1) * size, json.size());
            if (from >= to) {
                break; // no records left for the remaining workers
            }
            // Copy the slice; the original's last chunk ended at size() - 1
            // (subList's toIndex is exclusive) and silently dropped the final record.
            f2.execute(new SplitPost(template, new ArrayList<String>(json.subList(from, to))));
        }

        // Shut down BEFORE awaiting termination; otherwise awaitTermination
        // always blocks for the full timeout. This also replaces the original
        // busy-spin "Shutting Down" loop.
        f2.shutdown();
        try {
            if (!f2.awaitTermination(termtime, TimeUnit.MILLISECONDS)) {
                log.info("Timed out waiting for posts to finish");
            }
        } catch (InterruptedException e1) {
            Thread.currentThread().interrupt(); // restore interrupt status
            e1.printStackTrace();
        }

    } else {
        for (String j : json) {
            // Validation only: a malformed record is logged but still posted,
            // matching the original behavior.
            try {
                Json.read(j);
            } catch (Exception e) {
                log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                System.out.println(j);
            }

            try {
                this.template.postSingleJson(j);
            } catch (Exception e) {
                log.info("Failed to Post");
                log.error(j);
                e.printStackTrace();
            }
        }
    }
}

From source file:com.hygenics.parser.QualityAssurer.java

/**
 * Posts JSON records to the database, either fanned out over a pool of
 * {@code SplitPost} workers or one record at a time on the calling thread.
 *
 * @param json  records to post; nothing happens when empty
 * @param split true to split the work across {@code qnum} parallel posts
 */
private void sendToDb(ArrayList<String> json, boolean split) {
    if (json.isEmpty()) {
        return; // the original crashed here in split mode: subList(0, -1)
    }
    log.info("Records to Add: " + json.size());

    if (split) {
        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procnum * qnum))));
        // Floating-point division: the original divided two ints before
        // Math.ceil, truncating the chunk size.
        int size = (int) Math.ceil((double) json.size() / qnum);
        for (int conn = 0; conn < qnum; conn++) {
            int from = conn * size;
            int to = Math.min((conn + 1) * size, json.size());
            if (from >= to) {
                break; // no records left for the remaining workers
            }
            // Copy the slice; the original's last chunk ended at size() - 1
            // (subList's toIndex is exclusive) and silently dropped the final record.
            f2.execute(new SplitPost(template, new ArrayList<String>(json.subList(from, to))));
        }

        // Shut down BEFORE awaiting termination; otherwise awaitTermination
        // always blocks for the full timeout. This also replaces the original
        // busy-spin "Shutting Down" loop.
        f2.shutdown();
        try {
            if (!f2.awaitTermination(termtime, TimeUnit.MILLISECONDS)) {
                log.info("Timed out waiting for posts to finish");
            }
        } catch (InterruptedException e1) {
            Thread.currentThread().interrupt(); // restore interrupt status
            e1.printStackTrace();
        }

    } else {
        for (String j : json) {
            // Validation only: a malformed record is logged but still posted,
            // matching the original behavior.
            try {
                Json.read(j);
            } catch (Exception e) {
                log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                System.out.println(j);
            }

            try {
                this.template.postSingleJson(j);
            } catch (Exception e) {
                log.info("Failed to Post");
                log.error(j);
                e.printStackTrace();
            }
        }
    }
}

From source file:com.hygenics.parser.KVParser.java

/**
 * Posts JSON records to the database, either fanned out over a pool of
 * {@code SplitPost} workers or one record at a time on the calling thread.
 *
 * @param json  records to post; nothing happens when empty
 * @param split true to split the work across {@code qnums} parallel posts
 */
private void sendToDb(ArrayList<String> json, boolean split) {
    if (json.isEmpty()) {
        return; // nothing to post
    }
    log.info("Records to Add: " + json.size());

    if (split) {
        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procs * qnums))));
        // Floating-point division: the original int division could yield a
        // chunk size of 0, which made every iteration post the SAME slice.
        int size = (int) Math.ceil((double) json.size() / qnums);
        for (int conn = 0; conn < qnums; conn++) {
            int from = conn * size;
            int to = Math.min((conn + 1) * size, json.size());
            if (from >= to) {
                break; // no records left for the remaining workers
            }
            // Copy the slice so tasks don't share a subList view of json.
            f2.execute(new SplitPost(template, new ArrayList<String>(json.subList(from, to))));
        }

        // Sleep-wait for quiescence instead of the original's busy-spin
        // (which incremented a counter with no sleep, pegging a core),
        // then shut the pool down.
        while (!f2.isQuiescent() && f2.getActiveThreadCount() > 0) {
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
                break;
            }
        }
        f2.shutdown();

    } else {
        for (String j : json) {
            // Validation only: a malformed record is logged but still posted,
            // matching the original behavior.
            try {
                Json.read(j);
            } catch (Exception e) {
                log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                System.out.println(j);
            }

            try {
                this.template.postSingleJson(j);
            } catch (Exception e) {
                log.info("Failed to Post");
                log.error(j);
                e.printStackTrace();
            }
        }
    }
}

From source file:com.hygenics.parser.BreakMultiple.java

/**
 * Post to db/*from  w ww .  j  a  v  a  2  s.c  o  m*/
 * 
 * @param json
 * @param split
 */
public void postToDb(ArrayList<String> json, boolean split) {
    log.info("Posting " + json.size() + " Records");

    if (split) {

        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procnum * sqlnum))));
        ArrayList<String> l;
        int size = (int) Math.ceil(json.size() / qnum);
        for (int conn = 0; conn < qnum; conn++) {
            l = new ArrayList<String>();
            if (((conn + 1) * size) < json.size()) {
                l.addAll(json.subList((conn * size), ((conn + 1) * size)));

            } else {
                l.addAll(json.subList((conn * size), (json.size() - 1)));
                f2.execute(new SplitPost(template, l));

                break;
            }

            f2.execute(new SplitPost(template, l));
        }

        try {
            f2.awaitTermination(termtime, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }

        f2.shutdown();

        int incrementor = 0;

        while (f2.isShutdown() == false) {
            incrementor++;
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            log.info("Shutting Down" + incrementor);
        }

        l = null;
        f2 = null;
    } else {
        log.info("Singlethread");

        this.template.postJsonDatawithTable(json);
    }

}

From source file:com.chingo247.structureapi.schematic.SchematicManager.java

/**
 * Loads metadata for every ".schematic" file found (recursively) under
 * {@code directory} into the in-memory {@code schematics} map, using the
 * graph database as a cache so unchanged files are not re-processed.
 * Files are identified by their XXHash64 checksum.
 *
 * NOTE(review): assumes SchematicProcessor.get() blocks until the pooled
 * task completes (ForkJoinTask-style) — confirm against its declaration.
 *
 * @param directory an existing directory; must satisfy isDirectory()
 */
public synchronized void load(File directory) {
    Preconditions.checkArgument(directory.isDirectory());

    Iterator<File> fit = FileUtils.iterateFiles(directory, new String[] { "schematic" }, true);

    if (fit.hasNext()) {
        ForkJoinPool pool = new ForkJoinPool(Runtime.getRuntime().availableProcessors()); // only create the pool if we have schematics
        Map<Long, SchematicDataNode> alreadyHere = Maps.newHashMap();
        Map<Long, SchematicDataNode> needsUpdating = Maps.newHashMap();

        List<SchematicProcessor> tasks = Lists.newArrayList();
        List<Schematic> alreadyDone = Lists.newArrayList();
        XXHasher hasher = new XXHasher();

        try (Transaction tx = graph.beginTx()) {
            // Partition nodes imported within the last two days: nodes missing
            // a rotation must be re-processed; the rest can be reused as-is.
            Collection<SchematicDataNode> schematicNodes = schematicRepository
                    .findAfterDate(System.currentTimeMillis() - TWO_DAYS);
            for (SchematicDataNode node : schematicNodes) {
                if (!node.hasRotation()) {
                    needsUpdating.put(node.getXXHash64(), node);
                    continue;
                }
                alreadyHere.put(node.getXXHash64(), node);
            }

            // Process the schematics that need to be loaded
            while (fit.hasNext()) {
                File schematicFile = fit.next();
                try {
                    long checksum = hasher.hash64(schematicFile);
                    // Only load schematic data that wasn't yet loaded...
                    SchematicDataNode existingData = alreadyHere.get(checksum);
                    if (existingData != null) {
                        // Cached metadata is fresh: build the schematic directly.
                        Schematic s = new DefaultSchematic(schematicFile, existingData.getWidth(),
                                existingData.getHeight(), existingData.getLength(),
                                existingData.getAxisOffset());
                        alreadyDone.add(s);
                    } else if (getSchematic(checksum) == null) {
                        // Unknown file: parse it asynchronously on the pool.
                        SchematicProcessor processor = new SchematicProcessor(schematicFile);
                        tasks.add(processor);
                        pool.execute(processor);
                    }
                } catch (IOException ex) {
                    Logger.getLogger(SchematicManager.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
            tx.success();
        }

        // Wait for the processes the finish and queue them for bulk insert
        List<Schematic> newSchematics = Lists.newArrayList();
        try {
            for (SchematicProcessor sp : tasks) {
                Schematic schematic = sp.get();
                if (schematic != null) {
                    newSchematics.add(schematic);
                }
            }
        } catch (Exception ex) {
            // NOTE(review): one failed processor aborts collection of all
            // remaining results — consider a per-task catch.
            Logger.getLogger(SchematicManager.class.getName()).log(Level.SEVERE, null, ex);
        }

        // Close the pool!
        pool.shutdown();

        int updated = 0;
        // Update the database
        try (Transaction tx = graph.beginTx()) {
            for (Schematic data : alreadyDone) {
                // Refresh the import timestamp so the node survives the
                // two-day cleanup below.
                SchematicDataNode sdn = schematicRepository.findByHash(data.getHash());
                sdn.setLastImport(System.currentTimeMillis());
            }
            for (Schematic newData : newSchematics) {
                if (needsUpdating.get(newData.getHash()) != null) {
                    // Existing node that only lacked a rotation: patch it in place.
                    SchematicDataNode dataNode = schematicRepository.findByHash(newData.getHash());
                    dataNode.setRotation(newData.getAxisOffset());
                    updated++;
                    continue;
                }
                String name = newData.getFile().getName();
                long xxhash = newData.getHash();
                int width = newData.getWidth();
                int height = newData.getHeight();
                int length = newData.getLength();
                int axisOffset = newData.getAxisOffset();
                schematicRepository.addSchematic(name, xxhash, width, height, length, axisOffset,
                        System.currentTimeMillis());
            }

            // Delete unused
            int removed = 0;
            for (SchematicDataNode sdn : schematicRepository
                    .findBeforeDate(System.currentTimeMillis() - TWO_DAYS)) {
                sdn.delete();
                removed++;
            }
            if (removed > 0) {
                System.out.println("[SettlerCraft]: Deleted " + removed + " schematic(s) from cache");
            }

            if (updated > 0) {
                System.out.println("[SettlerCraft]: Updated " + updated + " schematic(s) from cache");
            }

            tx.success();
        }

        // Publish both reused and newly parsed schematics to the shared map.
        synchronized (schematics) {
            for (Schematic schematic : newSchematics) {
                schematics.put(schematic.getHash(), schematic);
            }
            for (Schematic schematic : alreadyDone) {
                schematics.put(schematic.getHash(), schematic);
            }
        }

    }

}

From source file:com.hygenics.parser.ParseDispatcher.java

/**
 * Posts JSON records to the database, either fanned out over a pool of
 * {@code SplitPost} workers or one record at a time on the calling thread.
 *
 * @param json  records to post; nothing happens when empty
 * @param split true to split the work across {@code qnum} parallel posts
 */
private void spl(ArrayList<String> json, boolean split) {
    if (json.isEmpty()) {
        return; // the original crashed here in split mode: subList(0, -1)
    }
    log.info("Records to Add: " + json.size());

    if (split) {
        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procnum * sqlnum))));
        // Floating-point division: the original divided two ints before
        // Math.ceil, truncating the chunk size.
        int size = (int) Math.ceil((double) json.size() / qnum);
        for (int conn = 0; conn < qnum; conn++) {
            int from = conn * size;
            int to = Math.min((conn + 1) * size, json.size());
            if (from >= to) {
                break; // no records left for the remaining workers
            }
            // Copy the slice; the original's last chunk ended at size() - 1
            // (subList's toIndex is exclusive) and silently dropped the final record.
            f2.execute(new SplitPost(template, new ArrayList<String>(json.subList(from, to))));
        }

        // Shut down BEFORE awaiting termination; otherwise awaitTermination
        // always blocks for the full timeout. This also replaces the original
        // busy-spin "Shutting Down" loop.
        f2.shutdown();
        try {
            if (!f2.awaitTermination(termtime, TimeUnit.MILLISECONDS)) {
                log.info("Timed out waiting for posts to finish");
            }
        } catch (InterruptedException e1) {
            Thread.currentThread().interrupt(); // restore interrupt status
            e1.printStackTrace();
        }

    } else {
        for (String j : json) {
            // Validation only: a malformed record is logged but still posted,
            // matching the original behavior.
            try {
                Json.read(j);
            } catch (Exception e) {
                log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                System.out.println(j);
            }

            try {
                this.template.postSingleJson(j);
            } catch (Exception e) {
                log.info("Failed to Post");
                log.error(j);
                e.printStackTrace();
            }
        }
    }
}

From source file:com.hygenics.parser.ParseDispatcher.java

/**
 * Posts JSON records to the database, either fanned out over a pool of
 * {@code SplitPost} workers or one record at a time on the calling thread.
 *
 * @param json  records to post; nothing happens when empty
 * @param split true to split the work across {@code qnum} parallel posts
 */
private void sendToDb(ArrayList<String> json, boolean split) {
    if (json.isEmpty()) {
        return; // the original crashed here in split mode: subList(0, -1)
    }
    log.info("Records to Add: " + json.size());

    if (split) {
        ForkJoinPool f2 = new ForkJoinPool(
                (Runtime.getRuntime().availableProcessors() + ((int) Math.ceil(procnum * sqlnum))));
        // Floating-point division: the original divided two ints before
        // Math.ceil, truncating the chunk size.
        int size = (int) Math.ceil((double) json.size() / qnum);
        for (int conn = 0; conn < qnum; conn++) {
            int from = conn * size;
            int to = Math.min((conn + 1) * size, json.size());
            if (from >= to) {
                break; // no records left for the remaining workers
            }
            // Copy the slice; the original's last chunk ended at size() - 1
            // (subList's toIndex is exclusive) and silently dropped the final record.
            f2.execute(new SplitPost(template, new ArrayList<String>(json.subList(from, to))));
        }

        // Shut down BEFORE awaiting termination; otherwise awaitTermination
        // always blocks for the full timeout. This also replaces the original
        // busy-spin "Shutting Down" loop.
        f2.shutdown();
        try {
            if (!f2.awaitTermination(termtime, TimeUnit.MILLISECONDS)) {
                log.info("Timed out waiting for posts to finish");
            }
        } catch (InterruptedException e1) {
            Thread.currentThread().interrupt(); // restore interrupt status
            e1.printStackTrace();
        }

    } else {
        for (String j : json) {
            // Validation only: a malformed record is logged but still posted,
            // matching the original behavior.
            try {
                Json.read(j);
            } catch (Exception e) {
                log.info("ERROR: JSON NOT FORMATTED PROPERLY");
                System.out.println(j);
            }

            try {
                this.template.postSingleJson(j);
            } catch (Exception e) {
                log.info("Failed to Post");
                log.error(j);
                e.printStackTrace();
            }
        }
    }
}

From source file:com.hygenics.parser.GetImages.java

/**
 * Scans the directory at {@code fpath} for image files (bmp/jpg/png/jpeg by
 * name) and posts JSON metadata records for them to the database in batches
 * of {@code commitsize}, fanned out over {@code numqueries} ImagePost tasks.
 */
private void addFromFile() {
    File f = new File(fpath);
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    ArrayList<String> imageData = new ArrayList<String>();
    // Floating-point division: the original int division truncated before
    // Math.ceil, producing undersized chunks.
    int size = (int) Math.ceil((double) commitsize / numqueries);

    if (f.exists()) {
        File[] list = f.listFiles();
        int curr = 0;
        if (list != null) {
            for (File img : list) {
                curr += 1;
                if (img.isDirectory() == false
                        && (img.getName().contains(".bmp") || img.getName().toLowerCase().contains(".jpg")
                                || img.getName().toLowerCase().contains(".png")
                                || img.getName().toLowerCase().contains("jpeg"))) {
                    try {
                        if (dbcondition == null
                                || template
                                        .getJsonData(dbcondition.replace("$IMAGE$",
                                                img.getName().replaceAll("(?mis)" + imagepostfix, "")))
                                        .size() > 0) {
                            // ImageIO.read returns null (rather than throwing)
                            // for unrecognized content, so check the result —
                            // the original ignored it and recorded non-images.
                            BufferedImage bi = ImageIO.read(img);
                            if (bi == null) {
                                log.info(img.getName() + " is not an Image!");
                            } else {
                                // Hash is assumed to be the file name minus the postfix.
                                String hash = img.getName().replaceAll("(?mis)" + imagepostfix, "");
                                JsonObject jobj = new JsonObject();
                                jobj.add("offenderhash", hash);
                                jobj.add("image", hash);
                                jobj.add("image_path", img.getName());
                                jobj.add("table", table);
                                jobj.add("date", Calendar.getInstance().getTime().toString());
                                imageData.add(jobj.toString());
                            }
                        }
                    } catch (IOException e) {
                        log.info(img.getName() + " is not an Image!");
                        e.printStackTrace();
                    } catch (Exception e) {
                        log.info("Error in Posting to Database.");
                        e.printStackTrace();
                    }
                }

                // Flush a batch once commitsize records accumulate or at the last file.
                if (imageData.size() >= commitsize || curr == list.length) {
                    log.info("Posting to DB @ " + Calendar.getInstance().getTime().toString());
                    for (int i = 0; i < numqueries; i++) {
                        int from = i * size;
                        int to = Math.min((i + 1) * size, imageData.size());
                        if (from >= to) {
                            break; // nothing left for the remaining workers
                        }
                        // Copy the slice: tasks must not share a subList view
                        // of imageData, which is cleared below.
                        fjp.execute(new ImagePost(new ArrayList<String>(imageData.subList(from, to))));
                    }

                    // Wait for the batch without busy-spinning (the original
                    // loop incremented a counter with no sleep, pegging a core).
                    while (!fjp.isQuiescent() && fjp.getActiveThreadCount() > 0) {
                        try {
                            Thread.sleep(50);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt(); // restore interrupt status
                            break;
                        }
                    }
                    imageData.clear();
                    log.info("Finished Posting to DB @ " + Calendar.getInstance().getTime().toString());
                }
            }
        }

    } else {
        log.error("File Path does Not Exist.Please Check Image Pull!");
    }
    fjp.shutdown();
    fjp = null;
}

From source file:com.hygenics.parser.GetImages.java

/**
 * Pulls image links from the database in chunks, downloads each image
 * (optionally through a rotating proxy list), and posts the resulting
 * records back to the database in batches.
 *
 * NOTE(review): iteration/offset bookkeeping (iterations, tillfound, offset)
 * is preserved exactly as written; its intent could not be fully verified here.
 */
private void getImages() {
    // controls the web process from a removed method
    log.info("Setting Up Pull");
    String[] proxyarr = (proxies == null) ? null : proxies.split(",");

    // cleanup
    if (cleanup) {
        cleanupDir(fpath);
    }

    // accept all cookies for the image grab
    CookieManager cm = new CookieManager();
    cm.setCookiePolicy(CookiePolicy.ACCEPT_ALL);
    CookieHandler.setDefault(cm);
    int numimages = 0;
    int iter = 0;
    int found = 0;

    // set proxy if needed
    if (proxyuser != null) {
        proxy(proxyhost, proxyport, https, proxyuser, proxypass);
    }

    ArrayList<String> postImages = new ArrayList<String>();
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    Set<Callable<String>> pulls = new HashSet<Callable<String>>();
    Set<Callable<ArrayList<String>>> sqls = new HashSet<Callable<ArrayList<String>>>();
    List<Future<String>> imageFutures;

    ArrayList<String> images;
    // Floating-point division: int division truncated before Math.ceil.
    int chunksize = (int) Math.ceil((double) commitsize / numqueries);
    log.info("Chunksize: " + chunksize);
    if (baseurl != null || baseurlcolumn != null) {
        do {
            log.info("Offset: " + offset);
            log.info("Getting Images");
            images = new ArrayList<String>(commitsize);
            log.info("Getting Columns");
            // Build this iteration's queries. Clear first: the original never
            // cleared sqls, so every iteration re-ran ALL previous queries.
            sqls.clear();
            for (int n = 0; n < numqueries; n++) {
                String tempsql = sql + " WHERE " + idString + " >= " + offset + " AND " + idString + " < "
                        + (offset + chunksize);

                if (conditions != null) {
                    tempsql += conditions;
                }

                sqls.add(new QueryDatabase(
                        ((extracondition != null) ? tempsql + " " + extracondition : tempsql)));

                offset += chunksize;
            }

            // invokeAll blocks until every query task completes, so no extra
            // wait loop is needed (the original spin loop's condition was
            // inverted and never executed anyway).
            List<Future<ArrayList<String>>> futures = fjp.invokeAll(sqls);

            for (Future<ArrayList<String>> f : futures) {
                try {
                    ArrayList<String> fjson = f.get();
                    if (fjson.size() > 0) {
                        images.addAll(fjson);
                    }

                    if (f.isDone() == false) {
                        f.cancel(true);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore interrupt status
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
            log.info(Integer.toString(images.size()) + " image links found. Pulling.");

            ArrayList<String> tempproxies = new ArrayList<String>();

            if (proxyarr != null) {
                for (String proxy : proxyarr) {
                    tempproxies.add(proxy.trim());
                }
            }

            if (maxproxies > 0) {
                maxproxies -= 1;// 0 and 1 should be equivalent conditions
                // --num is not like most 0 based still due
                // to >=
            }

            // get images
            for (int num = 0; num < images.size(); num++) {
                String icols = images.get(num);
                // Cast AFTER multiplying: the original wrote
                // (int) Math.random() * (n - 1), which casts first, is always
                // 0, and therefore always picked the first proxy.
                int proxnum = (int) (Math.random() * tempproxies.size());
                String proxy = (tempproxies.size() == 0) ? null : tempproxies.get(proxnum);

                // add grab
                pulls.add(new ImageGrabber(icols, proxy));

                if (proxy != null) {
                    tempproxies.remove(proxy);
                }

                // run the accumulated grabs when the batch is full, the proxy
                // list is exhausted, or this is the last link
                if (num + 1 == images.size() || pulls.size() >= commitsize || tempproxies.size() == 0) {
                    if (tempproxies.size() == 0 && proxies != null) {
                        tempproxies = new ArrayList<String>(proxyarr.length);

                        for (String p : proxyarr) {
                            tempproxies.add(p.trim());
                        }
                    }

                    // invokeAll blocks until all grabs complete.
                    imageFutures = fjp.invokeAll(pulls);

                    for (Future<String> f : imageFutures) {
                        try {
                            String add = f.get();
                            if (add != null) {
                                postImages.add(add);
                            }
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt(); // restore interrupt status
                        } catch (ExecutionException e) {
                            e.printStackTrace();
                        }
                    }
                    imageFutures = null;// garbage collect elligible
                    pulls = new HashSet<Callable<String>>(commitsize);
                }

                if (postImages.size() >= commitsize && addtoDB == true) {
                    log.info("Posting to Database");
                    log.info("Found " + postImages.size() + " images");
                    numimages += postImages.size();
                    postBatch(fjp, postImages);
                    found += postImages.size();
                    postImages.clear();
                }

            }

            if (postImages.size() > 0 && addtoDB == true) {
                log.info("Posting to Database");
                numimages += postImages.size();
                postBatch(fjp, postImages);
                found += postImages.size();
                postImages.clear();
            }

            // handle iterations specs
            iter += 1;
            log.info("Iteration: " + iter);
            if ((iter < iterations && found < images.size()) || tillfound == true) {
                log.info("Not All Images Obtained Trying Iteration " + iter + " of " + iterations);
                offset -= commitsize;
            } else if ((iter < iterations && found >= images.size()) && tillfound == false) {
                log.info("Images Obtained in " + iter + " iterations. Continuing.");
                iter = 0;
            } else {
                // precautionary
                log.info("Images Obtained in " + iter + " iterations. Continuing");
                iter = 0;
            }

        } while (images.size() > 0 && iter < iterations);

        // Shut the pool down if it is still running. The original condition
        // was inverted: it called shutdownNow only on an already-shut-down pool.
        if (!fjp.isShutdown()) {
            fjp.shutdown();
        }
    }

    log.info("Complete. Check for Errors \n " + numimages + " Images Found");
}

/**
 * Splits the accumulated records across {@code numqueries} ImagePost tasks
 * and waits for them to finish before returning.
 */
private void postBatch(ForkJoinPool fjp, List<String> postImages) {
    // Ceil with floating-point division so no partial chunk is lost (the
    // original used floor/int division, which could yield size 0 and post
    // duplicate slices; its last chunk also ended at size() - 1, dropping
    // the final record).
    int size = (int) Math.ceil((double) postImages.size() / numqueries);
    for (int n = 0; n < numqueries; n++) {
        int from = n * size;
        int to = Math.min((n + 1) * size, postImages.size());
        if (from >= to) {
            break; // nothing left for the remaining workers
        }
        // Copy the slice: tasks must not share a subList view of a list
        // the caller clears afterwards.
        fjp.execute(new ImagePost(new ArrayList<String>(postImages.subList(from, to))));
    }
    // Wait without busy-spinning (the original's wait conditions were either
    // inverted — so it did not wait at all — or spun with no sleep).
    while (!fjp.isQuiescent() && fjp.getActiveThreadCount() > 0) {
        try {
            Thread.sleep(50);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status
            return;
        }
    }
}