Example usage for java.lang Thread interrupted

List of usage examples for java.lang.Thread.interrupted()

Introduction

On this page you can find example usages of java.lang.Thread.interrupted().

Prototype

public static boolean interrupted() 

Document

Tests whether the current thread has been interrupted. The interrupted status of the thread is cleared by this method.
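
A minimal, self-contained sketch of the clear-on-read behavior:

public class InterruptedDemo {
    public static void main(String[] args) {
        // Set the current thread's own interrupt flag.
        Thread.currentThread().interrupt();

        // The first call reports the interrupt and clears the flag...
        System.out.println(Thread.interrupted()); // prints true
        // ...so an immediate second call sees a clean flag.
        System.out.println(Thread.interrupted()); // prints false
    }
}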

Usage

From source file:com.github.cukedoctor.jenkins.CukedoctorPublisher.java

@Override
public void perform(Run<?, ?> build, FilePath workspace, Launcher launcher, TaskListener listener)
        throws IOException, InterruptedException {

    FilePath workspaceJsonSourceDir;//most of the time on slave
    FilePath workspaceJsonTargetDir;//always on master
    if (!hasText(featuresDir)) {
        workspaceJsonSourceDir = workspace;
        workspaceJsonTargetDir = getMasterWorkspaceDir(build);
    } else {
        workspaceJsonSourceDir = new FilePath(workspace, featuresDir);
        workspaceJsonTargetDir = new FilePath(getMasterWorkspaceDir(build), featuresDir);
    }

    logger = listener.getLogger();
    workspaceJsonSourceDir.copyRecursiveTo("**/*.json,**/cukedoctor-intro.adoc,**/cukedoctor.properties",
            workspaceJsonTargetDir);

    System.setProperty("INTRO_CHAPTER_DIR", workspaceJsonTargetDir.getRemote());

    logger.println("");
    logger.println("Generating living documentation for " + build.getFullDisplayName()
            + " with the following arguments: ");
    logger.println("Features dir: " + workspaceJsonSourceDir.getRemote());
    logger.println("Format: " + format.getFormat());
    logger.println("Toc: " + toc.getToc());
    logger.println("Title: " + title);
    logger.println("Numbered: " + Boolean.toString(numbered));
    logger.println("Section anchors: " + Boolean.toString(sectAnchors));
    logger.println("Hide features section: " + Boolean.toString(hideFeaturesSection));
    logger.println("Hide summary: " + Boolean.toString(hideSummary));
    logger.println("Hide scenario keyword: " + Boolean.toString(hideScenarioKeyword));
    logger.println("Hide step time: " + Boolean.toString(hideStepTime));
    logger.println("Hide tags: " + Boolean.toString(hideTags));
    logger.println("");

    Result result = Result.SUCCESS;
    List<Feature> features = FeatureParser.findAndParse(workspaceJsonTargetDir.getRemote());
    if (!features.isEmpty()) {
        if (!hasText(title)) {
            title = "Living Documentation";
        }

        logger.println("Found " + features.size() + " feature(s)...");

        File targetBuildDirectory = new File(build.getRootDir(), CukedoctorBaseAction.BASE_URL);
        if (!targetBuildDirectory.exists()) {
            boolean created = targetBuildDirectory.mkdirs();
            if (!created) {
                listener.error("Could not create file at location: " + targetBuildDirectory.getAbsolutePath());
                result = Result.UNSTABLE;
            }
        }

        GlobalConfig globalConfig = GlobalConfig.getInstance();
        DocumentAttributes documentAttributes = globalConfig.getDocumentAttributes().backend(format.getFormat())
                .toc(toc.getToc()).numbered(numbered).sectAnchors(sectAnchors).docTitle(title);

        globalConfig.getLayoutConfig().setHideFeaturesSection(hideFeaturesSection);

        globalConfig.getLayoutConfig().setHideSummarySection(hideSummary);

        globalConfig.getLayoutConfig().setHideScenarioKeyword(hideScenarioKeyword);

        globalConfig.getLayoutConfig().setHideStepTime(hideStepTime);

        globalConfig.getLayoutConfig().setHideTags(hideTags);

        String outputPath = targetBuildDirectory.getAbsolutePath();
        CukedoctorBuildAction action = new CukedoctorBuildAction(build);
        final ExecutorService pool = Executors.newFixedThreadPool(4);
        if ("all".equals(format.getFormat())) {
            File allHtml = new File(
                    outputPath + System.getProperty("file.separator") + CukedoctorBaseAction.ALL_DOCUMENTATION);
            if (!allHtml.exists()) {
                boolean created = allHtml.createNewFile();
                if (!created) {
                    listener.error("Could not create file at location: " + allHtml.getAbsolutePath());
                    result = Result.UNSTABLE;
                }
            }
            InputStream is = null;
            OutputStream os = null;
            try {
                is = getClass().getResourceAsStream("/" + CukedoctorBaseAction.ALL_DOCUMENTATION);
                os = new FileOutputStream(allHtml);

                int copyResult = IOUtils.copy(is, os);
                if (copyResult == -1) {
                    listener.error("File is too big.");//will never reach here but findbugs forced it...
                    result = Result.UNSTABLE;
                }
            } finally {
                if (is != null) {
                    is.close();
                }
                if (os != null) {
                    os.close();
                }
            }

            action.setDocumentationPage(CukedoctorBaseAction.ALL_DOCUMENTATION);
            pool.execute(runAll(features, documentAttributes, outputPath));
        } else {
            action.setDocumentationPage("documentation." + format.getFormat());
            pool.execute(run(features, documentAttributes, outputPath));
        }

        build.addAction(action);
        pool.shutdown();
        try {
            if (format.equals(FormatType.HTML)) {
                pool.awaitTermination(5, TimeUnit.MINUTES);
            } else {
                pool.awaitTermination(15, TimeUnit.MINUTES);
            }
        } catch (final InterruptedException e) {
            Thread.interrupted();
            listener.error(
                    "Your documentation is taking too long to be generated. Halting the generation now to not throttle Jenkins.");
            result = Result.FAILURE;
        }

        if (result.equals(Result.SUCCESS)) {
            listener.hyperlink("../" + CukedoctorBaseAction.BASE_URL, "Documentation generated successfully!");
            logger.println("");
        }

    } else {
        logger.println(String.format("No features Found in %s. %sLiving documentation will not be generated.",
                workspaceJsonTargetDir.getRemote(), "\n"));

    }

    build.setResult(result);
}
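
Note the catch block around awaitTermination above: it calls Thread.interrupted() to clear the flag before marking the build failed. An alternative, and the more common library idiom, is to restore the flag so callers further up the stack can still observe the interruption. A minimal sketch of that variant (helper name hypothetical):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

class AwaitHelper {
    // Waits for the pool to terminate, preserving the interrupt status on cancellation.
    static boolean awaitQuietly(ExecutorService pool, long timeoutMinutes) {
        try {
            return pool.awaitTermination(timeoutMinutes, TimeUnit.MINUTES);
        } catch (InterruptedException e) {
            // Restore the flag instead of swallowing it, so the caller can react too.
            Thread.currentThread().interrupt();
            return false;
        }
    }
}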

From source file:com.tesora.dve.mysqlapi.repl.MyReplicationSlaveService.java

@Override
public void restart() throws PEException {
    stop();

    // initiate reconnection on a new thread to allow the existing i/o thread to resume channel-closed processing
    reconnectThread = new Thread(new Runnable() {

        @Override
        public void run() {
            FastDateFormat formatter = FastDateFormat.getDateTimeInstance(FastDateFormat.MEDIUM,
                    FastDateFormat.MEDIUM);

            long retries = 0;
            long start = System.currentTimeMillis();
            long lastAttempt = start;

            logger.info("Replication slave lost connection on " + formatter.format(start)
                    + ".  Attempting to reconnect with the following parameters: "
                    + MyReplicationSlaveConfig.REPL_SLAVE_MASTER_RETRY_CONNECT + "="
                    + myConfig.getMasterConnectRetry() + " seconds" + ", "
                    + MyReplicationSlaveConfig.REPL_SLAVE_SLAVE_NET_TIMEOUT + "="
                    + myConfig.getSlaveNetTimeout() + " seconds" + ", "
                    + MyReplicationSlaveConfig.REPL_SLAVE_MASTER_RETRY_COUNT + "="
                    + myConfig.getMasterRetryCount());

            while (myClient == null || !myClient.isConnected()) {
                if (Thread.interrupted()) {
                    logger.info("Replication slave reconnection was terminated by STOP service command.");
                    reconnectThread = null;
                    break;
                }

                if (((retries > 0) && (retries >= myConfig.getMasterRetryCount()))
                        || ((lastAttempt - start) / 1000) > myConfig.getSlaveNetTimeout()) {
                    logger.warn(
                            "Replication slave was unable to reconnect and will stop replication.  Total attempts="
                                    + retries + ", Started=" + formatter.format(start) + ", Ended="
                                    + formatter.format(lastAttempt));
                    reconnectThread = null;
                    return;
                }

                try {
                    Thread.sleep(myConfig.getMasterConnectRetry() * 1000);
                } catch (Exception e) {
                    logger.info("Replication slave reconnection was terminated by STOP service command.");
                    reconnectThread = null;
                    return;
                }

                if (Thread.interrupted()) {
                    reconnectThread = null;
                    logger.info("Replication slave reconnection was terminated by STOP service command.");
                    break;
                }

                retries++;
                lastAttempt = System.currentTimeMillis();
                try {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Replication slave reconnect attempt #" + retries + " at "
                                + formatter.format(lastAttempt));
                    }
                    start(false);
                } catch (Exception e) {
                    // do nothing
                    if (logger.isDebugEnabled()) {
                        logger.debug("Replication slave reconnect attempt #" + retries + " failed.", e);
                    }
                }
            }

            if (myClient.isConnected()) {
                // successfully reconnected
                logger.info("Replication slave successfully reconnected on attempt " + retries + " on "
                        + formatter.format(lastAttempt));
            }
        }

    }, "ReplicationReconnect");
    reconnectThread.start();
}
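
The reconnect loop above checks Thread.interrupted() between attempts and also treats an interrupted Thread.sleep() as a stop request, so the STOP command can cancel the thread at either point. A condensed sketch of that loop shape (retry limit and delay are illustrative):

class ReconnectLoop {
    // Retries connect until it succeeds, honoring interruption between attempts.
    static boolean retry(Runnable connect, int maxRetries, long delayMillis) {
        for (int attempt = 1; attempt <= maxRetries; attempt++) {
            if (Thread.interrupted()) {
                return false; // stop requested; note the flag is now cleared
            }
            try {
                Thread.sleep(delayMillis); // back off before the next attempt
                connect.run();
                return true;
            } catch (InterruptedException e) {
                return false; // sleep interrupted: also treated as a stop request
            } catch (RuntimeException e) {
                // connect failed; fall through and retry
            }
        }
        return false;
    }
}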

From source file:org.apache.hadoop.hbase.mymapreduce.TableMapReduceUtil.java

public static void initCredentials(Job job) throws IOException {
    if (User.isHBaseSecurityEnabled(job.getConfiguration())) {
        try {
            User.getCurrent().obtainAuthTokenForJob(job.getConfiguration(), job);
        } catch (InterruptedException ie) {
            LOG.info("Interrupted obtaining user authentication token");
            Thread.interrupted();
        }
    }
}

From source file:com.jeremyhaberman.playgrounds.WebPlaygroundDAO.java

@Override
public Collection<? extends Playground> getWithin(Context context, GeoPoint topLeft, GeoPoint bottomRight,
        int maxQuantity) {
    playgrounds = new ArrayList<Playground>();
    String result = swingset.getResources().getString(R.string.error);
    HttpURLConnection httpConnection = null;
    Log.d(TAG, "getPlaygrounds()");

    try {
        // Check if task has been interrupted
        if (Thread.interrupted()) {
            throw new InterruptedException();
        }

        // Build query
        URL url = new URL("http://swingsetweb.appspot.com/playground?" + TYPE_PARAM + "=" + WITHIN + "&"
                + TOP_LEFT_LATITUDE_PARAM + "=" + topLeft.getLatitudeE6() / 1E6 + "&" + TOP_LEFT_LONGITUDE_PARAM
                + "=" + topLeft.getLongitudeE6() / 1E6 + "&" + BOTTOM_RIGHT_LATITUDE_PARAM + "="
                + bottomRight.getLatitudeE6() / 1E6 + "&" + BOTTOM_RIGHT_LONGITUDE_PARAM + "="
                + bottomRight.getLongitudeE6() / 1E6);
        httpConnection = (HttpURLConnection) url.openConnection();
        httpConnection.setConnectTimeout(15000);
        httpConnection.setReadTimeout(15000);
        StringBuilder response = new StringBuilder();

        if (httpConnection.getResponseCode() == HttpURLConnection.HTTP_OK) {
            // Read results from the query
            BufferedReader input = new BufferedReader(
                    new InputStreamReader(httpConnection.getInputStream(), "UTF-8"));
            String strLine = null;
            while ((strLine = input.readLine()) != null) {
                response.append(strLine);
            }
            input.close();

        }

        // Parse the JSON results into playground objects
        JSONArray jsonPlaygrounds = new JSONArray(response.toString());
        int numOfPlaygrounds = jsonPlaygrounds.length();

        JSONObject jsonPlayground = null;

        for (int i = 0; i < numOfPlaygrounds; i++) {
            jsonPlayground = jsonPlaygrounds.getJSONObject(i);
            playgrounds.add(toPlayground(jsonPlayground));
        }

    } catch (Exception e) {
        Log.e(TAG, "Exception", e);
        Intent errorIntent = new Intent(context, Playgrounds.class);
        errorIntent.putExtra("Exception", e.getLocalizedMessage());
        errorIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        context.startActivity(errorIntent);
    } finally {
        if (httpConnection != null) {
            httpConnection.disconnect();
        }
    }

    Log.d(TAG, "   -> returned " + result);
    return playgrounds;
}
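
Here the flag check is converted into an InterruptedException at the top of the task, which routes cancellation through the same catch block as every other failure. A minimal sketch of that check-then-throw idiom:

class CancellableTask {
    // Throws if the current thread has been asked to stop; clears the flag as a side effect.
    static void throwIfCancelled() throws InterruptedException {
        if (Thread.interrupted()) {
            throw new InterruptedException("task cancelled");
        }
    }
}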

From source file:com.alibaba.wasp.fserver.handler.OpenEntityGroupHandler.java

/**
 * Update ZK, ROOT or META. This can take a while if for example the .META. is
 * not available -- if the server hosting .META. crashed and we are waiting on it
 * to come back -- so run in a thread and keep updating the znode state in the
 * meantime so the master doesn't time out our entityGroup-in-transition. The
 * caller must clean up the entityGroup if this fails.
 */
boolean updateMeta(final EntityGroup entityGroup) {
    if (this.server.isStopped() || this.fsServices.isStopping()) {
        return false;
    }
    // Object we do wait/notify on. Make it boolean. If set, we're done.
    // Else, wait.
    final AtomicBoolean signaller = new AtomicBoolean(false);
    PostOpenDeployTasksThread t = new PostOpenDeployTasksThread(entityGroup, this.server, this.fsServices,
            signaller);
    t.start();
    int assignmentTimeout = this.server.getConfiguration()
            .getInt("wasp.master.assignment.timeoutmonitor.period", 10000);
    // Total timeout for meta edit. If we fail adding the edit then close out
    // the entityGroup and let it be assigned elsewhere.
    long timeout = assignmentTimeout * 10;
    long now = System.currentTimeMillis();
    long endTime = now + timeout;
    // Let our period at which we update OPENING state be 1/3rd of the
    // entityGroups-in-transition timeout period.
    long period = Math.max(1, assignmentTimeout / 3);
    long lastUpdate = now;
    boolean tickleOpening = true;
    while (!signaller.get() && t.isAlive() && !this.server.isStopped() && !this.fsServices.isStopping()
            && (endTime > now)) {
        long elapsed = now - lastUpdate;
        if (elapsed > period) {
            // Only tickle OPENING if postOpenDeployTasks is taking some time.
            lastUpdate = now;
            tickleOpening = tickleOpening("post_open_deploy");
        }
        synchronized (signaller) {
            try {
                signaller.wait(period);
            } catch (InterruptedException e) {
                // Go to the loop check.
            }
        }
        now = System.currentTimeMillis();
    }
    // Is the thread still alive? We may have left the above loop because the server
    // is stopping or we timed out the edit. If so, interrupt it.
    if (t.isAlive()) {
        if (!signaller.get()) {
            // Thread still running; interrupt
            LOG.debug("Interrupting thread " + t);
            t.interrupt();
        }
        try {
            t.join();
        } catch (InterruptedException ie) {
            LOG.warn("Interrupted joining " + entityGroup.getEntityGroupInfo().getEntityGroupNameAsString(),
                    ie);
            Thread.currentThread().interrupt();
        }
    }

    // Was there an exception opening the entityGroup? This should trigger on
    // InterruptedException too. If so, we failed. Even if tickle opening fails
    // then it is a failure.
    return ((!Thread.interrupted() && t.getException() == null) && tickleOpening);
}
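
The method above polls an AtomicBoolean signaller with a bounded, periodic wait so it can keep tickling the znode while the helper thread runs; the final return then uses Thread.interrupted() as a last check-and-clear gate, consuming any interrupt restored in the join catch. A compact sketch of the bounded-wait portion (names are illustrative):

import java.util.concurrent.atomic.AtomicBoolean;

class BoundedWait {
    // Waits up to timeoutMillis for done to become true, waking every periodMillis.
    static boolean await(AtomicBoolean done, long timeoutMillis, long periodMillis)
            throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        synchronized (done) {
            while (!done.get() && System.currentTimeMillis() < deadline) {
                done.wait(periodMillis); // releases the lock while waiting
            }
        }
        return done.get();
    }
}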

From source file:com.splout.db.dnode.Fetcher.java

private File s3Fetch(URI uri, Reporter reporter) throws IOException, InterruptedException {
    String bucketName = uri.getHost();
    String path = uri.getPath();
    UUID uniqueId = UUID.randomUUID();
    File destFolder = new File(tempDir, uniqueId.toString() + "/" + path);
    if (destFolder.exists()) {
        FileUtils.deleteDirectory(destFolder);
    }
    destFolder.mkdirs();

    Throttler throttler = new Throttler((double) bytesPerSecThrottle);

    boolean done = false;
    try {
        s3Service = new RestS3Service(getCredentials());
        if (s3Service.checkBucketStatus(bucketName) != RestS3Service.BUCKET_STATUS__MY_BUCKET) {
            throw new IOException("Bucket doesn't exist or is already claimed: " + bucketName);
        }

        if (path.startsWith("/")) {
            path = path.substring(1, path.length());
        }

        for (S3Object object : s3Service.listObjects(new S3Bucket(bucketName), path, "")) {
            long bytesSoFar = 0;

            String fileName = path;
            if (path.contains("/")) {
                fileName = path.substring(path.lastIndexOf("/") + 1, path.length());
            }
            File fileDest = new File(destFolder, fileName);
            log.info("Downloading " + object.getKey() + " to " + fileDest + " ...");

            if (fileDest.exists()) {
                fileDest.delete();
            }

            object = s3Service.getObject(new S3Bucket(bucketName), object.getKey());
            InputStream iS = object.getDataInputStream();
            FileOutputStream writer = new FileOutputStream(fileDest);
            byte[] buffer = new byte[downloadBufferSize];

            int nRead;
            while ((nRead = iS.read(buffer, 0, buffer.length)) != -1) {
                // Needed so the transfer can be interrupted at any moment.
                if (Thread.interrupted()) {
                    iS.close();
                    writer.close();
                    cleanDirNoExceptions(destFolder);
                    throw new InterruptedException();
                }

                bytesSoFar += nRead;
                writer.write(buffer, 0, nRead);
                throttler.incrementAndThrottle(nRead);
                if (bytesSoFar >= bytesToReportProgress) {
                    reporter.progress(bytesSoFar);
                    bytesSoFar = 0L;
                }
            }

            if (reporter != null) {
                reporter.progress(bytesSoFar);
            }

            writer.close();
            iS.close();
            done = true;
        }

        if (!done) {
            throw new IOException("Bucket is empty! " + bucketName + " path: " + path);
        }
    } catch (S3ServiceException e) {
        throw new IOException(e);
    }

    return destFolder;
}
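
Because blocking stream reads do not respond to interrupts by themselves, the download above polls Thread.interrupted() on every buffer read to stay cancellable. A stripped-down sketch of the interruptible copy loop (buffer size is illustrative):

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

class InterruptibleCopy {
    // Copies in to out, aborting promptly if the thread is interrupted mid-transfer.
    static long copy(InputStream in, OutputStream out) throws IOException, InterruptedException {
        byte[] buffer = new byte[8192];
        long total = 0;
        int nRead;
        while ((nRead = in.read(buffer)) != -1) {
            if (Thread.interrupted()) {
                throw new InterruptedException("copy cancelled");
            }
            out.write(buffer, 0, nRead);
            total += nRead;
        }
        return total;
    }
}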

From source file:io.druid.server.namespace.cache.NamespaceExtractionCacheManagerExecutorsTest.java

public void testDelete(final String ns) throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(5);
    final CountDownLatch latchMore = new CountDownLatch(10);

    final AtomicLong runs = new AtomicLong(0);
    long prior = 0;
    final URIExtractionNamespace namespace = new URIExtractionNamespace(ns, tmpFile.toURI(),
            new URIExtractionNamespace.ObjectMapperFlatDataParser(
                    URIExtractionNamespaceTest.registerTypes(new ObjectMapper())),
            new Period(1l), null);
    final String cacheId = UUID.randomUUID().toString();
    final CountDownLatch latchBeforeMore = new CountDownLatch(1);
    ListenableFuture<?> future = manager.schedule(namespace, factory, new Runnable() {
        @Override
        public void run() {
            try {
                if (!Thread.interrupted()) {
                    manager.getPostRunnable(namespace, factory, cacheId).run();
                } else {
                    Thread.currentThread().interrupt();
                }
                if (!Thread.interrupted()) {
                    runs.incrementAndGet();
                } else {
                    Thread.currentThread().interrupt();
                }
            } finally {
                latch.countDown();
                try {
                    if (latch.getCount() == 0) {
                        latchBeforeMore.await();
                    }
                } catch (InterruptedException e) {
                    log.debug("Interrupted");
                    Thread.currentThread().interrupt();
                } finally {
                    latchMore.countDown();
                }
            }
        }
    }, cacheId);
    latch.await();
    prior = runs.get();
    latchBeforeMore.countDown();
    Assert.assertFalse(future.isCancelled());
    Assert.assertFalse(future.isDone());
    Assert.assertTrue(fnCache.containsKey(ns));
    latchMore.await();
    Assert.assertTrue(runs.get() > prior);

    Assert.assertTrue(manager.implData.containsKey(ns));

    manager.delete("ns");
    Assert.assertFalse(manager.implData.containsKey(ns));
    Assert.assertFalse(fnCache.containsKey(ns));
    Assert.assertTrue(future.isCancelled());
    Assert.assertTrue(future.isDone());
    prior = runs.get();
    Thread.sleep(20);
    Assert.assertEquals(prior, runs.get());
}

From source file:org.executequery.gui.editor.autocomplete.AutoCompleteSelectionsFactory.java

@SuppressWarnings("resource")
private void databaseExecutableForHost(DatabaseHost databaseHost, String type, String databaseObjectDescription,
        AutoCompleteListItemType autocompleteType) {

    trace("Building autocomplete object list using [ " + databaseHost.getName() + " ] for type - " + type);

    ResultSet rs = null;
    try {

        DatabaseMetaData databaseMetaData = databaseHost.getDatabaseMetaData();
        String catalog = databaseHost.getCatalogNameForQueries(defaultCatalogForHost(databaseHost));
        String schema = databaseHost.getSchemaNameForQueries(defaultSchemaForHost(databaseHost));

        List<String> names = new ArrayList<String>();
        List<AutoCompleteListItem> list = new ArrayList<AutoCompleteListItem>();

        if (autocompleteType == AutoCompleteListItemType.DATABASE_FUNCTION) {

            try {

                rs = databaseMetaData.getFunctions(catalog, schema, null);

            } catch (Throwable e) {

                trace("Functions not available using [ getFunctions() ] - reverting to [ getProcedures() ] - "
                        + e.getMessage());
                rs = getProcedures(databaseMetaData, catalog, schema);
            }

        } else {

            rs = getProcedures(databaseMetaData, catalog, schema);
        }

        if (rs != null) {

            int count = 0;
            while (rs.next()) {

                try {
                    if (Thread.interrupted() || databaseMetaData.getConnection().isClosed()) {

                        return;
                    }
                } catch (SQLException e) {
                }

                names.add(rs.getString(3));
                count++;

                if (count >= INCREMENT) {

                    addTablesToProvider(databaseObjectDescription, autocompleteType, names, list);
                    count = 0;
                    list.clear();
                    names.clear();
                }

            }

            addTablesToProvider(databaseObjectDescription, autocompleteType, names, list);

        }

    } catch (Exception e) {

        error("Tables not available for type " + type + " - driver returned: " + e.getMessage());

    } finally {

        releaseResources(rs);
        trace("Finished autocomplete object list using [ " + databaseHost.getName() + " ] for type - " + type);
    }

}

From source file:com.workplacesystems.queuj.utils.BackgroundProcess.java

protected final boolean interrupted() {
    return Thread.interrupted();
}
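
Because the wrapper delegates to Thread.interrupted(), every call both reports and clears the flag, so callers should capture the result once rather than checking repeatedly. A short usage sketch (loop body hypothetical):

class BackgroundLoop {
    void runLoop() {
        while (true) {
            // interrupted() clears the flag, so test it exactly once per iteration
            if (Thread.interrupted()) {
                break; // stop requested: leave the work loop
            }
            // ... perform one unit of background work ...
        }
    }
}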

From source file:org.executequery.gui.importexport.ImportDelimitedWorker.java

private Object doWork() {

    // the process result
    String processResult = null;

    // are we halting on any error
    int onError = getParent().getOnError();
    haltOnError = (onError == ImportExportProcess.STOP_TRANSFER);

    boolean isBatch = getParent().runAsBatchProcess();

    appendProgressText("Beginning import from delimited file process...");
    appendProgressText("Using connection: " + getParent().getDatabaseConnection().getName());

    // ---------------------------------------
    // table specific counters

    // the table statement result
    int tableInsertCount = 0;

    // the records processed for this table
    int tableRowCount = 0;

    // the table commit count
    int tableCommitCount = 0;

    // ---------------------------------------
    // total import process counters

    // the current commit block size
    int commitCount = 0;

    // the total records inserted
    int totalInsertCount = 0;

    // the total records processed
    int totalRecordCount = 0;

    // the error count
    int errorCount = 0;

    // the current line number
    int lineNumber = 0;

    int rollbackSize = getParent().getRollbackSize();
    int rollbackCount = 0;

    FileReader fileReader = null;
    BufferedReader reader = null;
    DateFormat dateFormat = null;

    try {
        // retrieve the import files
        Vector files = getParent().getDataFileVector();
        int fileCount = files.size();

        // whether to trim whitespace
        boolean trimWhitespace = getParent().trimWhitespace();

        // whether this table has a date/time field
        boolean hasDate = false;

        // whether we are parsing date formats
        boolean parsingDates = parseDateValues();

        // column names are first row
        boolean hasColumnNames = getParent().includeColumnNames();

        // currently bound variables in the prepared statement
        Map<ColumnData, String> boundVariables = null;

        // ignored indexes of columns from the file
        List<Integer> ignoredIndexes = null;

        if (hasColumnNames) {
            boundVariables = new HashMap<ColumnData, String>();
            ignoredIndexes = new ArrayList<Integer>();
            appendProgressText("Using column names from input file's first row.");
        }

        // columns to be imported that are in the file
        Map<ColumnData, String> fileImportedColumns = new HashMap<ColumnData, String>();

        // whether the data format failed (switch structure)
        boolean failed = false;

        // define the delimiter
        String delim = getParent().getDelimiter();

        // ---------------------------
        // --- initialise counters ---
        // ---------------------------

        // the table's column count
        int columnCount = -1;

        // the length of each line in the file
        int rowLength = -1;

        // progress bar values
        int progressStatus = -1;

        // ongoing progress value
        int progressCheck = -1;

        // the import file size
        long fileSize = -1;

        // set the date format

        if (parseDateValues()) {

            try {

                dateFormat = createDateFormatter();

            } catch (IllegalArgumentException e) {

                errorCount++;
                outputExceptionError("Error applying date mask", e);

                return FAILED;
            }

        }

        // record the start time
        start();

        // setup the regex matcher for delims

        // ----------------------------------------------------------------
        // below was the original pattern from the O'Reilly book.
        // we discovered issues when parsing values with quotes
        // in them - not only around them.
        /*
        String regex =
            "(?:^|\\" +
            delim +
            ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \" | ( [^\"\\" +
            delim + "]*+ ) )";
        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");
        */
        // ----------------------------------------------------------------

        // modified above to regex below
        // added the look-ahead after the close quote
        // and removed the quote from the last regex pattern

        String escapedDelim = escapeDelim(delim);

        String regex = "(?:^|" + escapedDelim + ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \"(?="
                + escapedDelim + "?) | ( [^" + escapedDelim + "]*+ ) )";

        // ----------------------------------------------------------------
        // changed above to the following - seems to work for now
        // regex pattern in full - where <delim> is the delimiter to use
        //      \"([^\"]+?)\"<delim>?|([^<delim>]+)<delim>?|<delim>
        //
        // fixed oreilly one - not running this one
        // ----------------------------------------------------------------

        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");

        // ----------------------------------------
        // --- begin looping through the tables ---
        // ----------------------------------------

        // ensure the connection has auto-commit to false
        conn = getConnection();
        conn.setAutoCommit(false);

        int currentRowLength = 0;
        boolean insertLine = false;

        // the number of columns actually available in the file
        int filesColumnCount = 0;

        for (int i = 0; i < fileCount; i++) {

            lineNumber = 0;
            tableInsertCount = 0;
            tableCommitCount = 0;
            rollbackCount = 0;
            tableRowCount = 0;
            rowLength = 0;

            if (Thread.interrupted()) {
                setProgressStatus(100);
                throw new InterruptedException();
            }

            tableCount++;

            DataTransferObject dto = (DataTransferObject) files.elementAt(i);

            // initialise the file object
            File inputFile = new File(dto.getFileName());

            outputBuffer.append("---------------------------\nTable: ");
            outputBuffer.append(dto.getTableName());
            outputBuffer.append("\nImport File: ");
            outputBuffer.append(inputFile.getName());
            appendProgressText(outputBuffer);

            // setup the reader objects
            fileReader = new FileReader(inputFile);
            reader = new BufferedReader(fileReader);

            // retrieve the columns to be imported (or all)
            Vector<ColumnData> columns = getColumns(dto.getTableName());
            columnCount = columns.size();
            filesColumnCount = columnCount;

            // the entire row read
            String row = null;

            // the current delimited value
            String value = null;

            // the ignored column count
            int ignoredCount = 0;

            // clear the file columns cache
            fileImportedColumns.clear();

            // if the first row in the file has the column
            // names compare these with the columns selected
            if (hasColumnNames) {

                // init the bound vars cache with the selected columns
                boundVariables.clear();

                for (int k = 0; k < columnCount; k++) {

                    boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                }

                row = reader.readLine();
                lineNumber++;

                String[] _columns = MiscUtils.splitSeparatedValues(row, delim);
                if (_columns != null && _columns.length > 0) {

                    filesColumnCount = _columns.length;

                    // --------------------------------------
                    // first determine if we have any columns in the
                    // input file that were not selected for import

                    // reset the ignored columns
                    ignoredIndexes.clear();

                    // set up another list to re-add the columns in
                    // the order in which they appear in the file.
                    // all other columns will be added to the end
                    Vector<ColumnData> temp = new Vector<ColumnData>(columnCount);

                    ColumnData cd = null;
                    int ignoredIndex = -1;
                    for (int j = 0; j < _columns.length; j++) {
                        ignoredIndex = j;
                        String column = _columns[j];

                        for (int k = 0; k < columnCount; k++) {
                            cd = columns.get(k);
                            String _column = cd.getColumnName();

                            if (_column.equalsIgnoreCase(column)) {
                                temp.add(cd);
                                fileImportedColumns.put(cd, INCLUDED_COLUMN);
                                ignoredIndex = -1;
                                break;
                            }

                        }

                        if (ignoredIndex != -1) {

                            ignoredIndexes.add(Integer.valueOf(ignoredIndex));
                        }

                    }
                    ignoredCount = ignoredIndexes.size();

                    // if we didn't find any columns at all, show warning
                    if (temp.isEmpty()) {

                        String message = "No matching column names were "
                                + "found within the specified file's first line.\n"
                                + "The current file will be ignored.";

                        outputBuffer.append(message);
                        appendProgressWarningText(outputBuffer);

                        int yesNo = GUIUtilities.displayYesNoDialog(message + "\nDo you wish to continue?",
                                "Warning");

                        if (yesNo == JOptionPane.YES_OPTION) {
                            continue;
                        } else {
                            throw new InterruptedException();
                        }

                    } else {

                        // add any other selected columns to the
                        // end of the temp list with the columns
                        // available in the file
                        boolean addColumn = false;
                        for (int k = 0; k < columnCount; k++) {
                            addColumn = false;
                            cd = columns.get(k);
                            for (int j = 0, n = temp.size(); j < n; j++) {
                                addColumn = true;
                                if (temp.get(j) == cd) {
                                    addColumn = false;
                                    break;
                                }
                            }

                            if (addColumn) {
                                temp.add(cd);
                            }

                        }
                        columns = temp; // note: size should not have changed
                    }

                }
            }
            // otherwise just populate the columns in the file
            // with all the selected columns
            else {

                for (int j = 0; j < columnCount; j++) {

                    fileImportedColumns.put(columns.get(j), INCLUDED_COLUMN);
                }

            }

            /*
            Log.debug("ignored count: " + ignoredCount);
            for (int j = 0; j < columnCount; j++) {
            Log.debug("column: " + columns.get(j));
            }
            */

            fileSize = inputFile.length();
            progressStatus = 10;
            progressCheck = (int) (fileSize / progressStatus);

            // prepare the statement
            prepareStatement(dto.getTableName(), columns);

            if (parsingDates && dateFormat == null) {

                // check for a date data type
                for (int j = 0; j < columnCount; j++) {

                    if (dateFormat == null && !hasDate) {

                        ColumnData cd = columns.get(j);

                        if (fileImportedColumns.containsKey(cd)) {

                            if (cd.isDateDataType()) {

                                hasDate = true;
                                break;
                            }

                        }

                    }
                }

                if (hasDate && dateFormat == null) {

                    String pattern = verifyDate();

                    if (StringUtils.isNotBlank(pattern)) {

                        fileReader.close();
                        setProgressStatus(100);
                        throw new InterruptedException();
                    }

                    dateFormat = createDateFormatter(pattern);
                }

            }

            rowLength = 0;

            while ((row = reader.readLine()) != null) {

                insertLine = true;
                lineNumber++;
                tableRowCount++;
                totalRecordCount++;

                if (Thread.interrupted()) {

                    fileReader.close();
                    printTableResult(tableRowCount, tableCommitCount, dto.getTableName());

                    setProgressStatus(100);
                    throw new InterruptedException();
                }

                currentRowLength = row.length();

                if (currentRowLength == 0) {

                    outputBuffer.append("Line ");
                    outputBuffer.append(lineNumber);
                    outputBuffer.append(" contains no delimited values");
                    appendProgressWarningText(outputBuffer);

                    int yesNo = GUIUtilities.displayYesNoDialog("No values provided from line " + lineNumber
                            + " - the row is blank.\n" + "Do you wish to continue?", "Warning");

                    if (yesNo == JOptionPane.YES_OPTION) {
                        continue;
                    } else {
                        throw new InterruptedException();
                    }
                }

                rowLength += currentRowLength;
                if (progressCheck < rowLength) {

                    setProgressStatus(progressStatus);
                    progressStatus += 10;
                    rowLength = 0;
                }

                // reset matcher with current row
                matcher.reset(row);

                int index = 0;
                int lastIndex = -1;
                int loopIgnoredCount = 0;

                //Log.debug(row);

                for (int j = 0; j < filesColumnCount; j++) {

                    if (matcher.find(index)) {

                        String first = matcher.group(2);

                        if (first != null) {

                            value = first;

                        } else {

                            qMatcher.reset(matcher.group(1));
                            value = qMatcher.replaceAll("\"");
                        }

                        index = matcher.end();

                        // check if its an ignored column
                        if (ignoredCount > 0) {

                            if (isIndexIgnored(ignoredIndexes, j)) {

                                loopIgnoredCount++;
                                continue;
                            }

                        }

                    } else {

                        // not enough delims check
                        if (j < (filesColumnCount - 1) && index > (currentRowLength - 1)) {

                            outputBuffer.append("Insufficient number of column ");
                            outputBuffer.append("values provided at line ");
                            outputBuffer.append(lineNumber);
                            appendProgressErrorText(outputBuffer);

                            int yesNo = GUIUtilities
                                    .displayYesNoDialog("Insufficient number of values provided from line "
                                            + lineNumber + ".\n" + "Do you wish to continue?", "Warning");

                            if (yesNo == JOptionPane.YES_OPTION) {

                                insertLine = false;
                                break;

                            } else {

                                throw new InterruptedException();
                            }

                        } else {

                            // check if we're on a delim the matcher didn't pick up

                            int delimLength = delim.length();

                            if (row.substring(index, index + delimLength).equals(delim)) {

                                // increment index
                                index++;
                                // assign as null value
                                value = null;
                            }

                        }

                    }

                    // check if we landed on the same index - likely null value
                    if (index == lastIndex) {
                        index++;
                    }
                    lastIndex = index;

                    if (value != null && value.trim().length() == 0) {
                        value = null;
                    }

                    try {
                        ColumnData cd = columns.get(j - loopIgnoredCount);
                        setValue(value, getIndexOfColumn(columns, cd) + 1, cd.getSQLType(), trimWhitespace,
                                dateFormat);

                        if (hasColumnNames) {
                            boundVariables.put(cd, VARIABLE_BOUND);
                        }

                    } catch (ParseException e) {

                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing date value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;

                    } catch (NumberFormatException e) {

                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;
                    }

                }

                if (!insertLine) {

                    prepStmnt.clearParameters();
                    continue;
                }

                if (failed && haltOnError) {

                    processResult = FAILED;
                    break;
                }

                // execute the statement
                try {

                    // check all variables are bound if we used
                    // the column names from the first row
                    if (hasColumnNames) {

                        index = 0;
                        // check all variables are bound - insert NULL otherwise

                        for (Map.Entry<ColumnData, String> entry : boundVariables.entrySet()) {

                            ColumnData cd = entry.getKey();

                            if (VARIABLE_NOT_BOUND.equals(entry.getValue())) {

                                index = getIndexOfColumn(columns, cd);
                                prepStmnt.setNull(index + 1, cd.getSQLType());
                            }

                        }

                    }

                    if (isBatch) {
                        prepStmnt.addBatch();
                    } else {
                        int result = prepStmnt.executeUpdate();
                        tableInsertCount += result;
                        commitCount += result;
                    }

                    rollbackCount++;
                    // check the rollback segment
                    if (rollbackCount == rollbackSize) {
                        if (isBatch) {
                            int result = getBatchResult(prepStmnt.executeBatch())[0];
                            tableInsertCount += result;
                            commitCount += result;
                            prepStmnt.clearBatch();
                        }
                        conn.commit();
                        totalInsertCount += commitCount;
                        tableCommitCount = tableInsertCount;
                        rollbackCount = 0;
                        commitCount = 0;
                    }

                    // reset bound variables
                    if (hasColumnNames) {
                        for (int k = 0; k < columnCount; k++) {
                            boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                        }
                    }

                } catch (SQLException e) {
                    logException(e);
                    errorCount++;

                    if (!isBatch) {
                        outputBuffer.append("Error inserting data from line ");
                        outputBuffer.append(lineNumber);
                        outputExceptionError(null, e);
                    } else {
                        outputBuffer.append("Error on last batch execution");
                        outputExceptionError(null, e);
                    }

                    if (haltOnError) {
                        processResult = FAILED;
                        conn.rollback();
                        getParent().cancelTransfer();
                        throw new InterruptedException();
                    }

                }

            }

            // ----------------------------
            // file/table has ended here

            if (isBatch) {

                int[] batchResult = null;

                try {
                    batchResult = getBatchResult(prepStmnt.executeBatch());
                    int result = batchResult[0];
                    tableInsertCount += result;
                    commitCount += result;
                    tableCommitCount = tableInsertCount;
                } catch (BatchUpdateException e) {
                    logException(e);
                    int[] updateCounts = e.getUpdateCounts();
                    batchResult = getBatchResult(updateCounts);
                    errorCount += batchResult[1];
                    if (errorCount == 0) {
                        errorCount = 1;
                    }

                    outputBuffer.append("An error occured during the batch process: ");
                    outputBuffer.append(e.getMessage());

                    SQLException _e = e.getNextException();
                    while (_e != null) {
                        outputBuffer.append("\nNext Exception: ");
                        outputBuffer.append(_e.getMessage());
                        _e = _e.getNextException();
                    }

                    outputBuffer.append("\n\nRecords processed to the point ");
                    outputBuffer.append("where this error occurred: ");
                    outputBuffer.append(updateCounts.length);
                    appendProgressErrorText(outputBuffer);
                    processResult = FAILED;
                }

                //  Log.debug("commitCount: " + commitCount +
                //                      " batch: " + batchResult[0]);

                if (tableRowCount != tableInsertCount) {
                    conn.rollback();

                    if (onError == ImportExportProcess.STOP_TRANSFER) {
                        getParent().cancelTransfer();
                        processResult = FAILED;
                        throw new InterruptedException();
                    }

                }

            }

            boolean doCommit = true;
            if (failed && !isBatch && rollbackSize != ImportExportProcess.COMMIT_END_OF_ALL_FILES) {

                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the last block?",
                        "Confirm commit");

                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            // do the commit if ok from above
            // and if rollback size selected is end of file
            if (rollbackSize == ImportExportProcess.COMMIT_END_OF_FILE) {
                if (doCommit) {
                    conn.commit();
                    totalInsertCount += commitCount;
                    tableCommitCount = tableInsertCount;
                    commitCount = 0;
                } else {
                    conn.rollback();
                }
            }

            // update the progress display
            printTableResult(tableRowCount, tableInsertCount, dto.getTableName());
            setProgressStatus(100);

            // reset the checks
            hasDate = false;
            failed = false;

        }

        // commit the last remaining block or where
        // set to commit at the end of all files
        if (rollbackSize != ImportExportProcess.COMMIT_END_OF_FILE) {
            setProgressStatus(100);
            boolean doCommit = true;
            if (errorCount > 0 && errorCount != totalRecordCount) {
                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the changes?",
                        "Confirm commit");
                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }

        }

        processResult = SUCCESS;
    } catch (InterruptedException e) {

        if (processResult != FAILED) {
            processResult = CANCELLED;
        }

        try {
            if (prepStmnt != null) {
                prepStmnt.cancel();
            }
            if (conn != null) {
                conn.rollback();
            }
        } catch (SQLException e2) {
            outputExceptionError("Error rolling back transaction", e);
        }

    } catch (Exception e) {
        logException(e);
        outputBuffer.append("Error processing data from line ");
        outputBuffer.append(lineNumber);
        outputExceptionError("\nUnrecoverable error importing table data from file", e);

        int yesNo = GUIUtilities.displayYesNoDialog(
                "The process encountered errors.\n" + "Do you wish to commit the last transaction block?",
                "Confirm commit");
        boolean doCommit = (yesNo == JOptionPane.YES_OPTION);

        try {
            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }
        } catch (SQLException e2) {
            logException(e2);
            outputExceptionError("Error processing last transaction block", e2);
        }
        processResult = FAILED;
    } finally {
        finish();
        releaseResources(getParent().getDatabaseConnection());

        if (totalRecordCount == 0 || errorCount > 0) {
            processResult = FAILED;
        }

        setTableCount(tableCount);
        setRecordCount(totalRecordCount);
        setRecordCountProcessed(totalInsertCount);
        setErrorCount(errorCount);

        setProgressStatus(100);
        GUIUtilities.scheduleGC();

        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
            }
        }
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException e) {
            }
        }
        if (prepStmnt != null) {
            try {
                prepStmnt.close();
            } catch (SQLException e) {
            }
        }

    }

    return processResult;
}