Example usage for org.apache.commons.dbutils DbUtils closeQuietly

Introduction

On this page you can find example usages of org.apache.commons.dbutils DbUtils closeQuietly, collected from open-source projects.

Prototype

public static void closeQuietly(Statement stmt) 

Document

Close a Statement, avoid closing if null and hide any SQLExceptions that occur.
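The examples that follow also use the companion overloads DbUtils.closeQuietly(Connection) and DbUtils.closeQuietly(ResultSet). As a quick orientation, here is a minimal sketch of the usual pattern; the DataSource, table name and query are placeholders rather than code taken from any of the projects below.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import javax.sql.DataSource;

import org.apache.commons.dbutils.DbUtils;

public class CloseQuietlyExample {

    // Placeholder: obtain the DataSource from your connection pool or JNDI.
    private final DataSource dataSource;

    public CloseQuietlyExample(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public int countRows() throws SQLException {
        Connection connection = null;
        PreparedStatement stmt = null;
        ResultSet resultSet = null;
        try {
            connection = dataSource.getConnection();
            stmt = connection.prepareStatement("SELECT COUNT(*) FROM example_table");
            resultSet = stmt.executeQuery();
            return resultSet.next() ? resultSet.getInt(1) : 0;
        } finally {
            // Close in reverse order of acquisition; each call ignores a null
            // argument and swallows any SQLException thrown by close().
            DbUtils.closeQuietly(resultSet);
            DbUtils.closeQuietly(stmt);
            DbUtils.closeQuietly(connection);
        }
    }
}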

Usage

From source file:azkaban.project.JdbcProjectLoader.java

@Override
public void updateProjectProperty(Project project, Props props) throws ProjectManagerException {
    Connection connection = getConnection();
    try {
        updateProjectProperty(connection, project, props.getSource(), props);
        connection.commit();
    } catch (SQLException e) {
        throw new ProjectManagerException("Error uploading project property files", e);
    } catch (IOException e) {
        throw new ProjectManagerException("Error uploading project property file", e);
    } finally {
        DbUtils.closeQuietly(connection);
    }
}

From source file:io.personium.diff.App.java

/**
 * Checks the given source table for missing or inconsistent records and logs each mismatch.
 * @param connection MySQL connection
 * @param indexName index name
 * @param tableName table name
 * @param srcTable name of the source DB table being checked
 * @param sql SQL used for the check
 * @param missType type of mismatch to report
 */
private void checkRecordsMismatch(final Connection connection, final String indexName, final String tableName,
        final String srcTable, final String sql, final String missType) {
    log.info("[" + srcTable + "] " + " start.");
    PreparedStatement stmt = null;
    ResultSet resultSet = null;
    try {
        stmt = connection.prepareStatement(sql);
        boolean isfailed = false;
        resultSet = stmt.executeQuery();
        while (resultSet.next()) {
            if (!isfailed) {
                isfailed = true;
                log.warn("Detected data missing");
            }
            int index = 1;
            String id = resultSet.getString(index++);
            Long updated = resultSet.getLong(index++);
            log.info("[Inconsistency] " + id + "," + updated + "," + indexName + "," + tableName + ","
                    + missType);
        }
    } catch (SQLException e) {
        log.warn("Faild to checkRecordsMismatch");
        log.info(e.getMessage());
    } finally {
        DbUtils.closeQuietly(resultSet);
        DbUtils.closeQuietly(stmt);
    }
    log.info("[" + srcTable + "] " + " complete.");
}

From source file:azkaban.project.JdbcProjectLoader.java

@Override
public void cleanOlderProjectVersion(int projectId, int version) throws ProjectManagerException {
    Connection connection = getConnection();

    try {
        cleanOlderProjectVersionFlows(connection, projectId, version);
        cleanOlderProjectVersionProperties(connection, projectId, version);
        cleanOlderProjectFiles(connection, projectId, version);
        cleanOlderProjectVersion(connection, projectId, version);
    } finally {
        DbUtils.closeQuietly(connection);
    }
}

From source file:azkaban.project.JdbcProjectLoader.java

private Connection getConnection() throws ProjectManagerException {
    Connection connection = null;
    try {
        connection = super.getDBConnection(false);
    } catch (Exception e) {
        DbUtils.closeQuietly(connection);
        throw new ProjectManagerException("Error getting DB connection.", e);
    }

    return connection;
}

From source file:com.mirth.connect.server.controllers.DefaultConfigurationController.java

private boolean isDatabaseRunning() {
    Statement statement = null;
    Connection connection = null;
    SqlConfig.getSqlSessionManager().startManagedSession();

    try {
        connection = SqlConfig.getSqlSessionManager().getConnection();
        statement = connection.createStatement();
        statement.execute("SELECT 1 FROM CHANNEL");
        return true;
    } catch (Exception e) {
        logger.warn("could not retrieve status of database", e);
        return false;
    } finally {
        DbUtils.closeQuietly(statement);
        DbUtils.closeQuietly(connection);
        if (SqlConfig.getSqlSessionManager().isManagedSessionStarted()) {
            SqlConfig.getSqlSessionManager().close();
        }
    }
}

From source file:net.sourceforge.seqware.webservice.resources.queries.WorkflowRuntimeResource.java

private void recursiveFindProcessings(Integer processingId, Integer workflowRunId, String workflowName) {
    if (seen.get(processingId) != null && seen.get(processingId)) {
        return;
    }
    seen.put(processingId, true);

    ResultSet rs = null;
    ResultSet childRs = null;
    try {
        rs = DBAccess.get().executeQuery(
                "select p.processing_id, p.algorithm, p.status, p.create_tstmp, EXTRACT(EPOCH from p.run_stop_tstmp - p.run_start_tstmp) as length from processing as p where p.processing_id = "
                        + processingId + " and p.status = 'success' order by p.create_tstmp");

        HashMap<Integer, Boolean> childProcessingHash = new HashMap<Integer, Boolean>();

        if (rs.next()) {

            String algorithm = rs.getString(2);
            String status = rs.getString(3);
            Timestamp create = rs.getTimestamp(4);
            Integer runtime = rs.getInt(5);

            algos.put(algorithm, true);
            wrIds.put(workflowRunId, true);

            //HashMap<String, HashMap<String, HashMap<String, HashMap<String, Integer>>>> d = new HashMap<String, HashMap<String, HashMap<String, HashMap<String, Integer>>>>();

            // workflow name -> algorithm -> workflow run id -> runtime -> int
            //                                               -> counts  -> int
            HashMap<String, HashMap<String, HashMap<String, Integer>>> algorithmsHash = d.get(workflowName);
            if (algorithmsHash == null) {
                algorithmsHash = new HashMap<String, HashMap<String, HashMap<String, Integer>>>();
                d.put(workflowName, algorithmsHash);
            }

            HashMap<String, HashMap<String, Integer>> workflowRunHash = algorithmsHash.get(algorithm);
            if (workflowRunHash == null) {
                workflowRunHash = new HashMap<String, HashMap<String, Integer>>();
                algorithmsHash.put(algorithm, workflowRunHash);
            }

            HashMap<String, Integer> runtimeHash = workflowRunHash.get(workflowRunId.toString());
            if (runtimeHash == null) {
                runtimeHash = new HashMap<String, Integer>();
                workflowRunHash.put(workflowRunId.toString(), runtimeHash);
            }

            Integer runtimes = runtimeHash.get("runtime");
            if (runtimes == null) {
                runtimes = runtime;
            } else {
                runtimes += runtime;
            }
            runtimeHash.put("runtime", runtimes);

            Integer counts = runtimeHash.get("counts");
            if (counts == null) {
                counts = 1;
            } else {
                counts++;
            }
            runtimeHash.put("counts", counts);

            childRs = DBAccess.get().executeQuery(
                    "select p.processing_id, p.algorithm, p.status, p.create_tstmp from processing as p, processing_relationship as pr where pr.parent_id = "
                            + processingId
                            + " and pr.child_id = p.processing_id and p.ancestor_workflow_run_id = "
                            + workflowRunId);

            while (childRs.next()) {

                childProcessingHash.put(childRs.getInt(1), true);
            }

        }

        // now recursively call
        for (Integer childProcId : childProcessingHash.keySet()) {
            recursiveFindProcessings(childProcId, workflowRunId, workflowName);
        }

    } catch (SQLException e) {
        // TODO Auto-generated catch block
        System.err.println(e.getMessage());
        e.printStackTrace();
    } finally {
        DBAccess.close();
        DbUtils.closeQuietly(rs);
        DbUtils.closeQuietly(childRs);
    }
}

From source file:nl.b3p.catalog.arcgis.ArcSDE10JDBCHelper.java

@Override
public List<Dir> getFeatureDatasets(final String currentPath) throws NamingException, SQLException {
    Connection c = getConnection();
    try {

        // Nested featuredatasets are not possible to create with ArcCatalog,
        // so no need to check relationships 

        String sql = "select i.objectid, i.name from " + getTableName(TABLE_ITEMS) + " i where type = ?";

        ResultSetHandler<List<Dir>> h = new ResultSetHandler<List<Dir>>() {
            public List<Dir> handle(ResultSet rs) throws SQLException {
                List<Dir> l = new ArrayList<Dir>();
                while (rs.next()) {
                    l.add(new Dir(rs.getString(2) + "", currentPath + rs.getInt(1)));
                }
                return l;
            }
        };
        return new QueryRunner().query(c, sql, h, TYPE_FEATURE_DATASET);
    } finally {
        DbUtils.closeQuietly(c);
    }
}

From source file:nl.b3p.catalog.arcgis.ArcSDE10JDBCHelper.java

@Override
public List<DirEntry> getFeatureClasses(final String currentPath, ArcSDEJDBCDataset parent)
        throws NamingException, SQLException {
    Connection c = getConnection();
    try {
        String sql = "select i.objectid, i.name from " + getTableName(TABLE_ITEMS) + " i " + "join "
                + getTableName(TABLE_ITEMRELATIONSHIPS) + " r on (r.destid = i.uuid) " + "join "
                + getTableName(TABLE_ITEMS) + " parent_i on (parent_i.uuid = r.originid) "
                + "where i.type in (?,?) ";

        if (parent == null) {
            sql += "and parent_i.path = '\\'";
        } else {
            sql += "and parent_i.objectid = ?";
        }

        ResultSetHandler<List<DirEntry>> h = new ResultSetHandler<List<DirEntry>>() {
            public List<DirEntry> handle(ResultSet rs) throws SQLException {
                List<DirEntry> l = new ArrayList<DirEntry>();
                while (rs.next()) {
                    l.add(new DirEntry(rs.getString(2) + "", currentPath + rs.getInt(1)));
                }
                return l;
            }
        };
        if (parent == null) {
            return new QueryRunner().query(c, sql, h, TYPE_FEATURE_CLASS, TYPE_RASTER);
        } else {
            return new QueryRunner().query(c, sql, h, TYPE_FEATURE_CLASS, TYPE_RASTER, parent.getObjectID());
        }
    } finally {
        DbUtils.closeQuietly(c);
    }
}

From source file:nl.b3p.catalog.arcgis.ArcSDE10JDBCHelper.java

@Override
public String getMetadata(ArcSDEJDBCDataset dataset) throws NamingException, SQLException, IOException {
    Connection c = getConnection();
    try {
        String sql = "select documentation from " + getTableName(TABLE_ITEMS) + " where objectid = ?";
        Clob xml = (Clob) new QueryRunner().query(c, sql, new ScalarHandler(), dataset.getObjectID());
        if (xml == null) {
            return DocumentHelper.EMPTY_METADATA;
        }
        return IOUtils.toString(xml.getCharacterStream());
    } finally {
        DbUtils.closeQuietly(c);
    }
}

From source file:nl.b3p.catalog.arcgis.ArcSDE10JDBCHelper.java

@Override
public void saveMetadata(ArcSDEJDBCDataset dataset, String metadata) throws Exception {
    Connection c = getConnection();
    PreparedStatement ps = null;
    try {

        // Strip the encoding from the XML declaration, otherwise MSSQL throws an
        // "unable to switch the encoding" error on the xml column type

        Document doc = DocumentHelper.getMetadataDocument(metadata);
        metadata = new XMLOutputter(Format.getPrettyFormat().setOmitEncoding(true)).outputString(doc);

        String sql = "update " + getTableName(TABLE_ITEMS) + " set documentation = ? where objectid = ?";
        ps = c.prepareStatement(sql);
        ps.setCharacterStream(1, new StringReader(metadata), metadata.length());
        ps.setObject(2, dataset.getObjectID());
        int rowsAffected = ps.executeUpdate();
        if (rowsAffected != 1) {
            throw new Exception("Updating metadata should affect maximum one row; got rows affected count of "
                    + rowsAffected);
        }
    } finally {
        DbUtils.closeQuietly(ps);
        DbUtils.closeQuietly(c);
    }
}