Example usage for org.apache.commons.dbcp PoolingConnection close

List of usage examples for org.apache.commons.dbcp PoolingConnection close

Introduction

In this page you can find the example usage for org.apache.commons.dbcp PoolingConnection close.

Prototype

public synchronized void close() throws SQLException 

Source Link

Document

Close and free all PreparedStatements and CallableStatements held in the pool, and close the underlying connection.

Usage

From source file:org.opennms.features.newts.converter.eventd.EventdStresser.java

/**
 * Stress-tests eventd persistence: optionally clears the events table, sends the
 * configured number of SNMP traps, then polls the event count until every trap
 * appears to have been persisted or {@code m_persistWait} milliseconds elapse.
 *
 * @param builder builder used to construct and send the SNMP traps
 * @throws ClassNotFoundException if the JDBC driver class cannot be loaded
 * @throws SQLException on any database access error
 * @throws IllegalStateException propagated from trap sending
 * @throws InterruptedException if the polling sleep is interrupted
 */
public static void stressEventd(final SnmpTrapBuilder builder)
        throws ClassNotFoundException, SQLException, IllegalStateException, InterruptedException {

    // Validate arguments before opening a connection or deleting any events, so an
    // invalid configuration cannot destroy data or leak the connection.
    if (m_batchCount < 1) {
        throw new IllegalArgumentException("Batch count of < 1 is not allowed.");
    } else if (m_batchCount > m_trapCount) {
        throw new IllegalArgumentException("Batch count is > than trap count.");
    }

    // try/finally guarantees the pool and the underlying connection are closed even
    // if trap sending or the persistence poll fails part-way through.
    Connection connection = createConnection();
    try {
        PoolingConnection pool = new PoolingConnection(connection);
        try {
            if (m_deleteAllEvents) {
                System.out.println("Delete events from opennms DB");
                deleteAllEvents(connection);
            }

            int initialEventCount = getEventCount(pool).intValue();
            System.out.println("Initial Event Count: " + initialEventCount);

            // Use one clock (System.currentTimeMillis) consistently; the original
            // mixed Calendar and System.currentTimeMillis on the same timeline.
            long startTimeInMillis = System.currentTimeMillis();
            int trapsSent = sendTraps(builder, pool, startTimeInMillis, initialEventCount);

            int currentEventCount = getEventCount(pool).intValue() - initialEventCount;
            int finalEventCount = currentEventCount;
            long beginPersistenceCheck = System.currentTimeMillis();

            System.out.println(
                    "Watching Event Queue to complete persistence for " + m_persistWait + " milliseconds...");
            int equalCount = 0;
            while (currentEventCount < trapsSent) {
                Thread.sleep(1000);
                m_sleepMillis += 1000;

                currentEventCount = getEventCount(pool).intValue() - initialEventCount;

                // Count consecutive polls with no change; a stable count for several
                // polls means the event queue has drained.
                if (currentEventCount == finalEventCount) {
                    equalCount++;
                } else {
                    equalCount = 0;
                }

                finalEventCount = currentEventCount;

                System.out.println("Persist wait time (secs): "
                        + ((System.currentTimeMillis() - beginPersistenceCheck) / 1000));
                System.out.println("Current Event count: " + finalEventCount);

                if (System.currentTimeMillis() - beginPersistenceCheck > m_persistWait) {
                    // The printed value is divided by 1000, so label it seconds
                    // (the original message incorrectly said "millisecs").
                    System.out.println("Waited " + ((System.currentTimeMillis() - beginPersistenceCheck) / 1000)
                            + " secs for queue to flush.  Apparently missed " + (trapsSent - finalEventCount)
                            + " traps :(");
                    break;
                }

                if (equalCount > 3) {
                    System.out.println("Appears that event persistence is completed.");
                    break;
                }
            }

            if (m_deleteTestEvents) {
                deleteTestEvents();
            }

            systemReport(startTimeInMillis, trapsSent, finalEventCount);
        } finally {
            pool.close();
        }
    } finally {
        connection.close();
    }
}

From source file:org.opennms.provisiond.utils.CsvRequisitionParser.java

/**
 * Assigns the configured foreign source and a foreign ID to every node whose IP
 * addresses match the iplike query, updating the node table in place through a
 * scroll-sensitive updatable ResultSet and mirroring each node into the target
 * requisition. Runs in a single transaction: commits at the end, rolls back on
 * commit failure.
 *
 * @throws SQLException on any database access error
 * @throws UnknownHostException propagated from requisition creation
 * @throws ClassNotFoundException if the JDBC driver class cannot be loaded
 */
private static void migrateDbNodes() throws SQLException, UnknownHostException, ClassNotFoundException {

    String distinctNodesQueryStr = "  " + "SELECT nodeId AS \"nodeid\"," + "       nodeLabel AS \"nodelabel\","
            + "       foreignSource AS \"foreignsource\"," + "       foreignId AS \"foreignid\" "
            + "  FROM node " + " WHERE nodeid in (" + "  SELECT " + "DISTINCT nodeid " + "    FROM ipinterface "
            + "   WHERE iplike(ipaddr, '" + m_iplikeQuery + "')) " + "ORDER BY nodeid";

    if (m_addOnly) {
        // In add-only mode, skip nodes that already have a foreign source assigned.
        distinctNodesQueryStr = "  " + "SELECT nodeId AS \"nodeid\"," + "       nodeLabel AS \"nodelabel\","
                + "       foreignSource AS \"foreignsource\"," + "       foreignId AS \"foreignid\" "
                + "  FROM node " + " WHERE nodeid in (" + "  SELECT " + "DISTINCT nodeid "
                + "    FROM ipinterface " + "   WHERE iplike(ipaddr, '" + m_iplikeQuery + "')) "
                + "  AND foreignsource is NULL " + "ORDER BY nodeid";
    }

    // Nested try/finally so the connection, pool, statement, and result set are all
    // released on every exit path (the original leaked them all on any exception).
    Connection connection = createConnection();
    try {
        connection.setAutoCommit(false);
        PoolingConnection pool = new PoolingConnection(connection);
        try {
            Statement distinctNodesStatement = pool.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
                    ResultSet.CONCUR_UPDATABLE);
            try {
                ResultSet distinctNodesResultSet = distinctNodesStatement.executeQuery(distinctNodesQueryStr);
                try {
                    // Scroll to the end once just to report the row count.
                    distinctNodesResultSet.last();
                    int rowsFound = distinctNodesResultSet.getRow();
                    distinctNodesResultSet.beforeFirst();

                    System.out.println(rowsFound + " nodes found.");

                    int nodesMigrated = 0;
                    while (distinctNodesResultSet.next()) {
                        System.out.println("Processing row: " + distinctNodesResultSet.getRow() + "...");

                        int nodeId = distinctNodesResultSet.getInt("nodeid");
                        String primaryIp = findPrimaryIp(pool, nodeId);

                        if (primaryIp == null) {
                            System.out.println(
                                    "SNMP Primary not found.  Skipping node.  (This should never happen since it is the iplike query that finds the distinct nodes :( )");
                            continue;
                        }

                        // Foreign ID is either the node ID itself or a timestamp surrogate.
                        String foreignId = m_useNodeId ? String.valueOf(nodeId)
                                : String.valueOf(System.currentTimeMillis());

                        String label = distinctNodesResultSet.getString("nodelabel");
                        distinctNodesResultSet.updateString("foreignsource", m_foreignSource);
                        distinctNodesResultSet.updateString("foreignId", foreignId);

                        System.out.println("Updating node (" + nodeId + ":" + label + ") with foreignsource:"
                                + m_foreignSource + " and foreignId:" + foreignId);
                        distinctNodesResultSet.updateRow();
                        System.out.println("Node updated.");

                        RequisitionData rd = new RequisitionData(label, primaryIp, m_foreignSource, foreignId);

                        if (m_categoryAddExisting) {
                            rd.setCategories(fetchCategories(pool, nodeId));
                        }

                        System.out.println("Updating requistion...");
                        createOrUpdateRequistion(rd);
                        System.out.println("Requistion updated!  Next...\n");
                        nodesMigrated++;
                    }

                    try {
                        connection.commit();
                    } catch (SQLException e) {
                        e.printStackTrace();
                        connection.rollback();
                    }

                    System.out.println(nodesMigrated + " Nodes migrated to foreign source " + m_foreignSource);
                } finally {
                    distinctNodesResultSet.close();
                }
            } finally {
                distinctNodesStatement.close();
            }
        } finally {
            pool.close();
        }
    } finally {
        connection.close();
    }
}

/**
 * Returns the node's SNMP primary interface address, falling back to its
 * lowest-numbered IP when no interface is flagged primary; null if the node
 * has no IP interfaces at all.
 */
private static String findPrimaryIp(final PoolingConnection pool, final int nodeId) throws SQLException {
    System.out.println("Querying DB for SNMP Primary interface for node: " + nodeId + "...");
    String primaryIp = queryForSingleIp(pool, "" + "  SELECT ipaddr " + "    FROM ipinterface "
            + "   WHERE nodeid = " + nodeId + " " + "     AND issnmpprimary = 'P' " + "ORDER BY inet(ipaddr)"
            + "   LIMIT 1");

    if (primaryIp == null) {
        System.out.println("SNMP Primary not found.  Determining lowest numbered IP to set as Primary...");
        primaryIp = queryForSingleIp(pool, "" + "  SELECT ipaddr " + "    FROM ipinterface "
                + "   WHERE nodeid = " + nodeId + " " + "ORDER BY inet(ipaddr)" + "   LIMIT 1");
    }

    return primaryIp;
}

/** Runs a single-row ipaddr query, closing the statement and result set on all paths. */
private static String queryForSingleIp(final PoolingConnection pool, final String queryStr) throws SQLException {
    Statement statement = pool.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY);
    try {
        ResultSet resultSet = statement.executeQuery(queryStr);
        try {
            if (resultSet.next()) {
                String ip = resultSet.getString("ipaddr");
                System.out.println("SNMP Primary found: " + ip);
                return ip;
            }
            return null;
        } finally {
            resultSet.close();
        }
    } finally {
        statement.close();
    }
}

/** Fetches the node's existing surveillance category names, preserving DB order. */
private static Set<String> fetchCategories(final PoolingConnection pool, final int nodeId) throws SQLException {
    String categoriesQueryString = "" + "SELECT c.categoryname as \"categoryname\" " + "  FROM categories c "
            + "  JOIN category_node cn " + "    ON cn.categoryid = c.categoryid "
            + "  JOIN node n on n.nodeid = cn.nodeid " + " WHERE n.nodeid = " + nodeId;

    Set<String> categories = new LinkedHashSet<String>();
    Statement categoriesStatement = pool.createStatement();
    try {
        ResultSet crs = categoriesStatement.executeQuery(categoriesQueryString);
        try {
            while (crs.next()) {
                categories.add(crs.getString("categoryname"));
            }
        } finally {
            crs.close();
        }
    } finally {
        categoriesStatement.close();
    }
    return categories;
}