Example usage for org.springframework.jdbc.datasource DataSourceUtils releaseConnection

List of usage examples for org.springframework.jdbc.datasource DataSourceUtils releaseConnection

Introduction

On this page you can find example usages of org.springframework.jdbc.datasource DataSourceUtils releaseConnection.

Prototype

public static void releaseConnection(@Nullable Connection con, @Nullable DataSource dataSource) 

Source Link

Document

Close the given Connection, obtained from the given DataSource, if it is not managed externally (that is, not bound to the thread).

Usage

From source file:org.agnitas.dao.impl.RecipientDaoImpl.java

/**
 * Reads the column structure of the company's customer table.
 * The metadata is obtained via a "WHERE 1=0" query, which returns no rows
 * but still carries the full ResultSetMetaData. System-maintained columns
 * (change_date, creation_date, datasource_id) are skipped.
 *
 * @param companyID ID of the company whose customer_&lt;companyID&gt;_tbl is inspected
 * @return map of column name to CsvColInfo for every exposed column
 */
@Override
public CaseInsensitiveMap<CsvColInfo> readDBColumns(int companyID) {
    // companyID is an int, so concatenating it cannot inject SQL.
    String sqlGetTblStruct = "SELECT * FROM customer_" + companyID + "_tbl WHERE 1=0";
    CaseInsensitiveMap<CsvColInfo> dbAllColumns = new CaseInsensitiveMap<CsvColInfo>();
    DataSource ds = (DataSource) this.applicationContext.getBean("dataSource");
    Connection con = DataSourceUtils.getConnection(ds);
    Statement stmt = null;
    ResultSet rset = null;
    try {
        stmt = con.createStatement();
        rset = stmt.executeQuery(sqlGetTblStruct);
        ResultSetMetaData meta = rset.getMetaData();

        for (int i = 1; i <= meta.getColumnCount(); i++) {
            String columnName = meta.getColumnName(i);
            // System-maintained columns are never offered for import/export.
            if (!columnName.equals("change_date") && !columnName.equals("creation_date")
                    && !columnName.equals("datasource_id")) {
                CsvColInfo aCol = new CsvColInfo();
                aCol.setName(columnName);
                aCol.setLength(meta.getColumnDisplaySize(i));
                aCol.setActive(false);
                aCol.setNullable(meta.isNullable(i) != 0);
                aCol.setType(dbTypeToCsvType(meta.getColumnType(i)));
                dbAllColumns.put(columnName, aCol);
            }
        }
    } catch (Exception e) {
        logger.error("readDBColumns (companyID: " + companyID + ")", e);
    } finally {
        // Close JDBC resources even when reading the metadata failed
        // (previously leaked on exception), then hand the connection back.
        if (rset != null) {
            try {
                rset.close();
            } catch (Exception e) {
                logger.error("readDBColumns: could not close result set", e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception e) {
                logger.error("readDBColumns: could not close statement", e);
            }
        }
        DataSourceUtils.releaseConnection(con, ds);
    }
    return dbAllColumns;
}

From source file:org.agnitas.util.AgnUtils.java

/**
 * Builds a BeanShell {@code Interpreter} whose namespace is pre-populated with
 * the profile-field values of a single recipient, read from the company's
 * customer table. Column values are bound as typed variables (Integer, Double,
 * String or Date depending on the JDBC column type); a virtual "sysdate"
 * variable holding the current time is added as well.
 *
 * @param cID        company ID (selects the customer_&lt;cID&gt;_tbl table)
 * @param customerID ID of the recipient whose values are loaded
 * @param con        application context used to look up the "dataSource" bean
 * @return the prepared interpreter, or null if loading the profile failed
 */
public static Interpreter getBshInterpreter(int cID, int customerID, ApplicationContext con) {
    DataSource ds = (DataSource) con.getBean("dataSource");
    Interpreter aBsh = new Interpreter();
    NameSpace aNameSpace = aBsh.getNameSpace();
    aNameSpace.importClass("org.agnitas.util.AgnUtils");

    // Both IDs are ints, so string concatenation cannot inject SQL here.
    String sqlStatement = "select * from customer_" + cID + "_tbl cust where cust.customer_id=" + customerID;
    Connection dbCon = DataSourceUtils.getConnection(ds);
    Statement stmt = null;
    ResultSet rset = null;

    try {
        stmt = dbCon.createStatement();
        rset = stmt.executeQuery(sqlStatement);
        ResultSetMetaData aMeta = rset.getMetaData();

        if (rset.next()) {
            for (int i = 1; i <= aMeta.getColumnCount(); i++) {
                switch (aMeta.getColumnType(i)) {
                case java.sql.Types.BIGINT:
                case java.sql.Types.INTEGER:
                case java.sql.Types.NUMERIC:
                case java.sql.Types.SMALLINT:
                case java.sql.Types.TINYINT:
                    // Preserve SQL NULL as a typed null variable instead of 0.
                    aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.lang.Integer.class,
                            rset.getObject(i) != null ? Integer.valueOf(rset.getInt(i)) : null, null);
                    break;

                case java.sql.Types.DECIMAL:
                case java.sql.Types.DOUBLE:
                case java.sql.Types.FLOAT:
                    // Preserve SQL NULL as a typed null variable instead of 0.0.
                    aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.lang.Double.class,
                            rset.getObject(i) != null ? Double.valueOf(rset.getDouble(i)) : null, null);
                    break;

                case java.sql.Types.CHAR:
                case java.sql.Types.LONGVARCHAR:
                case java.sql.Types.VARCHAR:
                    aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.lang.String.class,
                            rset.getString(i), null);
                    break;

                case java.sql.Types.DATE:
                case java.sql.Types.TIME:
                case java.sql.Types.TIMESTAMP:
                    aNameSpace.setTypedVariable(aMeta.getColumnName(i), java.util.Date.class,
                            rset.getTimestamp(i), null);
                    break;
                default:
                    logger.error("Ignoring: " + aMeta.getColumnName(i));
                }
            }
        }
        // add virtual column "sysdate"
        aNameSpace.setTypedVariable(AgnUtils.getHibernateDialect().getCurrentTimestampSQLFunctionName(),
                Date.class, new Date(), null);
    } catch (Exception e) {
        sendExceptionMail("Sql: " + sqlStatement, e);
        // Log with the exception object so the stack trace is preserved.
        logger.error("getBshInterpreter: " + e.getMessage(), e);
        aBsh = null;
    } finally {
        // Close JDBC resources even on failure (previously leaked on exception).
        if (rset != null) {
            try {
                rset.close();
            } catch (Exception e) {
                logger.error("getBshInterpreter: could not close result set", e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception e) {
                logger.error("getBshInterpreter: could not close statement", e);
            }
        }
        DataSourceUtils.releaseConnection(dbCon, ds);
    }
    return aBsh;
}

From source file:org.agnitas.web.ExportWizardAction.java

/**
 * Creates a SQL query for getting recipients according to the export definition
 * conditions, streams the matching recipients from the database and writes them
 * as a CSV file ("emm_export.csv") inside a temporary zip archive, which is then
 * attached to the form.
 *
 * @param aForm ExportWizardForm object carrying the export options and receiving the result file
 * @param aContext application context
 * @param req HTTP request, used to resolve the acting admin's company
 */
protected void collectContent(ExportWizardForm aForm, ApplicationContext aContext, HttpServletRequest req) {
    int companyID = AgnUtils.getAdmin(req).getCompany().getId();
    // Remember the default locale; it is temporarily switched to "en" below and
    // restored in the finally block.
    Locale loc_old = Locale.getDefault();

    aForm.setDbExportStatusMessages(new LinkedList<String>());
    aForm.setDbExportStatus(100);
    aForm.setLinesOK(0);

    Target aTarget = null;
    if (aForm.getTargetID() != 0) {
        aTarget = targetDao.getTarget(aForm.getTargetID(), companyID);
        aForm.setTargetID(aTarget.getId());
    }

    String charset = aForm.getCharset();
    if (charset == null || charset.trim().equals("")) {
        // Fall back to UTF-8 when the form specifies no charset.
        charset = "UTF-8";
        aForm.setCharset(charset); // charset also in form
    }

    // Build the selected-columns fragment of the SELECT clause.
    StringBuffer usedColumnsString = new StringBuffer();
    int exportStartColumn = 2;
    for (String columnName : aForm.getColumns()) {
        // customer_id is selected by default in the base sql statement
        if ("customer_id".equalsIgnoreCase(columnName)) {
            // mark customer_id to be exported too
            exportStartColumn = 1;
        } else {
            // NOTE(review): column names come from the form and are concatenated into
            // SQL unescaped — confirm they are validated against the table structure upstream.
            usedColumnsString.append(", cust." + columnName + " " + columnName);
        }
    }

    if (aForm.getMailinglists() != null) {
        // For each selected mailinglist, add the recipient's binding status and
        // change date as correlated subqueries on the binding table.
        for (int i = 0; i < aForm.getMailinglists().length; i++) {
            String ml = aForm.getMailinglists()[i];
            usedColumnsString.append(", (SELECT m" + ml + ".user_status FROM customer_" + companyID
                    + "_binding_tbl m" + ml + " WHERE m" + ml + ".customer_id = cust.customer_id AND m" + ml
                    + ".mailinglist_id = " + ml + " AND m" + ml + ".mediatype = 0) as Userstate_Mailinglist_"
                    + ml);
            usedColumnsString.append(", (SELECT m" + ml + "." + AgnUtils.changeDateName() + " FROM customer_"
                    + companyID + "_binding_tbl m" + ml + " WHERE m" + ml
                    + ".customer_id = cust.customer_id AND m" + ml + ".mailinglist_id = " + ml + " AND m" + ml
                    + ".mediatype = 0) as Mailinglist_" + ml + "_Timestamp");
        }
    }

    // Assemble the FROM/WHERE parts; the binding table is only joined when a
    // mailinglist, user type or user status restriction applies.
    StringBuffer whereString = new StringBuffer("");
    StringBuffer customerTableSql = new StringBuffer("SELECT * FROM (SELECT DISTINCT cust.customer_id"
            + usedColumnsString.toString() + " FROM customer_" + companyID + "_tbl cust");
    if (aForm.getMailinglistID() != -1 && (aForm.getMailinglistID() > 0 || !aForm.getUserType().equals("E")
            || aForm.getUserStatus() != 0)) {
        customerTableSql.append(", customer_" + companyID + "_binding_tbl bind");
        whereString.append(" cust.customer_id = bind.customer_id AND bind.mediatype=0");
    }

    if (aForm.getMailinglistID() > 0) {
        whereString.append(" and bind.mailinglist_id = " + aForm.getMailinglistID());
    }

    if (aForm.getMailinglistID() == NO_MAILINGLIST) {
        // Export only recipients without any binding entry.
        // NOTE(review): this clause is appended without a leading AND — it relies on the
        // binding join (and its condition) not having been added above; confirm
        // NO_MAILINGLIST is always a value that skips that branch.
        whereString.append(" NOT EXISTS (SELECT 1 FROM customer_" + companyID
                + "_binding_tbl bind WHERE cust.customer_id = bind.customer_id) ");
    } else {
        if (!aForm.getUserType().equals("E")) {
            // "E" means no user-type restriction; other values are SQL-escaped.
            whereString
                    .append(" AND bind.user_type = '" + SafeString.getSQLSafeString(aForm.getUserType()) + "'");
        }

        if (aForm.getUserStatus() != 0) {
            whereString.append(" AND bind.user_status = " + aForm.getUserStatus());
        }
    }

    if (aForm.getTargetID() != 0) {
        if (aForm.getMailinglistID() != 0 || !aForm.getUserType().equals("E") || aForm.getUserStatus() != 0) {
            whereString.append(" AND ");
        }
        whereString.append(" (" + aTarget.getTargetSQL() + ")");
    }

    String datesParametersString = getDatesParametersString(aForm);
    if (!StringUtils.isEmpty(whereString.toString())) {
        whereString.append(" and ");
    }
    whereString.append(datesParametersString);

    if (whereString.length() > 0) {
        customerTableSql.append(" WHERE " + whereString);
    }

    AgnUtils.userlogger().info("Generated export SQL query: " + customerTableSql);

    Connection con = DataSourceUtils.getConnection(dataSource);

    aForm.setCsvFile(null);
    PrintWriter out = null;
    Statement stmt = null;
    ResultSet rset = null;
    try {
        File systemUploadDirectory = AgnUtils.createDirectory(AgnUtils.getDefaultValue("system.upload"));
        File outFile = File.createTempFile("exp" + companyID + "_", ".zip", systemUploadDirectory);
        ZipOutputStream aZip = new ZipOutputStream(new FileOutputStream(outFile));
        AgnUtils.userlogger().info("Export file <" + outFile.getAbsolutePath() + ">");

        stmt = con.createStatement();
        rset = stmt.executeQuery(customerTableSql.toString());

        aZip.putNextEntry(new ZipEntry("emm_export.csv"));
        // Switch to a fixed locale so value formatting is stable during the export;
        // restored in the finally block.
        Locale.setDefault(new Locale("en"));
        // Closing `out` later also closes the wrapped writer and the zip stream.
        out = new PrintWriter(new BufferedWriter(new OutputStreamWriter(aZip, charset)));

        ResultSetMetaData mData = rset.getMetaData();
        int columnCount = mData.getColumnCount();

        // Write CSV-Header line
        for (int i = exportStartColumn; i <= columnCount; i++) {
            if (i > exportStartColumn) {
                out.print(aForm.getSeparator());
            }
            String columnName = mData.getColumnName(i);
            out.print(aForm.getDelimiter() + escapeChars(columnName, aForm.getDelimiter())
                    + aForm.getDelimiter());
        }
        out.print("\n");

        // Write data lines
        while (rset.next()) {
            for (int i = exportStartColumn; i <= columnCount; i++) {
                if (i > exportStartColumn) {
                    out.print(aForm.getSeparator());
                }

                String aValue;
                try {
                    aValue = rset.getString(i);
                } catch (Exception ex) {
                    aValue = null;
                    // Exceptions should not break the export, but should be logged
                    AgnUtils.userlogger().error("Exception in export:collectContent:", ex);
                }

                if (aValue == null) {
                    // null values should be displayed as empty string
                    aValue = "";
                } else {
                    aValue = escapeChars(aValue, aForm.getDelimiter());
                    aValue = aForm.getDelimiter() + aValue + aForm.getDelimiter();
                }
                out.print(aValue);
            }
            out.print("\n");
            aForm.setLinesOK(aForm.getLinesOK() + 1);
        }
        aForm.setCsvFile(outFile);
    } catch (Exception e) {
        // NOTE(review): consider logging the stack trace via the logger instead of printStackTrace.
        AgnUtils.userlogger().error("collectContent: " + e);
        e.printStackTrace();
    } finally {
        if (out != null) {
            out.close();
        }

        if (rset != null) {
            try {
                rset.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }

        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }

        DataSourceUtils.releaseConnection(con, dataSource);
        // 1001 appears to signal "export finished" to the status view — TODO confirm.
        aForm.setDbExportStatus(1001);
        Locale.setDefault(loc_old);
    }
}

From source file:org.agnitas.web.ImportWizardForm.java

/**
 * Reads columns from database./*from ww w. j  a  v  a  2  s  . co  m*/
 */
protected void readDBColumns(int companyID, DataSource ds) {
    String sqlGetTblStruct = "SELECT * FROM customer_" + companyID + "_tbl WHERE 1=0";
    CsvColInfo aCol = null;
    String colType = null;

    dbAllColumns = new CaseInsensitiveMap();
    Connection con = DataSourceUtils.getConnection(ds);
    try {
        Statement stmt = con.createStatement();
        ResultSet rset = stmt.executeQuery(sqlGetTblStruct);
        ResultSetMetaData meta = rset.getMetaData();

        for (int i = 1; i <= meta.getColumnCount(); i++) {
            if (!meta.getColumnName(i).equals("change_date") && !meta.getColumnName(i).equals("creation_date")
                    && !meta.getColumnName(i).equals("datasource_id")) {
                if (meta.getColumnName(i).equals("customer_id")) {
                    if (status == null) {
                        initStatus(getWebApplicationContext());
                    }
                    if (!(this.mode == ImportWizardForm.MODE_ONLY_UPDATE
                            && this.status.getKeycolumn().equals("customer_id"))) {
                        continue;
                    }
                }

                aCol = new CsvColInfo();
                aCol.setName(meta.getColumnName(i));
                aCol.setLength(meta.getColumnDisplaySize(i));
                aCol.setType(CsvColInfo.TYPE_UNKNOWN);
                aCol.setActive(false);
                colType = meta.getColumnTypeName(i);
                if (colType.startsWith("VARCHAR")) {
                    aCol.setType(CsvColInfo.TYPE_CHAR);
                } else if (colType.startsWith("CHAR")) {
                    aCol.setType(CsvColInfo.TYPE_CHAR);
                } else if (colType.startsWith("NUM")) {
                    aCol.setType(CsvColInfo.TYPE_NUMERIC);
                } else if (colType.startsWith("INTEGER")) {
                    aCol.setType(CsvColInfo.TYPE_NUMERIC);
                } else if (colType.startsWith("DOUBLE")) {
                    aCol.setType(CsvColInfo.TYPE_NUMERIC);
                } else if (colType.startsWith("TIME")) {
                    aCol.setType(CsvColInfo.TYPE_DATE);
                } else if (colType.startsWith("DATE")) {
                    aCol.setType(CsvColInfo.TYPE_DATE);
                }
                this.dbAllColumns.put(meta.getColumnName(i), aCol);
            }
        }
        rset.close();
        stmt.close();
    } catch (Exception e) {
        AgnUtils.logger().error("readDBColumns: " + e);
    }
    DataSourceUtils.releaseConnection(con, ds);
}

From source file:org.apache.syncope.core.persistence.jpa.content.XMLContentExporter.java

/**
 * Exports the content of all allowed tables of the given domain as a single
 * XML document written to the supplied stream. Workflow tables (identified by
 * the given prefixes) are excluded; tables are dumped in foreign-key order so
 * the resulting XML can be re-imported without constraint violations.
 */
@Override
public void export(final String domain, final OutputStream os, final String uwfPrefix, final String gwfPrefix,
        final String awfPrefix) throws SAXException, TransformerConfigurationException {

    // Register the workflow table prefixes (user / group / any-object) for exclusion.
    if (StringUtils.isNotBlank(uwfPrefix)) {
        TABLE_PREFIXES_TO_BE_EXCLUDED.add(uwfPrefix);
    }
    if (StringUtils.isNotBlank(gwfPrefix)) {
        TABLE_PREFIXES_TO_BE_EXCLUDED.add(gwfPrefix);
    }
    if (StringUtils.isNotBlank(awfPrefix)) {
        TABLE_PREFIXES_TO_BE_EXCLUDED.add(awfPrefix);
    }

    // Set up a SAX-based serializer that writes indented UTF-8 XML to the stream.
    StreamResult streamResult = new StreamResult(os);
    final SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory
            .newInstance();
    // Enable secure processing on the transformer factory.
    transformerFactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);

    TransformerHandler handler = transformerFactory.newTransformerHandler();
    Transformer serializer = handler.getTransformer();
    serializer.setOutputProperty(OutputKeys.ENCODING, StandardCharsets.UTF_8.name());
    serializer.setOutputProperty(OutputKeys.INDENT, "yes");
    handler.setResult(streamResult);
    handler.startDocument();
    handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl());

    DataSource dataSource = domainsHolder.getDomains().get(domain);
    if (dataSource == null) {
        throw new IllegalArgumentException("Could not find DataSource for domain " + domain);
    }

    String dbSchema = ApplicationContextProvider.getBeanFactory().getBean(domain + "DatabaseSchema",
            String.class);

    Connection conn = null;
    ResultSet rs = null;
    try {
        conn = DataSourceUtils.getConnection(dataSource);
        final DatabaseMetaData meta = conn.getMetaData();

        rs = meta.getTables(null, StringUtils.isBlank(dbSchema) ? null : dbSchema, null,
                new String[] { "TABLE" });

        // Case-insensitive sorted set, so the export order is stable across databases.
        final Set<String> tableNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);

        while (rs.next()) {
            String tableName = rs.getString("TABLE_NAME");
            LOG.debug("Found table {}", tableName);
            if (isTableAllowed(tableName)) {
                tableNames.add(tableName);
            }
        }

        LOG.debug("Tables to be exported {}", tableNames);

        // then sort tables based on foreign keys and dump
        for (String tableName : sortByForeignKeys(dbSchema, conn, tableNames)) {
            // A failure on one table is logged but does not abort the whole export.
            try {
                doExportTable(handler, dbSchema, conn, tableName,
                        TABLES_TO_BE_FILTERED.get(tableName.toUpperCase()));
            } catch (Exception e) {
                LOG.error("Failure exporting table {}", tableName, e);
            }
        }
    } catch (SQLException e) {
        LOG.error("While exporting database content", e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("While closing tables result set", e);
            }
        }

        // NOTE(review): releaseConnection() already closes the connection unless it is
        // transaction-bound; the explicit close() below then also closes a thread-bound
        // connection — confirm this double handling is intentional.
        DataSourceUtils.releaseConnection(conn, dataSource);
        if (conn != null) {
            try {
                if (!conn.isClosed()) {
                    conn.close();
                }
            } catch (SQLException e) {
                LOG.error("While releasing connection", e);
            }
        }
    }

    handler.endElement("", "", ROOT_ELEMENT);
    handler.endDocument();
}

From source file:org.apache.syncope.core.provisioning.java.job.JobManagerImpl.java

/**
 * Tells whether the given job is currently fired on another node of a
 * clustered Quartz job store, by querying the FIRED_TRIGGERS table of the
 * master domain directly.
 *
 * @param jobKey key of the job to look up
 * @return true if any cluster node currently has the job fired
 * @throws SchedulerException if the scheduler metadata or the SQL lookup fails
 */
private boolean isRunningElsewhere(final JobKey jobKey) throws SchedulerException {
    // A non-clustered job store cannot be running the job anywhere else.
    if (!scheduler.getScheduler().getMetaData().isJobStoreClustered()) {
        return false;
    }

    DataSource dataSource = domainsHolder.getDomains().get(SyncopeConstants.MASTER_DOMAIN);
    Connection conn = DataSourceUtils.getConnection(dataSource);
    PreparedStatement query = null;
    ResultSet firedTriggers = null;
    try {
        query = conn.prepareStatement("SELECT 1 FROM " + Constants.DEFAULT_TABLE_PREFIX + "FIRED_TRIGGERS "
                + "WHERE JOB_NAME = ? AND JOB_GROUP = ?");
        query.setString(1, jobKey.getName());
        query.setString(2, jobKey.getGroup());

        firedTriggers = query.executeQuery();
        // Any row at all means some cluster member has this job fired right now.
        return firedTriggers.next();
    } catch (SQLException e) {
        throw new SchedulerException(e);
    } finally {
        IOUtil.quietClose(firedTriggers);
        IOUtil.quietClose(query);
        DataSourceUtils.releaseConnection(conn, dataSource);
    }
}

From source file:org.apache.syncope.core.util.ContentExporter.java

/**
 * Exports the content of all allowed tables as a single XML document written
 * to the supplied stream. Tables whose names start with the given workflow
 * prefix are excluded; tables are dumped in foreign-key order so the
 * resulting XML can be re-imported without constraint violations.
 */
public void export(final OutputStream os, final String wfTablePrefix)
        throws SAXException, TransformerConfigurationException {

    // Register the workflow table prefix for exclusion.
    if (StringUtils.isNotBlank(wfTablePrefix)) {
        TABLE_PREFIXES_TO_BE_EXCLUDED.add(wfTablePrefix);
    }

    // Set up a SAX-based serializer that writes indented XML to the stream.
    StreamResult streamResult = new StreamResult(os);
    final SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory
            .newInstance();

    TransformerHandler handler = transformerFactory.newTransformerHandler();
    Transformer serializer = handler.getTransformer();
    serializer.setOutputProperty(OutputKeys.ENCODING, SyncopeConstants.DEFAULT_ENCODING);
    serializer.setOutputProperty(OutputKeys.INDENT, "yes");
    handler.setResult(streamResult);
    handler.startDocument();
    handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl());

    Connection conn = null;
    ResultSet rs = null;
    try {
        conn = DataSourceUtils.getConnection(dataSource);
        final DatabaseMetaData meta = conn.getMetaData();

        final String schema = dbSchema;

        rs = meta.getTables(null, schema, null, new String[] { "TABLE" });

        // Case-insensitive sorted set, so the export order is stable across databases.
        final Set<String> tableNames = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);

        while (rs.next()) {
            String tableName = rs.getString("TABLE_NAME");
            LOG.debug("Found table {}", tableName);
            if (isTableAllowed(tableName)) {
                tableNames.add(tableName);
            }
        }

        LOG.debug("Tables to be exported {}", tableNames);

        // then sort tables based on foreign keys and dump
        for (String tableName : sortByForeignKeys(conn, tableNames)) {
            // A failure on one table is logged but does not abort the whole export.
            try {
                doExportTable(handler, conn, tableName, TABLES_TO_BE_FILTERED.get(tableName.toUpperCase()));
            } catch (Exception e) {
                LOG.error("Failure exporting table {}", tableName, e);
            }
        }
    } catch (SQLException e) {
        LOG.error("While exporting database content", e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("While closing tables result set", e);
            }
        }

        // NOTE(review): releaseConnection() already closes the connection unless it is
        // transaction-bound; the explicit close() below then also closes a thread-bound
        // connection — confirm this double handling is intentional.
        DataSourceUtils.releaseConnection(conn, dataSource);
        if (conn != null) {
            try {
                if (!conn.isClosed()) {
                    conn.close();
                }
            } catch (SQLException e) {
                LOG.error("While releasing connection", e);
            }
        }
    }

    handler.endElement("", "", ROOT_ELEMENT);
    handler.endDocument();
}

From source file:org.eurekastreams.testing.DBUnitFixtureSetup.java

/**
 * Load up the XML dataset found at the input file path, using the data source found in the bean "dataSource" in
 * Spring files found in "classpath:applicationContext*-test.xml". Skips the load when the
 * same dataset is already loaded.
 *
 * @param newDatasetPath
 *            The resource path of the dataset xml file - for example: "/dataset.xml"
 * @throws Exception
 *             on any errors
 */
public static void loadDataSet(final String newDatasetPath) throws Exception {
    if (currentlyLoadedDatasetPath != null && currentlyLoadedDatasetPath.equalsIgnoreCase(newDatasetPath)) {
        System.out.println("Dataset '" + newDatasetPath + "' is currently loaded - skipping.");
        return;
    }
    long start = System.currentTimeMillis();
    System.out.println("Loading dataset '" + newDatasetPath + "'.");

    // Get the Spring application context
    ApplicationContext appContext = new ClassPathXmlApplicationContext(APPLICATION_CONTEXT_LOCATIONS);

    // Get the data source from Spring
    DataSource dataSource = (DataSource) appContext.getBean(DATA_SOURCE_BEAN_NAME);

    logger.info("About to load up the test dataset at path '" + newDatasetPath + "'");

    // Get the connection to the database; released in the finally block so it is
    // no longer leaked when DBUnit setup fails before the insert.
    Connection connection = DataSourceUtils.getConnection(dataSource);
    try {
        IDatabaseConnection dbUnitCon = new DatabaseConnection(connection);

        // Fail fast with a clear message instead of an NPE inside FlatXmlDataSet.
        java.io.InputStream datasetStream = DBUnitFixtureSetup.class.getResourceAsStream(newDatasetPath);
        if (datasetStream == null) {
            throw new IllegalArgumentException("Dataset resource not found: '" + newDatasetPath + "'");
        }
        try {
            // Get the data from the file at the input resource path
            IDataSet dataSet = new FlatXmlDataSet(datasetStream);

            // insert the data set into the database with a clean insert
            DatabaseOperation.CLEAN_INSERT.execute(dbUnitCon, dataSet);
        } finally {
            // Close the resource stream (previously leaked).
            datasetStream.close();
        }
    } finally {
        DataSourceUtils.releaseConnection(connection, dataSource);
    }

    System.out.println(
            "Loaded dataset '" + newDatasetPath + "' in " + (System.currentTimeMillis() - start) + "ms");
    currentlyLoadedDatasetPath = newDatasetPath;
}

From source file:org.eurekastreams.testing.FeedReaderDBUnitFixtureSetup.java

/**
 * Load up the XML dataset found at the input file path, using the data
 * source found in the bean "dataSource" in Spring files found in
 * "classpath:applicationContext*-test.xml".
 *
 * @param newDatasetPath
 *            The resource path of the dataset xml file - for example:
 *            "/dataset.xml"
 * @throws Exception
 *             on any errors
 */
public static void loadDataSet(final String newDatasetPath) throws Exception {
    // Get the Spring application context
    ApplicationContext appContext = new ClassPathXmlApplicationContext(APPLICATION_CONTEXT_LOCATIONS);

    // Get the data source from Spring
    DataSource dataSource = (DataSource) appContext.getBean(DATA_SOURCE_BEAN_NAME);

    logger.info("About to load up the test dataset at path '" + newDatasetPath + "'");

    // Get the connection to the database; released in the finally block so it is
    // no longer leaked when DBUnit setup fails before the insert.
    Connection connection = DataSourceUtils.getConnection(dataSource);
    try {
        IDatabaseConnection dbUnitCon = new DatabaseConnection(connection);

        // Fail fast with a clear message instead of an NPE inside FlatXmlDataSet.
        java.io.InputStream datasetStream = FeedReaderDBUnitFixtureSetup.class
                .getResourceAsStream(newDatasetPath);
        if (datasetStream == null) {
            throw new IllegalArgumentException("Dataset resource not found: '" + newDatasetPath + "'");
        }
        try {
            // Get the data from the file at the input resource path
            IDataSet dataSet = new FlatXmlDataSet(datasetStream);

            // insert the data set into the database with a clean insert
            DatabaseOperation.CLEAN_INSERT.execute(dbUnitCon, dataSet);
        } finally {
            // Close the resource stream (previously leaked).
            datasetStream.close();
        }
    } finally {
        DataSourceUtils.releaseConnection(connection, dataSource);
    }
}

From source file:org.jamwiki.db.DatabaseConnection.java

/**
 * Utility method for closing a database connection.  This method must ALWAYS be
 * called for any connection retrieved by the
 * {@link DatabaseConnection#getConnection getConnection()} method, and the
 * connection SHOULD NOT have already been closed.  Delegates to Spring's
 * DataSourceUtils so that transaction-bound connections are handled correctly.
 *
 * @param conn A database connection, retrieved using DatabaseConnection.getConnection(),
 *  that is to be closed.  This connection SHOULD NOT have been previously closed.
 *  A null connection is silently ignored.
 */
protected static void closeConnection(Connection conn) {
    if (conn != null) {
        DataSourceUtils.releaseConnection(conn, dataSource);
    }
}