List of usage examples for org.springframework.jdbc.datasource.DataSourceUtils#getConnection
public static Connection getConnection(DataSource dataSource) throws CannotGetJdbcConnectionException
From source file:com.jfinal.plugin.activerecord.Config.java
/**
 * Returns whether the current thread is participating in a Spring-managed
 * transaction on this config's DataSource.
 *
 * <p>Fix: the previous implementation returned
 * {@code DataSourceUtils.getConnection(dataSource) != null}, which was wrong on
 * two counts. {@code getConnection} never returns {@code null} — it throws
 * {@code CannotGetJdbcConnectionException} on failure — so the check was always
 * {@code true}. Worse, it fetched a fresh pooled connection on every call and
 * never released it, leaking one connection per invocation.
 *
 * @return {@code true} if the connection for this DataSource is bound to the
 *         current thread's transaction, {@code false} otherwise
 */
public final boolean isInTransaction() {
    Connection conn = DataSourceUtils.getConnection(dataSource);
    try {
        // True only when the connection is the thread-bound transactional one.
        return DataSourceUtils.isConnectionTransactional(conn, dataSource);
    } finally {
        // No-op for a transaction-bound connection; closes it otherwise,
        // so this method no longer leaks connections.
        DataSourceUtils.releaseConnection(conn, dataSource);
    }
}
From source file:org.mifos.test.framework.util.DatabaseTestUtils.java
/** * Return the current contents of the specified tables as a DBUnit dataset as XML string. * This method can be invoked safely inside a Spring-managed transaction. * /*from www. j av a 2 s . c om*/ * @param dataSource * @param tableNames variable parameter list of table names * @return XML string containing the current contents of the specified tables * @throws IOException * @throws DataSetException * @throws SQLException * @throws DatabaseUnitException */ @SuppressWarnings("PMD.DataflowAnomalyAnalysis") //Rationale: You cannot define new local variables in the try block because the finally block must reference it. public String saveTables(DriverManagerDataSource dataSource, String... tableNames) throws IOException, DataSetException, SQLException, DatabaseUnitException { Connection jdbcConnection = null; try { jdbcConnection = DataSourceUtils.getConnection(dataSource); ByteArrayOutputStream stream = new ByteArrayOutputStream(); FlatXmlDataSet.write(new DatabaseConnection(jdbcConnection).createDataSet(tableNames), stream); return stream.toString(); } finally { if (null != jdbcConnection) { jdbcConnection.close(); } DataSourceUtils.releaseConnection(jdbcConnection, dataSource); } }
From source file:au.org.ala.layers.dao.ObjectDAOImpl.java
public void writeObjectsToCSV(OutputStream output, String fid) throws Exception { String sql = MessageFormat.format("COPY (select o.pid as pid, o.id as id, o.name as name, " + "o.desc as description, " + "ST_AsText(ST_Centroid(o.the_geom)) as centroid, " + "GeometryType(o.the_geom) as featureType from objects o " + "where o.fid = ''{0}'') TO STDOUT WITH CSV HEADER", fid); DataSource ds = (DataSource) applicationContext.getBean("dataSource"); Connection conn = DataSourceUtils.getConnection(ds); try {/*from w w w . j a v a2 s. com*/ BaseConnection baseConn = (BaseConnection) new C3P0NativeJdbcExtractor().getNativeConnection(conn); Writer csvOutput = new OutputStreamWriter(output); CopyManager copyManager = new CopyManager(baseConn); copyManager.copyOut(sql, csvOutput); csvOutput.flush(); conn.close(); } catch (SQLException ex) { // something has failed and we print a stack trace to analyse the error logger.error(ex.getMessage(), ex); // ignore failure closing connection try { conn.close(); } catch (SQLException e) { /*do nothing for failure to close */ } } finally { // properly release our connection DataSourceUtils.releaseConnection(conn, ds); } }
From source file:org.syncope.core.util.ImportExport.java
public void export(final OutputStream os) throws SAXException, TransformerConfigurationException, CycleInMultiParentTreeException { StreamResult streamResult = new StreamResult(os); SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance(); TransformerHandler handler = transformerFactory.newTransformerHandler(); Transformer serializer = handler.getTransformer(); serializer.setOutputProperty(OutputKeys.ENCODING, "UTF-8"); serializer.setOutputProperty(OutputKeys.INDENT, "yes"); handler.setResult(streamResult);//from w w w . j a v a 2s.c o m handler.startDocument(); handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl()); Connection conn = DataSourceUtils.getConnection(dataSource); ResultSet rs = null; try { // first read all tables... rs = conn.getMetaData().getTables(null, null, null, new String[] { "TABLE" }); Set<String> tableNames = new HashSet<String>(); while (rs.next()) { String tableName = rs.getString("TABLE_NAME"); // these tables must be ignored if (!tableName.toUpperCase().startsWith("QRTZ_") && !tableName.toUpperCase().equals("ACT_GE_PROPERTY")) { tableNames.add(tableName); } } // then sort tables based on foreign keys and dump for (String tableName : sortByForeignKeys(conn, tableNames)) { doExportTable(handler, conn, tableName); } } catch (SQLException e) { LOG.error("While exporting database content", e); } finally { if (rs != null) { try { rs.close(); } catch (SQLException e) { LOG.error("While closing tables result set", e); } } DataSourceUtils.releaseConnection(conn, dataSource); } handler.endElement("", "", ROOT_ELEMENT); handler.endDocument(); }
From source file:com.taobao.itest.listener.ITestDataSetListener.java
private void buildDataBaseConfig(TestContext testContext, ITestDataSet annotation, List<DatasetConfig> datasetConfigs, String location, String dsName, ReplacementDataSet dataSet) throws DatabaseUnitException, SQLException { DataSource dataSource = (DataSource) SpringContextManager.getApplicationContext().getBean(dsName); Connection connection = DataSourceUtils.getConnection(dataSource); // build databaseTester start IDatabaseConnection Iconn = getDatabaseConnection(dataSource, connection); DatabaseConfig config = Iconn.getConfig(); String dbType = connection.getMetaData().getDatabaseProductName(); if ("MySQL".equalsIgnoreCase(dbType)) { config.setProperty(DatabaseConfig.PROPERTY_METADATA_HANDLER, new MySqlMetadataHandler()); } else if ("Oracle".equalsIgnoreCase(dbType)) { config.setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, new Oracle10DataTypeFactory()); }//from ww w . j a v a 2 s . c o m Date dbNow = getDbCurrentTime(connection, dbType); addSysdateReplacement(dataSet, dbNow); addTimeStampReplacement(dataSet, dbNow); IDatabaseTester databaseTester = new DefaultDatabaseTester(Iconn); databaseTester.setDataSet(dataSet); String setUp = annotation.setupOperation(); DatabaseOperation setUpOperation = "REFRESH".equals(setUp) ? new RefreshOperation() : (DatabaseOperation) databaseOperations.asObject(setUp); databaseTester.setSetUpOperation(setUpOperation); String teardown = annotation.teardownOperation(); DatabaseOperation teardownOperation = "DELETE".equals(teardown) ? 
new DeleteOperation() : (DatabaseOperation) databaseOperations.asObject(teardown); databaseTester.setTearDownOperation(teardownOperation); // build databaseTester end boolean transactional = DataSourceUtils.isConnectionTransactional(connection, dataSource); DatasetConfig datasetConfig = new DatasetConfig(databaseTester, transactional).location(location) .dsName(dsName).setupOperation(annotation.setupOperation()) .teardownOperation(annotation.teardownOperation()); datasetConfigs.add(datasetConfig); }
From source file:org.cfr.capsicum.test.AbstractCayenneJUnit4DbUnitSpringContextTests.java
protected void loadDataSet(URL url) throws SQLException, IOException, DatabaseUnitException { // initialize your database connection here IDatabaseConnection connection = new DatabaseConnection(DataSourceUtils.getConnection(getDataSource())); // DatabaseConfig config = connection.getConfig(); // config.setFeature(DatabaseConfig.FEATURE_QUALIFIED_TABLE_NAMES, // true);// w ww . j a v a 2 s. com // initialize your dataset here IDataSet dataSet = getSrcDataSet(url, producerType, false); try { DatabaseOperation.INSERT.execute(connection, dataSet); } finally { DataSourceUtils.releaseConnection(connection.getConnection(), getDataSource()); } }
From source file:lib.JdbcTemplate.java
@Override public <T> T execute(ConnectionCallback<T> action) throws DataAccessException { Assert.notNull(action, "Callback object must not be null"); Connection con = DataSourceUtils.getConnection(getDataSource()); try {/*w ww . ja va 2s. c o m*/ Connection conToUse = con; if (this.nativeJdbcExtractor != null) { // Extract native JDBC Connection, castable to OracleConnection or the like. conToUse = this.nativeJdbcExtractor.getNativeConnection(con); } else { // Create close-suppressing Connection proxy, also preparing returned Statements. conToUse = createConnectionProxy(con); } return action.doInConnection(conToUse); } catch (SQLException ex) { // Release Connection early, to avoid potential connection pool deadlock // in the case when the exception translator hasn't been initialized yet. DataSourceUtils.releaseConnection(con, getDataSource()); con = null; throw getExceptionTranslator().translate("ConnectionCallback", getSql(action), ex); } finally { DataSourceUtils.releaseConnection(con, getDataSource()); } }
From source file:org.cfr.capsicum.test.AbstractCayenneJUnit4DbUnitSpringContextTests.java
/**
 * Executes a single SQL command used to drop/create database objects,
 * swallowing execution failures (e.g. "table does not exist" on a drop).
 *
 * <p>Fix: in the previous version, if {@code createStatement()} threw, the
 * connection obtained from {@code DataSourceUtils} was never released —
 * leaking one pooled connection per failure. The release now sits in an outer
 * finally so it runs on every path.
 *
 * @param sql the statement to execute
 * @return {@code true} if the statement executed, {@code false} if it failed
 * @throws SQLException if creating or closing the statement fails
 */
protected boolean safeExecute(String sql) throws SQLException {
    Connection connection = DataSourceUtils.getConnection(dataSource);
    try {
        Statement statement = connection.createStatement();
        try {
            statement.execute(sql);
            return true;
        } catch (SQLException ex) {
            // Expected for idempotent drop/create scripts; report via return value.
            return false;
        } finally {
            statement.close();
        }
    } finally {
        // Always release, even when createStatement() throws (old code leaked here).
        DataSourceUtils.releaseConnection(connection, dataSource);
    }
}
From source file:org.cfr.capsicum.test.AbstractCayenneJUnit4DbUnitSpringContextTests.java
/** * Set the DataSource, typically provided via Dependency Injection. */// ww w. j ava 2 s. com @Autowired(required = false) public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; this.simpleJdbcTemplate = new SimpleJdbcTemplate(dataSource); if (this.dataSource != null) { Connection con = null; try { con = DataSourceUtils.getConnection(dataSource); adapter = AdapterFactory.getCurrentAdapter(con); } catch (SQLException e) { throw new RuntimeException(e); } finally { DataSourceUtils.releaseConnection(con, dataSource); } } }
From source file:org.apache.syncope.core.util.ImportExport.java
public void export(final OutputStream os) throws SAXException, TransformerConfigurationException { StreamResult streamResult = new StreamResult(os); final SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory .newInstance();/*from w ww . j av a 2s. c om*/ TransformerHandler handler = transformerFactory.newTransformerHandler(); Transformer serializer = handler.getTransformer(); serializer.setOutputProperty(OutputKeys.ENCODING, "UTF-8"); serializer.setOutputProperty(OutputKeys.INDENT, "yes"); handler.setResult(streamResult); handler.startDocument(); handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl()); final Connection conn = DataSourceUtils.getConnection(dataSource); ResultSet rs = null; try { final DatabaseMetaData meta = conn.getMetaData(); final String schema = readSchema(); rs = meta.getTables(null, schema, null, new String[] { "TABLE" }); final Set<String> tableNames = new HashSet<String>(); while (rs.next()) { String tableName = rs.getString("TABLE_NAME"); // these tables must be ignored if (!tableName.toUpperCase().startsWith("QRTZ_") && !tableName.toUpperCase().startsWith("LOGGING_")) { tableNames.add(tableName); } } // then sort tables based on foreign keys and dump for (String tableName : sortByForeignKeys(conn, tableNames, schema)) { doExportTable(handler, conn, tableName); } } catch (SQLException e) { LOG.error("While exporting database content", e); } finally { if (rs != null) { try { rs.close(); } catch (SQLException e) { LOG.error("While closing tables result set", e); } } DataSourceUtils.releaseConnection(conn, dataSource); } handler.endElement("", "", ROOT_ELEMENT); handler.endDocument(); }