Example usage for org.springframework.jdbc.datasource SingleConnectionDataSource SingleConnectionDataSource

Introduction

On this page you can find example usage of the org.springframework.jdbc.datasource.SingleConnectionDataSource constructor SingleConnectionDataSource(Connection target, boolean suppressClose).

Prototype

public SingleConnectionDataSource(Connection target, boolean suppressClose) 

Document

Create a new SingleConnectionDataSource with a given Connection. When suppressClose is true, the DataSource hands out a close-suppressing proxy, so code that closes the Connection it obtained does not close the underlying target Connection.
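
Most of the examples below follow the same pattern: wrap an existing JDBC Connection so it can back a JdbcTemplate. Here is a minimal sketch of that pattern (the class and method names are illustrative, not taken from any of the projects listed below):

import java.sql.Connection;
import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.SingleConnectionDataSource;

public class SingleConnectionExample {

    // Wrap a caller-managed Connection. With suppressClose=true the DataSource
    // returns a close-suppressing proxy, so close() calls made by JdbcTemplate
    // are no-ops and the caller stays responsible for closing the real Connection.
    static JdbcTemplate templateFor(Connection connection) {
        DataSource ds = new SingleConnectionDataSource(connection, true);
        return new JdbcTemplate(ds);
    }
}

Passing false instead hands out the target Connection directly, as in the DatabaseBackedConfigurationTest and PostgreSqlRule examples below.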

Usage

From source file:gov.nih.nci.cabig.ctms.tools.configuration.DatabaseBackedConfigurationTest.java

@Override
protected void setUp() throws Exception {
    super.setUp();
    sessionFactory = new AnnotationConfiguration().addAnnotatedClass(DefaultConfigurationEntry.class)
            .addAnnotatedClass(AlternateConfigurationEntry.class)
            .setProperty("hibernate.connection.driver_class", "org.hsqldb.jdbcDriver")
            .setProperty("hibernate.connection.url", "jdbc:hsqldb:mem:test" + Math.random())
            .setProperty("hibernate.connection.username", "sa").setProperty("hibernate.connection.password", "")
            .buildSessionFactory();
    stubListener = new StubConfigurationListener();
    configuration = new ExampleConfiguration();
    configuration.setSessionFactory(sessionFactory);
    configuration.addConfigurationListener(stubListener);
    altConfiguration = new AlternateConfiguration();
    altConfiguration.setSessionFactory(sessionFactory);
    SingleConnectionDataSource ds = new SingleConnectionDataSource(sessionFactory.openSession().connection(),
            false);
    ds.setAutoCommit(true);
    jdbc = new JdbcTemplate(ds);

    for (String table : new String[] { DEFAULT_TABLE, ALT_TABLE }) {
        jdbc.execute(String.format(
                "CREATE TABLE %s (key VARCHAR(255) PRIMARY KEY, value VARCHAR(255), version INTEGER DEFAULT '0' NOT NULL)",
                table));
    }
}

From source file:org.ensembl.gti.seqstore.JdbcSeqStoreTest.java

@Before
public void setUp() throws Exception {

    String uri = System.getProperty(URI_PROP);

    assertNotNull(URI_PROP + " is null", uri);

    // create a tester connection so we can load it if needed
    databaseTester = new JdbcDatabaseTester("com.mysql.jdbc.Driver", uri);

    // load an empty set
    IDataSet dataset = new FlatXmlDataSetBuilder().build(getClass().getResourceAsStream("/seq_store.xml"));
    databaseTester.setDataSet(dataset);

    DatabaseOperation.CLEAN_INSERT.execute(databaseTester.getConnection(), dataset);

    // plug into SeqStore
    seqStore = new JdbcSeqStore(
            new SingleConnectionDataSource(databaseTester.getConnection().getConnection(), true));

    gene = new GeneDelegate(GeneProtos.Gene.parseFrom(getClass().getResourceAsStream("/gene.dat")));
    System.out.println(gene.getGenomeId());
}

From source file:io.cloudslang.engine.data.SimpleHiloIdentifierGenerator.java

private void updateCurrentChunk() {
    if (logger.isDebugEnabled()) {
        logger.debug("Updating HILO chunk...");
    }

    long t = System.currentTimeMillis();
    try (Connection conn = dataSource.getConnection()) {
        conn.setAutoCommit(false);
        JdbcTemplate jdbcTemplate = new JdbcTemplate(new SingleConnectionDataSource(conn, true));

        jdbcTemplate.update(SQL_LOCK);
        currentChunk = jdbcTemplate.queryForObject(SQL_SELECT, Integer.class);
        if (logger.isDebugEnabled())
            logger.debug("Current chunk: " + currentChunk);
        jdbcTemplate.execute(SQL_UPDATE);
        jdbcTemplate.execute("commit");

        if (logger.isDebugEnabled()) {
            logger.debug("Updating HILO chunk done in " + (System.currentTimeMillis() - t) + " ms");
        }
        currentId = 0;
    } catch (SQLException e) {
        logger.error("Unable to update current chunk", e);
        throw new IllegalStateException("Unable to update current chunk");
    }
}

From source file:com.jaxio.celerio.configuration.database.h2.H2Extension.java

private JdbcTemplate getJdbcTemplate(Connection connection) {
    DataSource dataSource = new SingleConnectionDataSource(connection, true);
    JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
    return jdbcTemplate;
}

From source file:org.zalando.stups.junit.postgres.PostgreSqlRule.java

private void applyScripts(PostgresConfig config) throws SQLException, IOException {
    log.info("Apply Scripts ...");
    Connection connection = getConnection(config);
    DataSource ds = new SingleConnectionDataSource(connection, false);
    FileSystemScanner scanner = new FileSystemScanner();
    for (String location : builder.locations) {
        File directory = new File(location);
        if (directory.exists() && directory.isDirectory()) {
            Resource[] resources = scanner.scanForResources(location, "", ".sql");
            ResourceDatabasePopulator populator = new ResourceDatabasePopulator(resources);
            populator.setSeparator(builder.separator);
            populator.execute(ds);
        } else {
            // log not existing directory
        }
    }
    log.info("Scripts applied!");
}

From source file:ru.adios.budgeter.BundleProvider.java

private static SingleConnectionDataSource createDataSource(String url) {
    final SingleConnectionDataSource dataSource = new SingleConnectionDataSource(url, true) {
        @Override
        protected Connection getCloseSuppressingConnectionProxy(Connection target) {
            return new DelegatingConnectionProxy(target);
        }
    };
    dataSource.setAutoCommit(true);
    dataSource.setDriverClassName("org.sqldroid.SQLDroidDriver");
    try {
        dataSource.initConnection();
    } catch (SQLException ex) {
        throw new DataAccessResourceFailureException("Unable to initialize SingleConnectionDataSource", ex);
    }
    return dataSource;
}

From source file:org.agnitas.dao.impl.ImportRecipientsDaoImpl.java

@Override
public void createTemporaryTable(int adminID, int datasource_id, String keyColumn, List<String> keyColumns,
        int companyId, String sessionId) {
    final DataSource dataSource = (DataSource) applicationContext.getBean("dataSource");
    try {
        if (temporaryConnection != null) {
            temporaryConnection.destroy();
            temporaryConnection = null;
        }
        SingleConnectionDataSource scds = new SingleConnectionDataSource(dataSource.getConnection(), true);
        setTemporaryConnection(scds);
    } catch (SQLException e) {
        throw new DataAccessResourceFailureException("Unable to create single connection data source", e);
    }

    final JdbcTemplate template = getJdbcTemplateForTemporaryTable();
    final String prefix = "cust_" + adminID + "_tmp_";
    final String tableName = prefix + datasource_id + "_tbl";

    String indexSql = "";
    String duplicateSql = "";
    if (keyColumns.isEmpty()) {
        duplicateSql += keyColumn + " as column_duplicate_check_0 ";
        indexSql = "column_duplicate_check_0";
    } else {
        for (int i = 0; i < keyColumns.size(); i++) {
            duplicateSql += keyColumns.get(i) + " as column_duplicate_check_" + i;
            indexSql += "column_duplicate_check_" + i;
            if (i != keyColumns.size() - 1) {
                duplicateSql += ", ";
                indexSql += ", ";
            }
        }
    }
    duplicateSql += " from customer_" + companyId + "_tbl where 1=0)";

    if (AgnUtils.isMySQLDB()) {
        String query = "CREATE TEMPORARY TABLE IF NOT EXISTS " + tableName + " as (select ";
        query += duplicateSql;
        template.execute(query);
        query = "ALTER TABLE " + tableName + " ADD (recipient mediumblob NOT NULL, "
                + "validator_result mediumblob NOT NULL, " + "temporary_id varchar(128) NOT NULL, " + "INDEX ("
                + indexSql + "), " + "status_type int(3) NOT NULL)";
        template.execute(query);
        query = "alter table " + tableName + " collate utf8_unicode_ci";
        template.execute(query);
    } else if (AgnUtils.isOracleDB()) {
        // @todo: we need to decide when all those tables will be removed
        String query = "CREATE TABLE " + tableName + " as (select ";
        query += duplicateSql;
        template.execute(query);
        query = "ALTER TABLE " + tableName + " ADD (recipient blob NOT NULL, "
                + "validator_result blob NOT NULL, " + "temporary_id varchar2(128) NOT NULL, "
                + "status_type number(3) NOT NULL)";
        template.execute(query);
        String indexquery = "create index " + tableName + "_cdc on " + tableName + " (" + indexSql
                + ") nologging";
        template.execute(indexquery);
        query = " INSERT INTO IMPORT_TEMPORARY_TABLES (SESSION_ID, TEMPORARY_TABLE_NAME) VALUES('" + sessionId
                + "', '" + tableName + "')";
        template.execute(query);
    }
}

From source file:org.cloudfoundry.identity.uaa.db.FixFailedBackportMigrations_4_0_4.java

@Override
public void migrate(Connection connection) throws Exception {
    if ("sqlserver".equals(type) || "hsqldb".equals(type)) {
        //we don't have this problem with sqlserver or in memory DB
        logger.info("Skipping 4.0.4 migration for " + type + ", not affected by 3.9.9 back ports.");
        return;
    }
    ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
    SingleConnectionDataSource dataSource = new SingleConnectionDataSource(connection, true);
    JdbcTemplate template = new JdbcTemplate(dataSource);
    boolean run = false;
    for (Map.Entry<String, String> script : getScripts()) {
        int count = template.queryForObject(checkExistsSql, Integer.class, script.getKey());
        if (count == 0) {
            String path = "org/cloudfoundry/identity/uaa/db/" + type + "/" + script.getValue();
            logger.info(
                    String.format("[4.0.4] Adding script for version %s with path %s", script.getKey(), path));
            populator.addScript(new ClassPathResource(path));
            run = true;
        }
    }
    if (run) {
        logger.info("Running missing migrations.");
        populator.setContinueOnError(false);
        populator.setIgnoreFailedDrops(true);
        populator.populate(connection);
        logger.info("Completed missing migrations.");
    } else {
        logger.info("Skipping 4.0.4 migrations, no migrations missing.");
    }
}

From source file:org.geowebcache.storage.MetastoreRemover.java

public MetastoreRemover(DefaultStorageFinder finder) throws Exception {
    this.storageFinder = finder;
    File root = new File(storageFinder.getDefaultPath());
    Connection conn = null;
    try {
        conn = getMetaStoreConnection(root);
        if (conn != null) {
            log.info("Migrating the old metastore to filesystem storage");
            SingleConnectionDataSource ds = new SingleConnectionDataSource(conn, false);
            JdbcTemplate template = new JdbcTemplate(ds);

            // maybe we should make this optional?
            boolean migrateCreationDates = Boolean.getBoolean("MIGRATE_CREATION_DATES");
            if (migrateCreationDates) {
                migrateTileDates(template, new FilePathGenerator(root.getPath()));
            }
            migrateParameters(template, root);
            // remove all the tiles from storage to avoid further migration attempts
            // in the future, but only if the old metastore was external to the data dir
            if (!defaultLocation) {
                removeTiles(template);
            }
        }
    } finally {
        if (conn != null) {
            conn.close();
        }
    }

    // wipe out the entire database if the db location is the default one
    if (defaultLocation) {
        File dbFile = getDefaultH2Path(root).getParentFile();
        if (dbFile.exists()) {
            log.info("Cleaning up the old H2 database");
            FileUtils.deleteDirectory(dbFile);
        }
    }

    // remove disk quota if necessary (this we have to do regardless as we changed the 
    // structure of the params from int to string)
    String path = root.getPath() + File.separator + "diskquota_page_store";
    File quotaRoot = new File(path);
    if (quotaRoot.exists()) {
        File version = new File(quotaRoot, "version.txt");
        if (!version.exists()) {
            log.warn("Old style DiskQuota database found, removing it.");
            FileUtils.deleteDirectory(quotaRoot);
        }
    }
}