Example usage for org.springframework.jdbc.core JdbcTemplate batchUpdate

Introduction

On this page you can find example usages of org.springframework.jdbc.core.JdbcTemplate#batchUpdate(String... sql), taken from real source files.

Prototype

@Override
public int[] batchUpdate(final String... sql) throws DataAccessException
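
For reference, a minimal sketch of this variant (the data source and the demo_tbl table below are hypothetical): each String is executed as a static SQL statement in a single JDBC batch, and the returned int[] holds the update count reported for each statement.

// Minimal sketch, assuming an existing javax.sql.DataSource named dataSource
// and a hypothetical demo_tbl table.
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
int[] counts = jdbcTemplate.batchUpdate(
        "INSERT INTO demo_tbl (name) VALUES ('a')",
        "INSERT INTO demo_tbl (name) VALUES ('b')",
        "UPDATE demo_tbl SET name = 'c' WHERE name = 'a'");
// counts[i] is the number of rows affected by the i-th statement.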

Usage

From source file:org.paxml.util.DBUtils.java

public static int[] runSqlResource(DataSource ds, String uri) {
    JdbcTemplate temp = new JdbcTemplate(ds);
    List<String> list = DBUtils.breakSql(PaxmlUtils.readResourceToString(uri, null));
    return temp.batchUpdate(list.toArray(new String[list.size()]));
}

From source file:fm.last.musicbrainz.data.AbstractHibernateModelIT.java

private void insertTestData(JdbcTemplate jdbcTemplate, File sqlFile) throws IOException {
    List<String> lines = Files.readLines(sqlFile, Charsets.UTF_8);
    jdbcTemplate.batchUpdate(lines.toArray(new String[] {}));
}

From source file:be.solidx.hot.spring.config.DataConfig.java

@Bean
public Map<be.solidx.hot.spring.config.HotConfig.DataSource, DataSource> dataSources() throws Exception {
    Map<be.solidx.hot.spring.config.HotConfig.DataSource, DataSource> dataSources = new HashMap<be.solidx.hot.spring.config.HotConfig.DataSource, DataSource>();
    for (be.solidx.hot.spring.config.HotConfig.DataSource dataSource : hotConfig.getDataSources()) {
        switch (dataSource.getEngine()) {
        case ORACLE:
            dataSources.put(dataSource,
                    oracleSimpleDriverDataSource(dataSource.getHostname(), dataSource.getPort(),
                            dataSource.getDatabase(), dataSource.getUsername(), dataSource.getPassword()));
            break;
        case DB2:
            dataSources.put(dataSource,
                    db2SimpleDriverDataSource(dataSource.getHostname(), dataSource.getPort(),
                            dataSource.getDatabase(), dataSource.getUsername(), dataSource.getPassword()));
            break;
        case MYSQL:
            dataSources.put(dataSource,
                    mysqlSimpleDriverDataSource(dataSource.getHostname(), dataSource.getPort(),
                            dataSource.getDatabase(), dataSource.getUsername(), dataSource.getPassword()));
            break;
        case PGSQL:
            dataSources.put(dataSource,
                    pgsqlSimpleDriverDataSource(dataSource.getHostname(), dataSource.getPort(),
                            dataSource.getDatabase(), dataSource.getUsername(), dataSource.getPassword()));
            break;
        case HSQLDB:
            DataSource sqlDataSource = hsqldbSimpleDriverDataSource(dataSource.getDatabase(),
                    dataSource.getUsername(), dataSource.getPassword());
            try {
                JdbcTemplate jdbcTemplate = new JdbcTemplate(sqlDataSource);
                jdbcTemplate.afterPropertiesSet();
                for (Resource resource : applicationContext.getResources("classpath*:/sql/*-init.sql")) {
                    String[] statements = IOUtils.toString(resource.getInputStream()).split(";");
                    jdbcTemplate.batchUpdate(statements);
                }
            } catch (Exception e) {
                logger.error("", e);
            }
            dataSources.put(dataSource, sqlDataSource);
            break;
        default:
            break;
        }
    }
    return dataSources;
}

From source file:org.sipfoundry.sipxconfig.homer.HomerDbManager.java

/**
 * Ensure each proxy is listed in homer's hosts table. 
 * NOTE: Records in node table are not removed, it's unclear if an admin would want that --Douglas 
 */
public void syncNodes() {
    if (!m_featureManager.isFeatureEnabled(MySql.FEATURE)) {
        return;
    }
    // sync node info
    String sipxSql = "select fqdn || '@' || ip_address from location l inner join feature_local f on l.location_id = f.location_id and f.feature_id = ?";
    List<String> sipxHosts = m_configJdbcTemplate.queryForList(sipxSql, String.class,
            ProxyManager.FEATURE.getId());

    String homerSql = "select concat(name, '@', host) from homer_hosts";
    JdbcTemplate homerDb = (JdbcTemplate) m_beanFactory.getBean("homerDb");
    List<String> homerHosts = homerDb.queryForList(homerSql, String.class);

    sipxHosts.removeAll(homerHosts);
    if (!sipxHosts.isEmpty()) {
        List<String> add = new ArrayList<String>();
        for (String missingNode : sipxHosts) {
            String[] decode = StringUtils.split(missingNode, '@');
            String name = decode[0];
            String host = decode[1];
            String checkHostSql = "select count(0) from homer_hosts where host = ?";
            boolean hostExists = homerDb.queryForInt(checkHostSql, host) > 0;
            String sql;
            if (hostExists) {
                sql = format("update homer_hosts set name='%s' where host='%s'", name, host);
            } else {
                sql = format("insert into homer_hosts (name, host, status) values ('%s', '%s', 1)", name, host);
            }
            add.add(sql);
        }
        homerDb.batchUpdate(add.toArray(new String[0]));
    }

    // sync db info
    String dbhost = "127.0.0.1";
    int dbport = 3306;
    String dbname = "homer_db";
    String dbuser = "root";
    String dbpass = "";
    String dbtables = "sip_capture";
    String name = "local";
    int status = 1;
    // must have lowest ID to ensure it's the default node for statistics
    String nodeSql = "select 1 from homer_nodes where host = ? and dbport = ? and dbname = ? and dbpassword = ? "
            + "and dbusername = ? and dbtables = ? and name = ? and status = ? and id = 1";
    List<Integer> found = homerDb.queryForList(nodeSql, Integer.class, dbhost, dbport, dbname, dbpass, dbuser,
            dbtables, name, status);
    if (found.size() == 0) {
        String[] hosts = new String[2];
        hosts[0] = "delete from homer_nodes";
        String addNode = "insert into homer_nodes (id, host, dbport, dbname, dbpassword, dbusername, dbtables, name, status) "
                + "values (1, '%s',%d,'%s','%s','%s','%s','%s','%d')";
        hosts[1] = format(addNode, dbhost, dbport, dbname, dbpass, dbuser, dbtables, name, status);
        homerDb.batchUpdate(hosts);
    }
}

From source file:org.jasig.ssp.util.importer.job.staging.SqlServerExternalTableUpsertWriter.java

@Override
public void write(List<? extends RawItem> items) throws Exception {

    JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
    List<String> batchedStatements = new ArrayList<String>();

    String fileName = items.get(0).getResource().getFilename();
    String[] tableName = fileName.split("\\.");

    Object batchStart = stepExecution.getExecutionContext().get("batchStart");
    Object batchStop = stepExecution.getExecutionContext().get("batchStop");

    RawItem item = items.get(0);
    if (currentResource == null) {
        this.orderedHeaders = writeHeader(items.get(0));
        this.currentResource = items.get(0).getResource();
    }
    Resource itemResource = item.getResource();
    if (!(this.currentResource.equals(itemResource))) {
        this.orderedHeaders = writeHeader(item);
        this.currentResource = itemResource;
    }
    StringBuilder insertSql = new StringBuilder();
    insertSql
            .append(" MERGE INTO " + tableName[0] + " as target USING stg_" + tableName[0] + " as source ON (");

    List<String> tableKeys = metadataRepository.getRepository().getColumnMetadataRepository()
            .getTableMetadata(new TableReference(tableName[0])).getTableKeys();

    // A few external tables don't (yet) have natural keys; in those cases
    // the key is enforced on the staging table instead. So when the external
    // table has no keys, we look to the corresponding staging table for them.
    if (tableKeys.isEmpty()) {
        tableKeys = metadataRepository.getRepository().getColumnMetadataRepository()
                .getTableMetadata(new TableReference("stg_" + tableName[0])).getTableKeys();
    }
    for (String key : tableKeys) {
        insertSql.append("target." + key + " = source." + key + " and ");
    }
    insertSql.setLength(insertSql.length() - 4); // trim trailing "and "

    insertSql.append(") WHEN NOT MATCHED AND source.batch_id >= " + batchStart + " and source.batch_id <= "
            + batchStop + " THEN INSERT (");

    StringBuilder valuesSqlBuilder = new StringBuilder();
    valuesSqlBuilder.append(" VALUES ( ");
    for (String header : this.orderedHeaders) {

        insertSql.append(header).append(",");
        valuesSqlBuilder.append("source." + header).append(",");
    }
    insertSql.setLength(insertSql.length() - 1); // trim comma
    insertSql.append(")");
    valuesSqlBuilder.setLength(valuesSqlBuilder.length() - 1); // trim comma
    insertSql.append(valuesSqlBuilder);
    insertSql.append(")");
    insertSql.append(" WHEN MATCHED AND source.batch_id >= " + batchStart + " and source.batch_id <= "
            + batchStop + " THEN UPDATE SET ");

    for (String header : this.orderedHeaders) {
        // We don't skip key columns b/c some tables are entirely keys.
        // That's a bit wasteful, but it keeps the statement-building logic
        // simpler than figuring out whether we can leave the update
        // clause off altogether.
        insertSql.append("target.").append(header).append("=source.").append(header).append(",");
    }

    insertSql.setLength(insertSql.length() - 1); // trim comma
    insertSql.append(";");

    batchedStatements.add(insertSql.toString());
    sayQuery(insertSql);
    try {
        int[] results = jdbcTemplate.batchUpdate(batchedStatements.toArray(new String[] {}));
        Integer numInsertedUpdated = (Integer) stepExecution.getExecutionContext().get("numInsertedUpdated");
        numInsertedUpdated = numInsertedUpdated == null ? 0 : numInsertedUpdated;
        if (results.length >= 1) {
            numInsertedUpdated = numInsertedUpdated + results[0];
        }
        if (results.length >= 2) {
            numInsertedUpdated = numInsertedUpdated + results[1];
        }
        stepExecution.getExecutionContext().put("numInsertedUpdated", numInsertedUpdated);

        say("******UPSERT******" + " batch start:" + batchStart + " batchstop:" + batchStop);
    } catch (Exception e) {
        throw new NotSkippableException(e);
    }
}

From source file:org.jasig.ssp.util.importer.job.staging.PostgresExternalTableUpsertWriter.java

@Override
public void write(List<? extends RawItem> items) throws Exception {

    JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
    List<String> batchedStatements = new ArrayList<String>();

    String fileName = items.get(0).getResource().getFilename();
    String[] fileNameSplit = fileName.split("\\.");
    String tableName = fileNameSplit[0];

    Object batchStart = stepExecution.getExecutionContext().get("batchStart");
    Object batchStop = stepExecution.getExecutionContext().get("batchStop");

    RawItem item = items.get(0);
    if (currentResource == null) {
        this.orderedHeaders = writeHeader(items.get(0));
        this.currentResource = items.get(0).getResource();
    }
    Resource itemResource = item.getResource();
    if (!(this.currentResource.equals(itemResource))) {
        this.orderedHeaders = writeHeader(item);
        this.currentResource = itemResource;
    }
    StringBuilder updateSql = new StringBuilder();
    updateSql.append(" UPDATE " + tableName + " AS target SET ");
    for (String header : this.orderedHeaders) {
        updateSql.append(header + "=source." + header + ",");
    }
    updateSql.deleteCharAt(updateSql.lastIndexOf(","));
    updateSql.append(" FROM stg_" + tableName + " AS source WHERE ");
    List<String> tableKeys = metadataRepository.getRepository().getColumnMetadataRepository()
            .getTableMetadata(new TableReference(tableName)).getTableKeys();

    // A few external tables don't (yet) have natural keys; in those cases
    // the key is enforced on the staging table instead. So when the external
    // table has no keys, we look to the corresponding staging table for them.
    if (tableKeys.isEmpty()) {
        tableKeys = metadataRepository.getRepository().getColumnMetadataRepository()
                .getTableMetadata(new TableReference("stg_" + tableName)).getTableKeys();
    }
    for (String key : tableKeys) {
        updateSql.append(" target." + key + " = source." + key + " AND ");
    }
    updateSql.append(" source.batch_id >= " + batchStart + " and source.batch_id <= " + batchStop + ";");
    batchedStatements.add(updateSql.toString());
    sayQuery(updateSql);

    StringBuilder insertSql = new StringBuilder();
    insertSql.append(" INSERT INTO " + tableName + "(");
    for (String header : this.orderedHeaders) {
        insertSql.append(header + ",");
    }
    insertSql.setLength(insertSql.length() - 1); // trim comma

    insertSql.append(") SELECT ");
    for (String header : this.orderedHeaders) {
        insertSql.append(" source." + header).append(",");
    }
    insertSql.setLength(insertSql.length() - 1); // trim comma
    insertSql.append(" FROM stg_" + tableName + " AS source ");
    insertSql.append(" LEFT OUTER JOIN " + tableName + " AS target ON ");
    for (String key : tableKeys) {
        insertSql.append(" source." + key + " = target." + key + " AND");
    }
    insertSql.setLength(insertSql.length() - 3); // trim trailing "AND"
    insertSql.append(" WHERE ");
    for (String key : tableKeys) {
        insertSql.append(" target." + key + " IS NULL AND ");
    }
    insertSql.append(" source.batch_id >= " + batchStart + " and source.batch_id <= " + batchStop + "");

    batchedStatements.add(insertSql.toString());
    sayQuery(insertSql);
    try {
        int[] results = jdbcTemplate.batchUpdate(batchedStatements.toArray(new String[] {}));

        Integer numInsertedUpdated = (Integer) stepExecution.getExecutionContext().get("numInsertedUpdated");
        numInsertedUpdated = numInsertedUpdated == null ? 0 : numInsertedUpdated;
        numInsertedUpdated = numInsertedUpdated + results[0] + results[1];
        stepExecution.getExecutionContext().put("numInsertedUpdated", numInsertedUpdated);
    } catch (Exception e) {
        throw new NotSkippableException(e);
    }
}

From source file:org.agnitas.dao.impl.ImportRecipientsDaoImpl.java

@Override
public void updateExistRecipients(final Collection<ProfileRecipientFields> recipientsForUpdate,
        final ImportProfile importProfile, final CSVColumnState[] columns, Integer adminId) {
    if (recipientsForUpdate.isEmpty()) {
        return;
    }

    final JdbcTemplate template = createJdbcTemplate();
    final ProfileRecipientFields[] recipientsBean = recipientsForUpdate
            .toArray(new ProfileRecipientFields[recipientsForUpdate.size()]);
    final String[] querys = new String[recipientsForUpdate.size()];
    for (int i = 0; i < querys.length; i++) {
        String query = "UPDATE  customer_" + importProfile.getCompanyId() + "_tbl SET ";
        if (recipientsBean[i].getMailtypeDefined().equals(ImportUtils.MAIL_TYPE_DEFINED))
            query = query + "mailtype=" + recipientsBean[i].getMailtype() + ", ";
        for (CSVColumnState column : columns) {
            if (column.getImportedColumn() && !column.getColName().equals("mailtype")) {
                String value = Toolkit.getValueFromBean(recipientsBean[i], column.getColName());

                // @todo: agn: value == null
                if (StringUtils.isEmpty(value)
                        && importProfile.getNullValuesAction() == NullValuesAction.OVERWRITE.getIntValue()
                        && !column.getColName().equals("gender")) {
                    query = query + column.getColName() + "=NULL, ";
                } else if (!StringUtils.isEmpty(value)) {
                    if (column.getColName().equals("gender")) {
                        if (StringUtils.isEmpty(value)) {
                            query = query + column.getColName() + "=2, ";
                        } else {
                            if (GenericValidator.isInt(value)) {
                                query = query + column.getColName() + "=" + value + ", ";
                            } else {
                                final Integer intValue = importProfile.getGenderMapping().get(value);
                                query = query + column.getColName() + "=" + intValue + ", ";
                            }
                        }
                    } else {
                        switch (column.getType()) {
                        case CSVColumnState.TYPE_CHAR:
                            if (column.getColName().equals("email")) {
                                value = value.toLowerCase();
                            }
                            if (AgnUtils.isOracleDB()) {
                                query = query + column.getColName() + "='" + value.replace("'", "''") + "', ";
                            } else if (AgnUtils.isMySQLDB()) {
                                query = query + column.getColName() + "='"
                                        + value.replace("\\", "\\\\").replace("'", "\\'") + "', ";
                            }
                            break;
                        case CSVColumnState.TYPE_NUMERIC:
                            query = query + column.getColName() + "=" + value + ", ";
                            break;
                        case CSVColumnState.TYPE_DATE:
                            if (StringUtils.isEmpty(value) || value == null) {
                                query = query + column.getColName() + "=null, ";
                            } else {
                                final int format = importProfile.getDateFormat();
                                Date date = ImportUtils.getDateAsString(value, format);
                                if (AgnUtils.isMySQLDB()) {
                                    String temTimestamp = new Timestamp(date.getTime()).toString();
                                    query = query + column.getColName() + "='"
                                            + temTimestamp.substring(0, temTimestamp.indexOf(" ")) + "', ";
                                }
                                if (AgnUtils.isOracleDB()) {
                                    final String dateAsFormatedString = DB_DATE_FORMAT.format(date);
                                    query = query + column.getColName() + "=to_date('" + dateAsFormatedString
                                            + "', 'dd.MM.YYYY HH24:MI:SS'), ";
                                }
                            }
                            break;
                        }
                    }
                }
            }
        }

        query = query.substring(0, query.length() - 2);
        String value = Toolkit.getValueFromBean(recipientsBean[i], importProfile.getKeyColumn());
        value = value.toLowerCase();
        if (!importProfile.getUpdateAllDuplicates()) {
            query = query + " WHERE customer_id = " + recipientsBean[i].getUpdatedIds().get(0);
        } else {
            query = query + " WHERE customer_id IN(";
            final int countUpdatedRecipients = recipientsBean[i].getUpdatedIds().size();
            for (int index = 0; index < countUpdatedRecipients; index++) {
                query = query + recipientsBean[i].getUpdatedIds().get(index)
                        + ((index + 1) != countUpdatedRecipients ? "," : "");
            }
            query = query + ")";

        }

        if (logger.isInfoEnabled()) {
            logger.info("Import ID: " + importProfile.getImportId() + " Updating recipient in recipient-table: "
                    + Toolkit.getValueFromBean(recipientsBean[i], importProfile.getKeyColumn()));
        }

        querys[i] = query;
    }
    template.batchUpdate(querys);
}

From source file:org.agnitas.dao.impl.RecipientDaoImpl.java

@Override
public void deleteRecipients(int companyID, List<Integer> list) {
    if (list == null || list.size() < 1) {
        throw new RuntimeException("Invalid customerID list size");
    }
    StringBuilder sb = new StringBuilder("WHERE customer_id in (");
    for (Integer customerId : list) {
        sb.append(customerId);
        sb.append(",");
    }
    sb.setCharAt(sb.length() - 1, ')');
    String where = sb.toString();

    sb = new StringBuilder("DELETE FROM customer_");
    sb.append(companyID);
    sb.append("_binding_tbl ");
    sb.append(where);
    String bindingQuery = sb.toString();

    sb = new StringBuilder("DELETE FROM customer_");
    sb.append(companyID);
    sb.append("_tbl ");
    sb.append(where);
    String customerQuery = sb.toString();

    JdbcTemplate tmpl = new JdbcTemplate((DataSource) this.applicationContext.getBean("dataSource"));
    tmpl.batchUpdate(new String[] { bindingQuery, customerQuery });
}

From source file:org.apache.ctakes.ytex.kernel.tree.InstanceTreeBuilderImpl.java

/**
 * run 'preparation' statements.  These may e.g. create temporary tables in the database.
 * @param prepareScript SQL script containing the preparation statements
 * @param prepareScriptDelimiter delimiter used to split the script into individual statements
 */
protected void prepare(String prepareScript, String prepareScriptDelimiter) {
    if (prepareScript != null && prepareScript.length() > 0) {
        String[] statements = prepareScript.split(prepareScriptDelimiter);
        List<String> listStatements = new ArrayList<String>(statements.length);
        // throw out empty lines
        for (String sql : statements) {
            if (sql != null && sql.trim().length() > 0)
                listStatements.add(sql);
        }
        JdbcTemplate jt = new JdbcTemplate(this.getDataSource());
        jt.batchUpdate(listStatements.toArray(new String[] {}));
    }
}