List of usage examples for org.springframework.jdbc.core JdbcTemplate batchUpdate
public int[] batchUpdate(String sql, List<Object[]> batchArgs) throws DataAccessException
public int[] batchUpdate(String sql, BatchPreparedStatementSetter pss) throws DataAccessException
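Most of the examples below use one of these two overloads: passing the batch arguments as a List<Object[]>, or setting parameters through a BatchPreparedStatementSetter. As a starting point, here is a minimal, self-contained sketch of both; the table name t_person, its columns, and the method names are illustrative assumptions and are not taken from the projects listed below.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.sql.DataSource;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;

public class BatchUpdateSketch {

    // Hypothetical table "t_person(name, age)", used only for illustration.
    private static final String INSERT_SQL = "INSERT INTO t_person (name, age) VALUES (?, ?)";

    // Overload 1: supply the parameters as a List<Object[]>, one array per row.
    static int[] insertWithArgList(JdbcTemplate jdbcTemplate, List<String> names) {
        List<Object[]> batchArgs = new ArrayList<>();
        for (String name : names) {
            batchArgs.add(new Object[] { name, 30 });
        }
        return jdbcTemplate.batchUpdate(INSERT_SQL, batchArgs);
    }

    // Overload 2: set the parameters yourself via a BatchPreparedStatementSetter.
    static int[] insertWithSetter(JdbcTemplate jdbcTemplate, final List<String> names) {
        return jdbcTemplate.batchUpdate(INSERT_SQL, new BatchPreparedStatementSetter() {
            @Override
            public void setValues(PreparedStatement ps, int i) throws SQLException {
                ps.setString(1, names.get(i));
                ps.setInt(2, 30);
            }

            @Override
            public int getBatchSize() {
                return names.size();
            }
        });
    }

    // Typical wiring: the template is created from an existing DataSource.
    static JdbcTemplate newTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}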
From source file:com.alibaba.otter.node.etl.common.db.DbPerfIntergration.java
@Test
public void test_stack() {
    DbMediaSource dbMediaSource = new DbMediaSource();
    dbMediaSource.setId(1L);
    dbMediaSource.setDriver("com.mysql.jdbc.Driver");
    dbMediaSource.setUsername("otter");
    dbMediaSource.setPassword("otter");
    dbMediaSource.setUrl("jdbc:mysql://127.0.0.1:3306/retl");
    dbMediaSource.setEncode("UTF-8");
    dbMediaSource.setType(DataMediaType.MYSQL);

    DbDataMedia dataMedia = new DbDataMedia();
    dataMedia.setSource(dbMediaSource);
    dataMedia.setId(1L);
    dataMedia.setName("ljhtable1");
    dataMedia.setNamespace("otter");

    final DbDialect dbDialect = dbDialectFactory.getDbDialect(2L, dataMedia.getSource());
    want.object(dbDialect).clazIs(MysqlDialect.class);
    final TransactionTemplate transactionTemplate = dbDialect.getTransactionTemplate();

    int minute = 5;
    int nextId = 1;
    final int thread = 10;
    final int batch = 50;
    final String sql = "insert into otter.ljhtable1 values(? , ? , ? , ?)";
    final CountDownLatch latch = new CountDownLatch(thread);
    ExecutorService executor = new ThreadPoolExecutor(thread, thread, 60, TimeUnit.SECONDS,
            new ArrayBlockingQueue(thread * 2), new NamedThreadFactory("load"),
            new ThreadPoolExecutor.CallerRunsPolicy());

    for (int sec = 0; sec < minute * 60; sec++) {
        long startTime = System.currentTimeMillis();
        for (int i = 0; i < thread; i++) {
            final int start = nextId + i * batch;
            executor.submit(new Runnable() {

                public void run() {
                    try {
                        transactionTemplate.execute(new TransactionCallback() {

                            public Object doInTransaction(TransactionStatus status) {
                                JdbcTemplate jdbcTemplate = dbDialect.getJdbcTemplate();
                                return jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

                                    public void setValues(PreparedStatement ps, int idx) throws SQLException {
                                        int id = start + idx;
                                        StatementCreatorUtils.setParameterValue(ps, 1, Types.INTEGER, null, id);
                                        StatementCreatorUtils.setParameterValue(ps, 2, Types.VARCHAR, null,
                                                RandomStringUtils.randomAlphabetic(1000));
                                        long time = new Date().getTime();
                                        StatementCreatorUtils.setParameterValue(ps, 3, Types.TIMESTAMP,
                                                new Timestamp(time));
                                        StatementCreatorUtils.setParameterValue(ps, 4, Types.TIMESTAMP,
                                                new Timestamp(time));
                                    }

                                    public int getBatchSize() {
                                        return batch;
                                    }
                                });
                            }
                        });
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }

        long endTime = System.currentTimeMillis();
        try {
            latch.await(1000 * 60L - (endTime - startTime), TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        if (latch.getCount() != 0) {
            System.out.println("perf is not enough!");
            System.exit(-1);
        }

        endTime = System.currentTimeMillis();
        System.out.println("Time cost : " + (System.currentTimeMillis() - startTime));
        try {
            TimeUnit.MILLISECONDS.sleep(1000L - (endTime - startTime));
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        nextId = nextId + thread * batch;
    }
    executor.shutdown();
}
From source file:data.DefaultExchanger.java
private int[] batchUpdate(JdbcTemplate jdbcTemplate, final List<JsonNode> nodes) {
    int[] updateCounts = jdbcTemplate.batchUpdate(getInsertSql(), new BatchPreparedStatementSetter() {

        @Override
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            setPreparedStatement(ps, nodes.get(i));
        }

        @Override
        public int getBatchSize() {
            return nodes.size();
        }
    });
    return updateCounts;
}
From source file:com.esofthead.mycollab.module.project.service.ibatis.GanttAssignmentServiceImpl.java
@Override
public void massUpdatePredecessors(Integer taskSourceId, final List<TaskPredecessor> predecessors,
        Integer sAccountId) {
    Lock lock = DistributionLockUtil.getLock("task-service" + sAccountId);
    try {
        PredecessorMapper predecessorMapper = ApplicationContextUtil.getSpringBean(PredecessorMapper.class);
        PredecessorExample ex = new PredecessorExample();
        ex.createCriteria().andSourceidEqualTo(taskSourceId);
        predecessorMapper.deleteByExample(ex);

        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
        final long now = new GregorianCalendar().getTimeInMillis();
        if (lock.tryLock(30, TimeUnit.SECONDS)) {
            jdbcTemplate.batchUpdate(
                    "INSERT INTO `m_prj_predecessor`(`sourceType`, `descType`, `predestype`,`lagDay`, "
                            + "`sourceId`,`descId`, `createdTime`) VALUES (?, ?, ?, ?, ?, ?, ?)",
                    new BatchPreparedStatementSetter() {

                        @Override
                        public void setValues(PreparedStatement preparedStatement, int i) throws SQLException {
                            preparedStatement.setString(1, predecessors.get(i).getSourcetype());
                            preparedStatement.setString(2, predecessors.get(i).getDesctype());
                            preparedStatement.setString(3, predecessors.get(i).getPredestype());
                            preparedStatement.setInt(4, predecessors.get(i).getLagday());
                            preparedStatement.setInt(5, predecessors.get(i).getSourceid());
                            preparedStatement.setInt(6, predecessors.get(i).getDescid());
                            preparedStatement.setDate(7, new Date(now));
                        }

                        @Override
                        public int getBatchSize() {
                            return predecessors.size();
                        }
                    });
        }
    } catch (Exception e) {
        throw new MyCollabException(e);
    } finally {
        DistributionLockUtil.removeLock("task-service" + sAccountId);
        lock.unlock();
    }
}
From source file:com.taobao.tddl.common.sync.RowBasedReplicationExecutor.java
/**
 * Batch delete of replication sync logs via JDBC.
 */
public static void batchDeleteSyncLog(Collection<RowBasedReplicationContext> contexts) {
    long timeused, time0 = System.currentTimeMillis();
    String sqlpattern = "delete from sync_log_{0} where id = ?";

    // Group the RowBasedReplicationContexts by JdbcTemplate and generated SQL.
    Map<JdbcTemplate, Map<String/* logSQL */, List<RowBasedReplicationContext>>> sortedContexts = buildSortedContexts(
            contexts, sqlpattern.toString());

    for (Map.Entry<JdbcTemplate, Map<String, List<RowBasedReplicationContext>>> e0 : sortedContexts.entrySet()) {
        JdbcTemplate jt = e0.getKey();
        for (Map.Entry<String, List<RowBasedReplicationContext>> e : e0.getValue().entrySet()) {
            final List<RowBasedReplicationContext> endContexts = e.getValue();
            BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

                public int getBatchSize() {
                    return endContexts.size();
                }

                public void setValues(PreparedStatement ps, int i) throws SQLException {
                    RowBasedReplicationContext context = endContexts.get(i);
                    ps.setString(1, context.getSyncLogId());
                }
            };
            jt.batchUpdate(e.getKey(), setter);
            if (log.isDebugEnabled()) {
                log.debug("[batchDeleteSyncLog], sql = [" + e.getKey() + "], batch size=" + endContexts.size());
            }
        }
    }
    timeused = System.currentTimeMillis() - time0;
    log.warn(contexts.size() + " replication logs deleted, time used:" + timeused);
    Monitor.add(Monitor.KEY1, Monitor.KEY2_SYNC, Monitor.KEY3_BatchDeleteSyncLog, contexts.size(), timeused);
}
From source file:com.taobao.tddl.common.sync.RowBasedReplicationExecutor.java
/**
 * Batch update of replication sync logs via JDBC.
 */
public static void batchUpdateSyncLog(Collection<RowBasedReplicationContext> contexts, final long extraPlusTime) {
    long timeused, time0 = System.currentTimeMillis();
    String sqlpattern = "update sync_log_{0} set next_sync_time=? where id = ?";

    // Group the RowBasedReplicationContexts by JdbcTemplate and generated SQL.
    Map<JdbcTemplate, Map<String/* logSQL */, List<RowBasedReplicationContext>>> sortedContexts = buildSortedContexts(
            contexts, sqlpattern.toString());

    for (Map.Entry<JdbcTemplate, Map<String, List<RowBasedReplicationContext>>> e0 : sortedContexts.entrySet()) {
        JdbcTemplate jt = e0.getKey();
        for (Map.Entry<String, List<RowBasedReplicationContext>> e : e0.getValue().entrySet()) {
            final List<RowBasedReplicationContext> endContexts = e.getValue();
            BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

                public int getBatchSize() {
                    return endContexts.size();
                }

                public void setValues(PreparedStatement ps, int i) throws SQLException {
                    RowBasedReplicationContext context = endContexts.get(i);
                    ps.setTimestamp(1, getNextSyncTime(context, extraPlusTime));
                    ps.setString(2, context.getSyncLogId());
                }
            };
            jt.batchUpdate(e.getKey(), setter);
            if (log.isDebugEnabled()) {
                log.debug("[batchUpdateSyncLog], sql = [" + e.getKey() + "], batch size=" + endContexts.size());
            }
        }
    }
    timeused = System.currentTimeMillis() - time0;
    log.warn(contexts.size() + " replication logs updated, time used:" + timeused);
    Monitor.add(Monitor.KEY1, Monitor.KEY2_SYNC, Monitor.KEY3_BatchUpdateSyncLog, contexts.size(), timeused);
}
From source file:com.github.ferstl.spring.jdbc.oracle.DatabaseConfiguration.java
private void initDatabase(final JdbcTemplate jdbcTemplate) {
    TransactionTemplate trxTemplate = new TransactionTemplate(transactionManager());
    trxTemplate.execute(new TransactionCallback<int[]>() {

        @Override
        public int[] doInTransaction(TransactionStatus status) {
            List<Object[]> batchArgs = new ArrayList<>(NUMBER_OF_ROWS);
            for (int i = 0; i < NUMBER_OF_ROWS; i++) {
                int value = i + 1;
                batchArgs.add(new Object[] { String.format("Value_%05d", value), value });
            }
            return jdbcTemplate.batchUpdate(INSERT_SQL, batchArgs);
        }
    });
}
From source file:com.alibaba.otter.node.etl.load.loader.db.interceptor.operation.AbstractOperationInterceptor.java
private void init(final JdbcTemplate jdbcTemplate, final String markTableName, final String markTableColumn) {
    int count = jdbcTemplate
            .queryForInt(MessageFormat.format(checkDataSql, markTableName, GLOBAL_THREAD_COUNT - 1));
    if (count != GLOBAL_THREAD_COUNT) {
        if (logger.isInfoEnabled()) {
            logger.info("Interceptor: init " + markTableName + "'s data.");
        }

        TransactionTemplate transactionTemplate = new TransactionTemplate();
        transactionTemplate
                .setTransactionManager(new DataSourceTransactionManager(jdbcTemplate.getDataSource()));
        transactionTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_NOT_SUPPORTED);
        transactionTemplate.execute(new TransactionCallback() {

            public Object doInTransaction(TransactionStatus status) {
                jdbcTemplate.execute(MessageFormat.format(deleteDataSql, markTableName));
                String batchSql = MessageFormat.format(updateSql,
                        new Object[] { markTableName, markTableColumn });
                jdbcTemplate.batchUpdate(batchSql, new BatchPreparedStatementSetter() {

                    public void setValues(PreparedStatement ps, int idx) throws SQLException {
                        ps.setInt(1, idx);
                        ps.setInt(2, 0);
                        // ps.setNull(3, Types.VARCHAR);
                    }

                    public int getBatchSize() {
                        return GLOBAL_THREAD_COUNT;
                    }
                });
                return null;
            }
        });

        if (logger.isInfoEnabled()) {
            logger.info("Interceptor: Init EROSA Client Data: " + updateSql);
        }
    }
}
From source file:uta.ak.usttmp.dmcore.task.QuartzMiningJob.java
@Override
public void execute(JobExecutionContext jec) {
    // ====== statistic time ======
    Date statisticStartTime = new Date();
    Date preprocessTime = new Date();
    Date miningTime = new Date();
    Date trackingTime = new Date();
    long statistic_TotalNum = 0;

    String updateSql = "UPDATE `c_miningtask` "
            + "SET `qrtz_job_name` = ?, `qrtz_job_exec_count` = ?, `status` = ? "
            + "WHERE `mme_eid` = ?;";

    DataSource ds = null;
    JdbcTemplate jt = null;
    int nextExecCount = 0;
    int totalExecCount = 0;
    boolean hasPC = false;
    boolean hasTC = false;
    MiningTask mt;

    try {
        miningTaskId = Long.parseLong((String) jec.getMergedJobDataMap().get("miningTaskId"));
        jobName = (String) jec.getMergedJobDataMap().get("jobName");

        applicationContext = new ClassPathXmlApplicationContext("applicationContext.xml");
        ds = (DataSource) applicationContext.getBean("dataSource");
        jt = new JdbcTemplate(ds);

        // Load the mining task
        String querySql = "select * from c_miningtask where mme_eid=?";
        mt = (MiningTask) jt.queryForObject(querySql, new Object[] { miningTaskId }, new MiningTaskRowMapper());

        if (mt.getQrtzJobExecCount() == mt.getQrtzJobTotalCount()) {
            return;
        }
        totalExecCount = mt.getQrtzJobTotalCount();

        if (null != mt.getPreprocessComponent() && !"NONE".equals(mt.getPreprocessComponent().toUpperCase())) {
            hasPC = true;
        }
        if (null != mt.getTrackingComponent() && !"NONE".equals(mt.getTrackingComponent().toUpperCase())) {
            hasTC = true;
        }

        List<Text> textList;
        List<Topic> topicList;

        if (hasPC) {
            preprocessComponent = (PreprocessComponent) applicationContext.getBean(mt.getPreprocessComponent());
        }
        miningComponent = (MiningComponent) applicationContext.getBean(mt.getMiningComponent());
        if (hasTC) {
            trackingComponent = (TrackingComponent) applicationContext.getBean(mt.getTrackingComponent());
        }
        miningTaskService = (MiningTaskService) applicationContext.getBean("miningTaskService");
        topicMiningService = (TopicMiningService) applicationContext.getBean("topicMiningService");

        nextExecCount = mt.getQrtzJobExecCount() + 1;
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        // Calculate the time period
        Calendar cal = Calendar.getInstance();
        cal.setTime(mt.getStartTime());
        cal.add(Calendar.HOUR_OF_DAY, mt.getMiningInterval() * mt.getQrtzJobExecCount());
        Date startTime = cal.getTime();
        cal.setTime(mt.getStartTime());
        cal.add(Calendar.HOUR_OF_DAY, mt.getMiningInterval() * nextExecCount);
        Date endTime = cal.getTime();

        /**************************************************************
         * 1. Preprocess text data
         **************************************************************/
        String sTag = mt.getId() + "_" + mt.getTag();

        // Clear the text table if there is existing text for this period.
        String clearTextSQL = "DELETE FROM c_text "
                + "WHERE ( text_createdate BETWEEN ? AND ? ) AND tag = ?";
        jt.update(clearTextSQL, formatter.format(startTime), formatter.format(endTime), sTag);

        System.out.println("Query raw text records...");
        String querySQL = "SELECT * FROM c_rawtext "
                + "WHERE ( text_createdate BETWEEN ? AND ? ) AND tag = ?";
        List<RawText> rawTextList = jt.query(querySQL,
                new Object[] { formatter.format(startTime), formatter.format(endTime), mt.getTag() },
                new RawTextRowMapper());

        // If there is no raw text to be mined, then return
        if (null == rawTextList || rawTextList.isEmpty()) {
            int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                    : MiningTask.STATUS_RUNNING;
            // Update task status
            jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);
            return;
        }
        statistic_TotalNum = (null != rawTextList) ? rawTextList.size() : 0;

        if (hasPC) {
            textList = preprocessComponent.preprocess(mt, rawTextList);
        } else {
            textList = new ArrayList<>();
            for (RawText rt : rawTextList) {
                Text tx = new Text();
                tx.setCreateTime(rt.getCreateTime());
                tx.setRawTextId(rt.getId());
                tx.setTag(sTag);
                tx.setText(rt.getText());
                tx.setTitle(rt.getTitle());
                textList.add(tx);
            }
        }

        // If there is no text to be mined, then return
        if (null == textList || textList.isEmpty()) {
            int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                    : MiningTask.STATUS_RUNNING;
            // Update task status
            jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);
            return;
        }

        List<Object[]> text_lines = new ArrayList<>();
        for (Text tx : textList) {
            Object[] ojarr = new Object[] { tx.getTitle(), tx.getText(), sTag,
                    String.valueOf(tx.getRawTextId()), formatter.format(tx.getCreateTime()) };
            text_lines.add(ojarr);
        }
        String insertSQL = "INSERT INTO c_text(mme_lastupdate, mme_updater, title, text, tag, rawtext_id, text_createdate) "
                + "VALUES (NOW(), \"USTTMP\", ?, ?, ?, ?, ?)";
        System.out.println("Start to insert text records...");
        jt.batchUpdate(insertSQL, text_lines);

        // statistic time
        preprocessTime = new Date();

        /**************************************************************
         * 2. Mining topics
         **************************************************************/
        // Clear the existing topics
        String clearTopicSQL = "DELETE FROM c_topic WHERE miningtask_id=? AND seq_no = ?";
        jt.update(clearTopicSQL, miningTaskId, nextExecCount);

        topicList = miningComponent.generateTopics(mt, textList);

        String insertTpSQL = "INSERT INTO `c_topic` ( `mme_lastupdate`, `mme_updater`, `name`, `content`, "
                + "`remark`, `miningtask_id`, `seq_no` ) VALUES (NOW(), 'USTTMP' ,?,?,?,?,?)";
        List<Object[]> tpArgsList = new ArrayList<>();
        for (Topic tm : topicList) {
            Object[] objarr = new Object[] { tm.getName(), tm.toString(),
                    (null != tm.getRemark()) ? tm.getRemark() : "", miningTaskId, nextExecCount };
            tpArgsList.add(objarr);
        }
        System.out.println("Inserting records into the c_topic table...");
        jt.batchUpdate(insertTpSQL, tpArgsList);

        // statistic time
        miningTime = new Date();

        /**************************************************************
         * 3. Evolution tracking
         **************************************************************/
        if (hasTC) {
            if (nextExecCount > 1) {
                if (null != topicList && !topicList.isEmpty()) {
                    int preTopicSeq = nextExecCount - 1;
                    int nextTopicSeq = nextExecCount;

                    // Clear existing topic evolution relationships
                    String clearEvSQL = "DELETE FROM c_topicevolutionrela "
                            + "WHERE miningtask_id =? AND pre_topic_seq =? AND next_topic_seq =?";
                    jt.update(clearEvSQL, miningTaskId, preTopicSeq, nextTopicSeq);

                    List<Topic> preTopics = topicMiningService.getTopics(miningTaskId, preTopicSeq);
                    List<Topic> nextTopics = topicMiningService.getTopics(miningTaskId, nextTopicSeq);
                    if (null == preTopics || preTopics.isEmpty()) {
                        throw new UsttmpProcessException(UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION);
                    }
                    if (null == nextTopics || nextTopics.isEmpty()) {
                        throw new UsttmpProcessException(UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION);
                    }

                    List<EvolutionRelationship> evRelaList = trackingComponent
                            .getTopicEvolutionRelationships(mt, preTopics, nextTopics);

                    String insertEvSql = "INSERT INTO `c_topicevolutionrela` ( `pre_topic_id`, `next_topic_id`, "
                            + "`rank_against_pre_topic_in_next_group`, `rank_against_next_topic_in_pre_group`, "
                            + "`similarity` , `miningtask_id` , `pre_topic_seq` , `next_topic_seq` ) "
                            + "VALUES (?, ?, ?, ?, ?, ?, ?, ?)";
                    List<Object[]> argsList = new ArrayList<>();
                    for (EvolutionRelationship er : evRelaList) {
                        Object[] objarr = new Object[] { er.getPreTopic().getId(), er.getNextTopic().getId(),
                                er.getRankAgainstPreTopicInNextGroup(), er.getRankAgainstNextTopicInPreGroup(),
                                er.getSimilarity(), miningTaskId, preTopicSeq, nextTopicSeq };
                        argsList.add(objarr);
                    }
                    jt.batchUpdate(insertEvSql, argsList);
                }
            }
        }

        int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                : MiningTask.STATUS_RUNNING;
        // Update task status
        jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);

        // statistic time
        trackingTime = new Date();
    } catch (UsttmpProcessException e) {
        if (UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION.equals(e.getMessage())) {
            // Update task status
            int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                    : MiningTask.STATUS_RUNNING;
            jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);
        }
        e.printStackTrace();
        StringWriter errors = new StringWriter();
        e.printStackTrace(new PrintWriter(errors));
        logger.error(errors.toString());
        // Log to the exception table
        miningTaskService.logMiningTask(MiningTaskService.LOG_TYPE_EXCEPTION, miningTaskId, errors.toString());
        // statistic time
        trackingTime = new Date();
    } catch (Exception e) {
        e.printStackTrace();
        StringWriter errors = new StringWriter();
        e.printStackTrace(new PrintWriter(errors));
        logger.error(errors.toString());
        // Log to the exception table
        miningTaskService.logMiningTask(MiningTaskService.LOG_TYPE_EXCEPTION, miningTaskId, errors.toString());
        // statistic time
        trackingTime = new Date();
    } finally {
        long statistic_PreprocessTime = (preprocessTime.getTime() - statisticStartTime.getTime()) / 1000;
        long statistic_MiningTime = (miningTime.getTime() - preprocessTime.getTime()) / 1000;
        long statistic_TrackingTime = (trackingTime.getTime() - miningTime.getTime()) / 1000;
        long statistic_TotalTime = (trackingTime.getTime() - statisticStartTime.getTime()) / 1000;

        logger.info("============================ This is start log. ============================");
        logger.info("Total number of texts being processed is " + statistic_TotalNum + ".");
        logger.info("Preprocess time is " + statistic_PreprocessTime + " seconds.");
        logger.info("Mining time is " + statistic_MiningTime + " seconds.");
        logger.info("Tracking time is " + statistic_TrackingTime + " seconds.");
        logger.info("Total time is " + statistic_TotalTime + " seconds.");
        logger.info("============================ This is end log. ============================");
    }
}
From source file:org.agnitas.dao.impl.ImportRecipientsDaoImpl.java
@Override
public void createRecipients(final Map<ProfileRecipientFields, ValidatorResults> recipientBeansMap,
        final Integer adminID, final ImportProfile profile, final Integer type, int datasource_id,
        CSVColumnState[] columns) {
    if (recipientBeansMap.isEmpty()) {
        return;
    }

    final JdbcTemplate template = getJdbcTemplateForTemporaryTable();
    final String prefix = "cust_" + adminID + "_tmp_";
    final String tableName = prefix + datasource_id + "_tbl";
    final ProfileRecipientFields[] recipients = recipientBeansMap.keySet()
            .toArray(new ProfileRecipientFields[recipientBeansMap.keySet().size()]);

    String keyColumn = profile.getKeyColumn();
    List<String> keyColumns = profile.getKeyColumns();
    String duplicateSql = "";
    String duplicateSqlParams = "";
    if (keyColumns.isEmpty()) {
        duplicateSql += " column_duplicate_check_0 ";
        duplicateSqlParams = "?";
    } else {
        for (int i = 0; i < keyColumns.size(); i++) {
            duplicateSql += "column_duplicate_check_" + i;
            duplicateSqlParams += "?";
            if (i != keyColumns.size() - 1) {
                duplicateSql += ",";
                duplicateSqlParams += ",";
            }
        }
    }

    final List<CSVColumnState> temporaryKeyColumns = new ArrayList<CSVColumnState>();
    for (CSVColumnState column : columns) {
        if (keyColumns.isEmpty()) {
            if (column.getColName().equals(keyColumn) && column.getImportedColumn()) {
                temporaryKeyColumns.add(column);
            }
        } else {
            for (String columnName : keyColumns) {
                if (column.getColName().equals(columnName) && column.getImportedColumn()) {
                    temporaryKeyColumns.add(column);
                    break;
                }
            }
        }
    }

    final String query = "INSERT INTO " + tableName
            + " (recipient, validator_result, temporary_id, status_type, " + duplicateSql
            + ") VALUES (?,?,?,?," + duplicateSqlParams + ")";

    final BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            ps.setBytes(1, ImportUtils.getObjectAsBytes(recipients[i]));
            ps.setBytes(2, ImportUtils.getObjectAsBytes(recipientBeansMap.get(recipients[i])));
            ps.setString(3, recipients[i].getTemporaryId());
            ps.setInt(4, type);
            for (int j = 0; j < temporaryKeyColumns.size(); j++) {
                setPreparedStatmentForCurrentColumn(ps, 5 + j, temporaryKeyColumns.get(j), recipients[i],
                        profile, recipientBeansMap.get(recipients[i]));
            }
            if (logger.isInfoEnabled()) {
                logger.info("Import ID: " + profile.getImportId() + " Adding recipient to temp-table: "
                        + Toolkit.getValueFromBean(recipients[i], profile.getKeyColumn()));
            }
        }

        public int getBatchSize() {
            return recipientBeansMap.size();
        }
    };
    template.batchUpdate(query, setter);
}
From source file:org.agnitas.dao.impl.ImportRecipientsDaoImpl.java
@Override
public void updateRecipients(final Map<ProfileRecipientFields, ValidatorResults> recipientBeans,
        Integer adminID, final int type, final ImportProfile profile, int datasource_id,
        CSVColumnState[] columns) {
    if (recipientBeans.isEmpty()) {
        return;
    }

    final JdbcTemplate template = getJdbcTemplateForTemporaryTable();
    final String prefix = "cust_" + adminID + "_tmp_";
    final String tableName = prefix + datasource_id + "_tbl";
    final ProfileRecipientFields[] recipients = recipientBeans.keySet()
            .toArray(new ProfileRecipientFields[recipientBeans.keySet().size()]);

    String keyColumn = profile.getKeyColumn();
    List<String> keyColumns = profile.getKeyColumns();
    String duplicateSql = "";
    if (keyColumns.isEmpty()) {
        duplicateSql += " column_duplicate_check_0=? ";
    } else {
        for (int i = 0; i < keyColumns.size(); i++) {
            duplicateSql += " column_duplicate_check_" + i + "=? ";
            if (i != keyColumns.size() - 1) {
                duplicateSql += ", ";
            }
        }
    }

    final String query = "UPDATE " + tableName + " SET recipient=?, validator_result=?, status_type=?, "
            + duplicateSql + " WHERE temporary_id=?";

    final List<CSVColumnState> temporaryKeyColumns = new ArrayList<CSVColumnState>();
    for (CSVColumnState column : columns) {
        if (keyColumns.isEmpty()) {
            if (column.getColName().equals(keyColumn) && column.getImportedColumn()) {
                temporaryKeyColumns.add(column);
            }
        } else {
            for (String columnName : keyColumns) {
                if (column.getColName().equals(columnName) && column.getImportedColumn()) {
                    temporaryKeyColumns.add(column);
                    break;
                }
            }
        }
    }

    final BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            ps.setBytes(1, ImportUtils.getObjectAsBytes(recipients[i]));
            ps.setBytes(2, ImportUtils.getObjectAsBytes(recipientBeans.get(recipients[i])));
            ps.setInt(3, type);
            for (int j = 0; j < temporaryKeyColumns.size(); j++) {
                setPreparedStatmentForCurrentColumn(ps, 4 + j, temporaryKeyColumns.get(j), recipients[i],
                        profile, recipientBeans.get(recipients[i]));
            }
            ps.setString(4 + temporaryKeyColumns.size(), recipients[i].getTemporaryId());
            if (logger.isInfoEnabled()) {
                logger.info("Import ID: " + profile.getImportId() + " Updating recipient in temp-table: "
                        + Toolkit.getValueFromBean(recipients[i], profile.getKeyColumn()));
            }
        }

        public int getBatchSize() {
            return recipientBeans.size();
        }
    };
    template.batchUpdate(query, setter);
}