List of usage examples for org.springframework.jdbc.core.BatchPreparedStatementSetter
BatchPreparedStatementSetter
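BatchPreparedStatementSetter is the callback interface that JdbcTemplate.batchUpdate(String, BatchPreparedStatementSetter) uses to build a JDBC batch: getBatchSize() reports how many statements go into the batch, and setValues(PreparedStatement, int) binds the parameters for the i-th statement. Before the full examples below, here is a minimal sketch of the pattern; the "users" table, its columns, and the (name, email) rows are illustrative assumptions, not taken from any of the projects listed.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;

public class UserBatchInserter {

    private final JdbcTemplate jdbcTemplate;

    public UserBatchInserter(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    /**
     * Inserts every (name, email) pair in a single JDBC batch.
     * Returns one update count per row, as reported by the driver.
     */
    public int[] insertAll(final List<String[]> rows) {
        String sql = "INSERT INTO users (name, email) VALUES (?, ?)";
        return jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

            // Called once per statement in the batch to bind its parameters.
            public void setValues(PreparedStatement ps, int i) throws SQLException {
                String[] row = rows.get(i);
                ps.setString(1, row[0]);
                ps.setString(2, row[1]);
            }

            // Tells JdbcTemplate how many statements to add to the batch.
            public int getBatchSize() {
                return rows.size();
            }
        });
    }
}

The examples that follow use the same two-method shape; they differ only in how each project derives the batch size and the per-row values.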
From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaMediaDao.java
/**
 * Insert new media in a single batch statement
 *
 * @param newMediaIndex
 * @param drops
 */
private void batchInsert(final Map<String, List<int[]>> newMediaIndex, final List<Drop> drops, Sequence seq) {
    final List<String> hashes = new ArrayList<String>();
    hashes.addAll(newMediaIndex.keySet());

    final long startKey = sequenceDao.getIds(seq, hashes.size());

    String sql = "INSERT INTO media (id, hash, url, type) VALUES (?,?,?,?)";

    jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            String hash = hashes.get(i);

            // Update drops with the newly generated id
            for (int[] index : newMediaIndex.get(hash)) {
                drops.get(index[0]).getMedia().get(index[1]).setId(startKey + i);
            }

            int[] index = newMediaIndex.get(hash).get(0);
            Media media = drops.get(index[0]).getMedia().get(index[1]);

            ps.setLong(1, media.getId());
            ps.setString(2, media.getHash());
            ps.setString(3, media.getUrl());
            ps.setString(4, media.getType());
        }

        public int getBatchSize() {
            return hashes.size();
        }
    });

    // Media thumbnails. getBatchSize() is invoked before setValues(), and each batch row
    // binds exactly one statement, so the thumbnails are flattened into a list (with their
    // owning media ids) up front rather than counted inside setValues().
    final List<MediaThumbnail> thumbnails = new ArrayList<MediaThumbnail>();
    final List<Long> thumbnailMediaIds = new ArrayList<Long>();
    for (String hash : hashes) {
        int[] index = newMediaIndex.get(hash).get(0);
        Media media = drops.get(index[0]).getMedia().get(index[1]);
        for (MediaThumbnail thumbnail : media.getThumbnails()) {
            thumbnails.add(thumbnail);
            thumbnailMediaIds.add(media.getId());
        }
    }

    sql = "INSERT INTO media_thumbnails (`media_id`, `size`, `url`) VALUES (?, ?, ?)";

    jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            ps.setLong(1, thumbnailMediaIds.get(i));
            ps.setInt(2, thumbnails.get(i).getSize());
            ps.setString(3, thumbnails.get(i).getUrl());
        }

        public int getBatchSize() {
            return thumbnails.size();
        }
    });

    // Update the droplet_media table
    insertDropletMedia(drops);
}
From source file:gov.nih.nci.cabig.caaers.dao.MedDRADao.java
/**
 * Populates the meddra_hlgt table from the meddra_hlgt.asc file.
 * Different SQL is used for the PostgreSQL and Oracle databases.
 *
 * @param llts
 * @param startIndex
 * @param version_id
 * @return
 */
public int[] insertHighLevelGroupTerms(final List llts, final int startIndex, final int version_id) {
    String sql = "insert into meddra_hlgt (meddra_code, meddra_term, version_id) values (?,?,?)";

    String dataBase = "";
    if (properties.getProperty(DB_NAME) != null) {
        dataBase = properties.getProperty(DB_NAME);
    }
    if (dataBase.equals(ORACLE_DB)) {
        sql = "insert into meddra_hlgt (id, meddra_code, meddra_term, version_id) "
                + "values (SEQ_MEDDRA_HLGT_ID.NEXTVAL,?,?,?)";
    }

    BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

        public int getBatchSize() {
            return llts.size();
        }

        public void setValues(PreparedStatement ps, int index) throws SQLException {
            String[] llt = (String[]) llts.get(index);
            ps.setString(1, llt[0]);
            ps.setString(2, llt[1]);
            ps.setInt(3, version_id);
        }
    };

    return jdbcTemplate.batchUpdate(sql, setter);
}
From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaTagDao.java
/**
 * Insert new tags in a single batch statement
 *
 * @param newTagIndex
 * @param drops
 */
private void batchInsert(final Map<String, List<int[]>> newTagIndex, final List<Drop> drops, Sequence seq) {
    final List<String> hashes = new ArrayList<String>();
    hashes.addAll(newTagIndex.keySet());

    final long startKey = sequenceDao.getIds(seq, hashes.size());

    String sql = "INSERT INTO tags (id, hash, tag, tag_canonical, tag_type) VALUES (?,?,?,?,?)";

    jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            String hash = hashes.get(i);

            // Update drops with the newly generated id
            for (int[] index : newTagIndex.get(hash)) {
                drops.get(index[0]).getTags().get(index[1]).setId(startKey + i);
            }

            int[] index = newTagIndex.get(hash).get(0);
            Tag tag = drops.get(index[0]).getTags().get(index[1]);

            ps.setLong(1, tag.getId());
            ps.setString(2, tag.getHash());
            ps.setString(3, tag.getTag());
            ps.setString(4, tag.getTagCanonical());
            ps.setString(5, tag.getType());
        }

        public int getBatchSize() {
            return hashes.size();
        }
    });

    // Update the droplet_tags table
    insertDropletTags(drops);
}
From source file:org.ala.dao.DocumentDAOImpl.java
/**
 * @param docs
 */
public void save(final List<Document> docs) {
    getJdbcTemplate().batchUpdate(
            "insert into document (infosource_id, uri, file_path, mime_type) values (?,?,?,?)",
            new BatchPreparedStatementSetter() {

                @Override
                public int getBatchSize() {
                    return docs.size();
                }

                @Override
                public void setValues(PreparedStatement pstmt, int rowId) throws SQLException {
                    // The statement declares four placeholders, so all four must be bound;
                    // getInfoSourceId() is assumed to be the accessor backing infosource_id.
                    pstmt.setInt(1, docs.get(rowId).getInfoSourceId());
                    pstmt.setString(2, docs.get(rowId).getUri());
                    pstmt.setString(3, docs.get(rowId).getFilePath());
                    pstmt.setString(4, docs.get(rowId).getMimeType());
                }
            });
}
From source file:architecture.ee.web.community.page.dao.jdbc.JdbcPageDao.java
private void updateProperties(final Page page) {
    Map<String, String> oldProps = loadProperties(page);
    log.debug("old:" + oldProps);
    log.debug("new:" + page.getProperties());

    final List<String> deleteKeys = getDeletedPropertyKeys(oldProps, page.getProperties());
    final List<String> modifiedKeys = getModifiedPropertyKeys(oldProps, page.getProperties());
    final List<String> addedKeys = getAddedPropertyKeys(oldProps, page.getProperties());

    log.debug("deleteKeys:" + deleteKeys.size());
    if (!deleteKeys.isEmpty()) {
        getExtendedJdbcTemplate().batchUpdate(
                getBoundSql("ARCHITECTURE_COMMUNITY.DELETE_PAGE_PROPERTY_BY_NAME").getSql(),
                new BatchPreparedStatementSetter() {
                    public void setValues(PreparedStatement ps, int i) throws SQLException {
                        ps.setLong(1, page.getPageId());
                        ps.setLong(2, page.getVersionId());
                        ps.setString(3, deleteKeys.get(i));
                    }

                    public int getBatchSize() {
                        return deleteKeys.size();
                    }
                });
    }

    log.debug("modifiedKeys:" + modifiedKeys.size());
    if (!modifiedKeys.isEmpty()) {
        getExtendedJdbcTemplate().batchUpdate(
                getBoundSql("ARCHITECTURE_COMMUNITY.UPDATE_PAGE_PROPERTY_BY_NAME").getSql(),
                new BatchPreparedStatementSetter() {
                    public void setValues(PreparedStatement ps, int i) throws SQLException {
                        String key = modifiedKeys.get(i);
                        String value = page.getProperties().get(key);
                        log.debug("batch[" + key + "=" + value + "]");
                        ps.setString(1, value);
                        ps.setLong(2, page.getPageId());
                        ps.setLong(3, page.getVersionId());
                        ps.setString(4, key);
                    }

                    public int getBatchSize() {
                        return modifiedKeys.size();
                    }
                });
    }

    log.debug("addedKeys:" + addedKeys.size());
    if (!addedKeys.isEmpty()) {
        getExtendedJdbcTemplate().batchUpdate(
                getBoundSql("ARCHITECTURE_COMMUNITY.INSERT_PAGE_PROPERTY").getSql(),
                new BatchPreparedStatementSetter() {
                    public void setValues(PreparedStatement ps, int i) throws SQLException {
                        ps.setLong(1, page.getPageId());
                        ps.setLong(2, page.getVersionId());
                        String key = addedKeys.get(i);
                        String value = page.getProperty(key, null);
                        log.debug("batch[" + key + "=" + value + "]");
                        ps.setString(3, key);
                        ps.setString(4, value);
                    }

                    public int getBatchSize() {
                        return addedKeys.size();
                    }
                });
    }
}
From source file:shell.framework.organization.agentcode.service.impl.TblSysAgentCodeService4JdbcImpl.java
public int unAssignUser(TblSysAgentCodeVO agentCodeVO) {
    String sql = "update TBL_SYS_USER set AGENTCODE_ID = NULL where ID = ?";

    String[] sysUserIds = agentCodeVO.getSysUser().getId().split("-");
    final List<String> idList = new ArrayList<String>();
    for (String id : sysUserIds) {
        idList.add(id);
    }

    int[] updateCounts = jdbcBaseDao.batchUpdate(sql, idList, new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int index) throws SQLException {
            String sysUserId = idList.get(index);
            ps.setString(1, sysUserId);
        }

        public int getBatchSize() {
            return idList.size();
        }
    });

    return updateCounts.length;
}
From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaPlaceDao.java
/**
 * Insert new places in a single batch statement
 *
 * @param newPlaceIndex
 * @param drops
 */
private void batchInsert(final Map<String, List<int[]>> newPlaceIndex, final List<Drop> drops, Sequence seq) {
    final List<String> hashes = new ArrayList<String>();
    hashes.addAll(newPlaceIndex.keySet());

    final long startKey = sequenceDao.getIds(seq, hashes.size());

    String sql = "INSERT INTO places (id, hash, place_name, place_name_canonical, longitude, latitude) "
            + "VALUES (?,?,?,?,?,?)";

    jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            String hash = hashes.get(i);

            // Update drops with the newly generated id
            for (int[] index : newPlaceIndex.get(hash)) {
                drops.get(index[0]).getPlaces().get(index[1]).setId(startKey + i);
            }

            int[] index = newPlaceIndex.get(hash).get(0);
            Place place = drops.get(index[0]).getPlaces().get(index[1]);

            ps.setLong(1, place.getId());
            ps.setString(2, place.getHash());
            ps.setString(3, place.getPlaceName());
            ps.setString(4, place.getPlaceNameCanonical());
            ps.setFloat(5, place.getLongitude());
            ps.setFloat(6, place.getLatitude());
        }

        public int getBatchSize() {
            return hashes.size();
        }
    });

    // Update the droplet_places table
    insertDropletPlaces(drops);
}
From source file:shell.framework.organization.user.service.impl.TblSysUserService4JdbcImpl.java
public int deleteByID(TblSysUserVO userVO) {
    String sql = "delete from TBL_SYS_USER where ID = ?";

    final List<String> idList = new ArrayList<String>();
    String[] ids = userVO.getId().split("-");
    for (String id : ids) {
        idList.add(id);
    }

    // TODO
    int[] deleteNumbers = jdbcBaseDao.batchUpdate(sql, idList, new BatchPreparedStatementSetter() {

        public int getBatchSize() {
            return idList.size();
        }

        public void setValues(PreparedStatement ps, int index) throws SQLException {
            String id = idList.get(index);
            ps.setString(1, id);
        }
    });

    return deleteNumbers.length;
}
From source file:com.serotonin.mango.db.dao.UserDao.java
private void saveRelationalData(final User user) {
    // Delete existing permissions.
    ejt.update("delete from dataSourceUsers where userId=?", new Object[] { user.getId() });
    ejt.update("delete from dataPointUsers where userId=?", new Object[] { user.getId() });

    // Save the new ones.
    ejt.batchUpdate("insert into dataSourceUsers (dataSourceId, userId) values (?,?)",
            new BatchPreparedStatementSetter() {
                public int getBatchSize() {
                    return user.getDataSourcePermissions().size();
                }

                public void setValues(PreparedStatement ps, int i) throws SQLException {
                    ps.setInt(1, user.getDataSourcePermissions().get(i));
                    ps.setInt(2, user.getId());
                }
            });

    ejt.batchUpdate("insert into dataPointUsers (dataPointId, userId, permission) values (?,?,?)",
            new BatchPreparedStatementSetter() {
                public int getBatchSize() {
                    return user.getDataPointPermissions().size();
                }

                public void setValues(PreparedStatement ps, int i) throws SQLException {
                    ps.setInt(1, user.getDataPointPermissions().get(i).getDataPointId());
                    ps.setInt(2, user.getId());
                    ps.setInt(3, user.getDataPointPermissions().get(i).getPermission());
                }
            });
}
From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaDropDao.java
/**
 * Insert new drops in a single batch statement
 *
 * @param newDropIndex
 * @param drops
 */
private void batchInsert(final Map<String, List<Integer>> newDropIndex, final List<Drop> drops, Sequence seq) {
    final List<String> hashes = new ArrayList<String>();
    hashes.addAll(newDropIndex.keySet());

    final long startKey = sequenceDao.getIds(seq, hashes.size());

    String sql = "INSERT INTO droplets (id, channel, droplet_hash, droplet_orig_id, droplet_title, "
            + "droplet_content, droplet_date_pub, droplet_date_add, identity_id) VALUES (?,?,?,?,?,?,?,?,?)";

    jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            String hash = hashes.get(i);

            // Update drops with the newly generated id
            for (int index : newDropIndex.get(hash)) {
                drops.get(index).setId(startKey + i);
            }

            Drop drop = drops.get(newDropIndex.get(hash).get(0));

            ps.setLong(1, drop.getId());
            ps.setString(2, drop.getChannel());
            ps.setString(3, drop.getHash());
            ps.setString(4, drop.getOriginalId());
            ps.setString(5, drop.getTitle());
            ps.setString(6, drop.getContent());
            ps.setTimestamp(7, new java.sql.Timestamp(drop.getDatePublished().getTime()));
            ps.setTimestamp(8, new java.sql.Timestamp((new Date()).getTime()));
            ps.setLong(9, drop.getIdentity().getId());
        }

        public int getBatchSize() {
            return hashes.size();
        }
    });

    logger.debug("Successfully saved {} drops in the database", hashes.size());
}