Example usage for org.springframework.jdbc.core BatchPreparedStatementSetter BatchPreparedStatementSetter

List of usage examples for org.springframework.jdbc.core BatchPreparedStatementSetter BatchPreparedStatementSetter

Introduction

On this page you can find example usages of the org.springframework.jdbc.core BatchPreparedStatementSetter interface (anonymous implementations of BatchPreparedStatementSetter).

Prototype

BatchPreparedStatementSetter

Source Link

Usage

From source file:gov.nih.nci.cabig.caaers.dao.MedDRADao.java

/**
 * This method loads the meddra_soc_hlgt mapping table. New ids that were generated while loading data into meddra_soc and meddra_hlgt
 * tables are used. Earlier meddra_code were used as ids and supporting multiple versions was not possible in that case.
 * /* w  w  w  .j a v  a 2 s  .co m*/
 * @param llts
 * @param startIndex
 * @param socCodeToIdMap
 * @param hlgtCodeToIdMap
 * @return
 */
public int[] insertSOCxHLGT(final List llts, final int startIndex, final int version_id,
        final Map<String, Integer> socCodeToIdMap, final Map<String, Integer> hlgtCodeToIdMap) {

    String sql = "insert into meddra_soc_hlgt (meddra_soc_id, meddra_hlgt_id, version_id) values (?,?,?)";

    String dataBase = "";
    if (properties.getProperty(DB_NAME) != null) {
        dataBase = properties.getProperty(DB_NAME);
    }
    if (dataBase.equals(ORACLE_DB))
        sql = "insert into meddra_soc_hlgt (id,meddra_soc_id, meddra_hlgt_id, version_id) values (SEQ_MEDDRA_SOC_HLGT_ID.NEXTVAL,?,?,?)";

    BatchPreparedStatementSetter setter = null;
    setter = new BatchPreparedStatementSetter() {

        int stIndex = startIndex;

        public int getBatchSize() {
            return llts.size();
        }

        public void setValues(PreparedStatement ps, int index) throws SQLException {
            String[] llt = (String[]) llts.get(index);

            ps.setInt(1, (socCodeToIdMap.get(llt[0]).intValue()));
            ps.setInt(2, (hlgtCodeToIdMap.get(llt[1]).intValue()));
            ps.setInt(3, version_id);
        }
    };

    return jdbcTemplate.batchUpdate(sql, setter);

}

From source file:info.raack.appliancelabeler.data.JDBCDatabase.java

/**
 * Stores a batch of second-level energy measurements for the given monitor in chunks,
 * then records the last offset consumed from the upstream data source.
 *
 * @param energyMonitor monitor the readings belong to
 * @param data second-by-second readings to persist
 * @param lastOffset offset of the last datum consumed, persisted for later resumption
 */
public void storeData(final EnergyMonitor energyMonitor, final List<SecondData> data, final Long lastOffset) {
    // check to see if we need a new row in energy_monitor
    final String userId = energyMonitor.getUserId();
    final String monitorId = energyMonitor.getMonitorId();

    final int energyMonitorId = getIdForEnergyMonitor(energyMonitor);

    // save the batch
    long timestart = System.currentTimeMillis();

    // Insert at most MAX_INSERTS rows per JDBC batch so a huge upload does not build one
    // gigantic batch. Hoisted out of the loop — it is a constant. (The original comment
    // said "a million rows" but the limit has always been two million.)
    final int MAX_INSERTS = 2000000;

    boolean lastLoop = false;
    for (int j = 0; !lastLoop; j++) {
        final List<SecondData> tempData = new ArrayList<SecondData>();

        int start = j * MAX_INSERTS;
        int end;
        if (data.size() < (j + 1) * MAX_INSERTS) {
            // remaining data fits into this final chunk
            end = data.size();
            lastLoop = true;
        } else {
            end = (j + 1) * MAX_INSERTS;
        }
        logger.debug("Inserting records from " + start + " to " + end);
        tempData.addAll(data.subList(start, end));

        logger.debug("Inserting " + tempData.size() + " energy measurements for " + userId);
        jdbcTemplate.batchUpdate(insertNewEnergyReadings, new BatchPreparedStatementSetter() {
            public void setValues(PreparedStatement ps, int i) throws SQLException {

                SecondData secondData = tempData.get(i);

                // Progress logging for very large chunks.
                if (i % 10000 == 0) {
                    logger.debug("Preparing for record " + i);
                }

                ps.setInt(1, energyMonitorId);
                ps.setLong(2, secondData.getCalLong());
                ps.setInt(3, secondData.getPower());
                ps.setFloat(4, secondData.getVoltage());
            }

            public int getBatchSize() {
                return tempData.size();
            }
        });
    }
    long timestop = System.currentTimeMillis();
    // Float division: the original integer division ("/ 1000") truncated sub-second runs
    // to 0 seconds and then reported an infinite rows-per-second rate.
    float total = (timestop - timestart) / 1000f;
    logger.debug("Inserting " + data.size() + " rows into database took " + total + " seconds; "
            + (data.size() / total) + " rows per second");

    jdbcTemplate.update(updateLastOffset, new Object[] { lastOffset, userId, monitorId });
}

From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaDropDao.java

/**
 * Populates the rivers_droplets table: determines, for every drop, which active rivers
 * should receive it (via the drop's channels), inserts the missing rivers_droplets rows
 * in a single batch, then updates the per-river counters (max_drop_id, drop_count), the
 * per-channel drop counts, and finally the tag trend statistics.
 *
 * @param drops the drops to post to their destination rivers
 */
private void insertRiverDrops(final List<Drop> drops) {

    // Get a lock on rivers_droplets
    Sequence seq = sequenceDao.findById("rivers_droplets");

    // Mapping of drop id to list index position
    final Map<Long, Integer> dropIndex = new HashMap<Long, Integer>();

    // List of rivers in a drop
    Map<Long, Set<Long>> dropRiversMap = new HashMap<Long, Set<Long>>();
    Map<Long, Set<Long>> dropChannelsMap = new HashMap<Long, Set<Long>>();

    // Registry for all channels and rivers
    Set<Long> allChannelIds = new HashSet<Long>();

    // Index the incoming drops; drops with no destination rivers/channels are skipped.
    int i = 0;
    for (Drop drop : drops) {
        if (drop.getRiverIds() == null || drop.getChannelIds() == null) {
            logger.debug("No rivers or channels for drop {}", drop.getId());
            continue;
        }

        Set<Long> rivers = new HashSet<Long>();
        Set<Long> channels = new HashSet<Long>();

        rivers.addAll(drop.getRiverIds());
        channels.addAll(drop.getChannelIds());

        dropRiversMap.put(drop.getId(), rivers);
        dropChannelsMap.put(drop.getId(), channels);

        allChannelIds.addAll(channels);

        dropIndex.put(drop.getId(), i++);
    }

    // No rivers found, exit
    if (dropIndex.size() == 0)
        return;

    // Find already existing rivers_droplets
    String sql = "SELECT droplet_id, river_id FROM rivers_droplets WHERE droplet_id in (:ids)";

    MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue("ids", dropIndex.keySet());

    List<Map<String, Object>> results = this.namedJdbcTemplate.queryForList(sql, params);

    logger.debug("Skipping {} entries from rivers_droplets", results.size());

    // Remove existing rivers_droplets entries from our Set so they are not re-inserted.
    for (Map<String, Object> row : results) {
        Long dropletId = ((Number) row.get("droplet_id")).longValue();
        Long riverId = ((Number) row.get("river_id")).longValue();

        Set<Long> riverSet = dropRiversMap.remove(dropletId);
        if (riverSet != null) {
            riverSet.remove(riverId);

            // Only add back the destination rivers if the set is non empty
            if (!riverSet.isEmpty()) {
                dropRiversMap.put(dropletId, riverSet);
            }
        }
    }

    // If all drops are duplicates, return early
    if (dropRiversMap.isEmpty()) {
        logger.info("No drops to add to the rivers");
        return;
    }

    // Associate the channels with active rivers; inactive rivers are filtered out here.
    sql = "SELECT rc.id, rc.river_id " + "FROM river_channels rc "
            + "INNER JOIN rivers r ON (rc.river_id = r.id) " + "WHERE rc.id IN (:channelIds) "
            + "AND r.river_active = 1";
    MapSqlParameterSource channelParams = new MapSqlParameterSource();
    channelParams.addValue("channelIds", allChannelIds);

    Map<Long, Long> riverChannelsMap = new HashMap<Long, Long>();
    for (Map<String, Object> row : namedJdbcTemplate.queryForList(sql, channelParams)) {

        Long channelId = ((Number) row.get("id")).longValue();
        Long riverId = ((Number) row.get("river_id")).longValue();

        riverChannelsMap.put(channelId, riverId);
    }

    // Map to hold the association between a drop, river and channel
    // During the association, we verify that the river is in the drop's
    // destination river list
    final List<Map<String, Long>> riverDropChannelList = new ArrayList<Map<String, Long>>();
    Set<RiverDropKey> riverDropKeySet = new HashSet<JpaDropDao.RiverDropKey>();
    for (Long dropletId : dropChannelsMap.keySet()) {
        for (Long channelId : dropChannelsMap.get(dropletId)) {
            if (riverChannelsMap.containsKey(channelId)) {
                Long riverId = riverChannelsMap.get(channelId);

                // Does the river drop key already exist? 
                RiverDropKey riverDropKey = new RiverDropKey(riverId, dropletId);
                if (riverDropKeySet.contains(riverDropKey))
                    continue;

                // Does not exist. Add to the in-memory registry
                riverDropKeySet.add(riverDropKey);

                if (dropRiversMap.containsKey(dropletId) && dropRiversMap.get(dropletId).contains(riverId)) {
                    Map<String, Long> entry = new HashMap<String, Long>();
                    entry.put("dropletId", dropletId);
                    entry.put("channelId", channelId);
                    entry.put("riverId", riverId);
                    riverDropChannelList.add(entry);
                }
            }
        }
    }

    logger.debug("Posting drops to rivers");

    // Insert the remaining items in the set into the DB
    sql = "INSERT INTO `rivers_droplets` (`id`, `droplet_id`, `river_id`, "
            + "`river_channel_id`, `droplet_date_pub`) " + "VALUES (?, ?, ?, ?, ?)";

    // Reserve a contiguous block of ids from the sequence; row i gets startKey + i.
    final long startKey = sequenceDao.getIds(seq, riverDropChannelList.size());

    // Map to hold to hold the no. of drops created per channel
    final Map<Long, Long> channelDropCountMap = new HashMap<Long, Long>();

    // A map to hold the new max_drop_id and drop_count per river
    final Map<Long, long[]> riverDropsMap = new HashMap<Long, long[]>();
    jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Map<String, Long> dropEntry = riverDropChannelList.get(i);
            long id = startKey + i;

            Long dropletId = dropEntry.get("dropletId");
            Long riverId = dropEntry.get("riverId");
            Long channelId = dropEntry.get("channelId");
            Drop drop = drops.get(dropIndex.get(dropletId));

            ps.setLong(1, id);
            ps.setLong(2, dropletId);
            ps.setLong(3, riverId);
            ps.setLong(4, channelId);
            ps.setTimestamp(5, new java.sql.Timestamp(drop.getDatePublished().getTime()));

            // Get updated max_drop_id and drop_count for the rivers table.
            // Element 0 is the running max drop id, element 1 the running count.
            long[] update = riverDropsMap.get(riverId);
            if (update == null) {
                long[] u = { id, 1 };
                riverDropsMap.put(riverId, u);
            } else {
                update[0] = Math.max(update[0], id);
                update[1] = update[1] + 1;
            }

            // Update the drop count for the channel
            Long channelDropCount = channelDropCountMap.remove(channelId);
            channelDropCount = (channelDropCount == null) ? 1L : Long.valueOf(channelDropCount.longValue() + 1);
            channelDropCountMap.put(channelId, channelDropCount);
        }

        public int getBatchSize() {
            return riverDropChannelList.size();
        }
    });
    logger.debug("Drops successfully posted to rivers");

    // Update river max_drop_id and drop_count using the tallies accumulated above.
    logger.debug("Updating river drop counters");
    sql = "UPDATE rivers SET max_drop_id = ?, drop_count = drop_count + ? WHERE id = ?";
    final List<Entry<Long, long[]>> riverUpdate = new ArrayList<Entry<Long, long[]>>();
    riverUpdate.addAll(riverDropsMap.entrySet());

    this.jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Entry<Long, long[]> entry = riverUpdate.get(i);
            ps.setLong(1, entry.getValue()[0]);
            ps.setLong(2, entry.getValue()[1]);
            ps.setLong(3, entry.getKey());
        }

        public int getBatchSize() {
            return riverUpdate.size();
        }
    });
    logger.debug("{} rivers successfully updated", riverUpdate.size());

    // Update the drop_count in TABLE `river_channels`
    logger.debug("Updating river channel statistics");
    sql = "UPDATE river_channels SET drop_count = drop_count + ? WHERE id = ?";
    final List<Entry<Long, Long>> riverChannelUpdate = new ArrayList<Entry<Long, Long>>();
    riverChannelUpdate.addAll(channelDropCountMap.entrySet());

    this.jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Entry<Long, Long> entry = riverChannelUpdate.get(i);
            ps.setLong(1, entry.getValue());
            ps.setLong(2, entry.getKey());
        }

        @Override
        public int getBatchSize() {
            return riverChannelUpdate.size();
        }
    });
    logger.debug("{} channels updated", riverChannelUpdate.size());

    // Insert the trend data; trend failures are logged but do not roll back the inserts above.
    logger.debug("Updating trend statistics");
    try {
        insertRiverTagTrends(drops, dropIndex, riverDropChannelList);
    } catch (Exception e) {
        logger.error("An error occurred while inserting the trend data", e);
    }

}

From source file:shell.framework.organization.department.service.impl.TblSysDepartmentServiceI4JdbcImpl.java

/**
 * Detaches each of the given users from the given department.
 *
 * @param departmentId department the users are removed from
 * @param sysUserIds ids of the users to detach
 * @return the number of executed delete statements (one per user id)
 */
public int unassignSysUser(final String departmentId, String[] sysUserIds) {
    String sql = "delete user_department from TBL_SYS_USER_DEPARTMENT user_department where user_department.USER_ID=? and user_department.DEPARTMENT_ID=?";

    final List<String> idList = new ArrayList<String>(sysUserIds.length);
    for (String userId : sysUserIds) {
        idList.add(userId);
    }

    int[] deleteNumbers = jdbcBaseDao.batchUpdate(sql, idList, new BatchPreparedStatementSetter() {

        /** Binds (user id, department id) for statement {@code index}. */
        public void setValues(PreparedStatement ps, int index) throws SQLException {
            ps.setString(1, idList.get(index));
            ps.setString(2, departmentId);
        }

        /** One delete statement per user id. */
        public int getBatchSize() {
            return idList.size();
        }
    });
    return deleteNumbers.length;
}

From source file:net.sf.infrared.collector.impl.persistence.ApplicationStatisticsDaoImpl.java

/**
 * Persists the aggregate execution times of every layer in the given statistics
 * snapshot, issuing one batch insert per layer.
 *
 * @param stats statistics snapshot whose per-layer execution times are saved
 */
void saveExecutionTimes(final ApplicationStatistics stats) {
    final String appName = stats.getApplicationName();
    final String instanceId = stats.getInstanceId();

    final String[] layers = stats.getLayers();
    for (int i = 0; i < layers.length; i++) {
        final AggregateExecutionTime[] aets = stats.getExecutionsInLayer(layers[i]);
        final String layerName = layers[i];
        getJdbcTemplate().batchUpdate(SQL_INSERT_AGGREGATE_EXECUTION_TIME, new BatchPreparedStatementSetter() {
            // One insert per aggregate execution time recorded in this layer.
            public int getBatchSize() {
                return aets.length;
            }

            public void setValues(PreparedStatement ps, int j) throws SQLException {
                String name = aets[j].getContext().getName();

                ps.setString(1, appName);
                ps.setString(2, instanceId);
                ps.setString(3, aets[j].getContext().getLayer());
                ps.setString(4, aets[j].getContext().getClass().getName());

                // SQL query strings can be very large, so names are trimmed before being
                // stored. NOTE(review): the guard fires for length > MAX_LENGTH_OF_NAME but
                // trims to MAX_LENGTH_OF_NAME - 1 characters, and the log below prints the
                // already-trimmed name — confirm whether both are intentional.
                if (name != null && name.length() > MAX_LENGTH_OF_NAME) {
                    name = name.substring(0, MAX_LENGTH_OF_NAME - 1);
                    log.debug("The name element : " + name + " of the " + aets[j].getContext()
                            + " context exceeded its maximum allotted length of " + MAX_LENGTH_OF_NAME
                            + " and shall be trimmed down to the permitted size before persisting to database.");
                }
                ps.setString(5, name);

                ps.setLong(6, aets[j].getExecutionCount());
                ps.setLong(7, aets[j].getTotalInclusiveTime());
                ps.setLong(8, aets[j].getMaxInclusiveTime());
                ps.setLong(9, aets[j].getMinInclusiveTime());
                ps.setLong(10, aets[j].getTotalExclusiveTime());
                ps.setLong(11, aets[j].getMaxExclusiveTime());
                ps.setLong(12, aets[j].getMinExclusiveTime());
                ps.setLong(13, aets[j].getTimeOfFirstExecution());
                ps.setLong(14, aets[j].getTimeOfLastExecution());
                ps.setLong(15, aets[j].getInclusiveFirstExecutionTime());
                ps.setLong(16, aets[j].getInclusiveLastExecutionTime());
                ps.setLong(17, aets[j].getExclusiveFirstExecutionTime());
                ps.setLong(18, aets[j].getExclusiveLastExecutionTime());
                ps.setString(19, aets[j].getLayerName());
                ps.setTimestamp(20, new Timestamp(System.currentTimeMillis()));
            }
        });
        if (log.isDebugEnabled()) {
            log.debug("Scheduled batch update for saving " + aets.length + " executions times in layer "
                    + layerName + " to DB; stats=" + stats);
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("Saved all execution times of " + layers.length + " layers in " + stats + " to DB");
    }
}

From source file:shell.framework.organization.user.service.impl.TblSysUserService4JdbcImpl.java

public int assignSysRole(TblSysUserVO userVO) {
    String sql = "insert into TBL_SYS_USER_ROLE values (?,?)";
    final String userID = userVO.getId();
    String[] roleIDs = userVO.getRole().getId().split("-");
    final List<String> roleIDList = Arrays.asList(roleIDs);

    int[] deleteNumbers = jdbcBaseDao.batchUpdate(sql, roleIDList, new BatchPreparedStatementSetter() {
        /*/*from  w  ww.  j  av a2  s.  c  o m*/
         * (non-Javadoc)
         * @see org.springframework.jdbc.core.BatchPreparedStatementSetter#setValues(java.sql.PreparedStatement, int)
         */
        public void setValues(PreparedStatement ps, int index) throws SQLException {
            ps.setString(1, userID);
            ps.setString(2, roleIDList.get(index));
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.jdbc.core.BatchPreparedStatementSetter#getBatchSize()
         */
        public int getBatchSize() {
            return roleIDList.size();
        }
    });
    return deleteNumbers.length;
}

From source file:shell.framework.organization.department.service.impl.TblSysDepartmentServiceI4JdbcImpl.java

/**
 * Detaches each of the given positions from the given department.
 *
 * @param departmentId department the positions are removed from
 * @param positionIds ids of the positions to detach
 * @return the number of executed delete statements (one per position id)
 */
public int unassignPosition(final String departmentId, String[] positionIds) {
    String sql = "delete from TBL_SYS_DEPARTMENT_POSITION department_position where department_position.DEPARTMENT_ID=? and department_position.POSITION_ID=?";

    final List<String> idList = new ArrayList<String>(positionIds.length);
    for (String positionId : positionIds) {
        idList.add(positionId);
    }

    int[] deleteNumbers = jdbcBaseDao.batchUpdate(sql, idList, new BatchPreparedStatementSetter() {

        /** Binds (department id, position id) for statement {@code index}. */
        public void setValues(PreparedStatement ps, int index) throws SQLException {
            ps.setString(1, departmentId);
            ps.setString(2, idList.get(index));
        }

        /** One delete statement per position id. */
        public int getBatchSize() {
            return idList.size();
        }
    });
    return deleteNumbers.length;
}

From source file:shell.framework.organization.user.service.impl.TblSysUserService4JdbcImpl.java

/**
 * Detaches the given user from every role encoded in the VO's "-"-separated role id string.
 *
 * @param userVO carries the user id and the role ids to revoke
 * @return the number of executed delete statements (one per role id)
 */
public int unAssignSysRole(TblSysUserVO userVO) {
    String sql = "delete from TBL_SYS_USER_ROLE where USER_ID=? and ROLE_ID=?";
    final String userID = userVO.getId();
    final List<String> idList = Arrays.asList(userVO.getRole().getId().split("-"));

    int[] deleteNumbers = jdbcBaseDao.batchUpdate(sql, idList, new BatchPreparedStatementSetter() {

        /** Binds (user id, role id) for statement {@code index}. */
        public void setValues(PreparedStatement ps, int index) throws SQLException {
            ps.setString(1, userID);
            ps.setString(2, idList.get(index));
        }

        /** One delete statement per role id. */
        public int getBatchSize() {
            return idList.size();
        }
    });
    return deleteNumbers.length;
}

From source file:shell.framework.organization.department.service.impl.TblSysDepartmentServiceI4JdbcImpl.java

public int unAssignSysRole(TblSysDepartmentVO departmentVO) {
    String sql = "delete from TBL_SYS_ROLE_DEPARTMENT where ROLE_ID=? and DEPARTMENT_ID=?";
    final String departmentID = departmentVO.getId();
    String sysRoleIds[] = departmentVO.getRole().getRole().getId().split("-");
    final List<String> idList = Arrays.asList(sysRoleIds);

    int[] deleteNumbers = jdbcBaseDao.batchUpdate(sql, idList, new BatchPreparedStatementSetter() {

        /*/*from ww w  .  j a v  a2s .c om*/
         * (non-Javadoc)
         * @see org.springframework.jdbc.core.BatchPreparedStatementSetter#setValues(java.sql.PreparedStatement, int)
         */
        public void setValues(PreparedStatement ps, int index) throws SQLException {
            String sysRole_id = idList.get(index);
            ps.setString(1, sysRole_id);
            ps.setString(2, departmentID);
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.jdbc.core.BatchPreparedStatementSetter#getBatchSize()
         */
        public int getBatchSize() {
            return idList.size();
        }
    });
    return deleteNumbers.length;
}

From source file:net.sf.infrared.collector.impl.persistence.ApplicationStatisticsDaoImpl.java

/**
 * Persists the total time spent in each layer of the given statistics snapshot
 * with a single batch insert.
 *
 * @param stats statistics snapshot whose per-layer totals are saved
 */
void saveLayerTimes(final ApplicationStatistics stats) {
    final String appName = stats.getApplicationName();
    final String instanceId = stats.getInstanceId();
    final String[] layers = stats.getLayers();

    getJdbcTemplate().batchUpdate(SQL_INSERT_LAYER_TIME, new BatchPreparedStatementSetter() {

        /** Binds one row: application, instance, layer, total time and the save timestamp. */
        public void setValues(PreparedStatement ps, int row) throws SQLException {
            final String layer = layers[row];
            ps.setString(1, appName);
            ps.setString(2, instanceId);
            ps.setString(3, layer);
            ps.setLong(4, stats.getTimeInLayer(layer));
            ps.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
        }

        /** One insert statement per layer. */
        public int getBatchSize() {
            return layers.length;
        }
    });
    if (log.isDebugEnabled()) {
        log.debug("Saved " + layers.length + " layer times in " + stats + " to DB");
    }
}