Example usage for org.springframework.jdbc.core BatchPreparedStatementSetter

Introduction

On this page you can find example usage for the org.springframework.jdbc.core BatchPreparedStatementSetter interface.

Prototype

BatchPreparedStatementSetter

Usage
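
Before the examples collected from real projects, here is a minimal, self-contained sketch of the interface as it is normally used with JdbcTemplate.batchUpdate. The table, column, and class names are illustrative only and are not taken from any of the sources below.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;

public class PersonBatchInsert {

    private final JdbcTemplate jdbcTemplate;

    public PersonBatchInsert(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    /** Inserts every name in the list with a single batched statement. */
    public int[] insertNames(final List<String> names) {
        String sql = "INSERT INTO person (name) VALUES (?)";
        return jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

            // Called once per batch entry; bind the parameters for row i here.
            public void setValues(PreparedStatement ps, int i) throws SQLException {
                ps.setString(1, names.get(i));
            }

            // Tells JdbcTemplate how many times to invoke setValues().
            public int getBatchSize() {
                return names.size();
            }
        });
    }
}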

From source file:com.hygenics.parser.getDAOTemplate.java

private BatchPreparedStatementSetter jsonBatchSetter(final ArrayList<String> jsondata,
        final ArrayList<String> keys) {
    return (new BatchPreparedStatementSetter() {

        @Override
        public int getBatchSize() {
            return jsondata.size();
        }

        @Override
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Map<String, Json> jmap = Json.read(jsondata.get(i)).asJsonMap();
            int j = 0;

            for (String k : keys) {
                if (k.toLowerCase().compareTo("table") != 0 && !k.toLowerCase().contains("narrow")) {
                    j++;
                    ps.setString(j, StringEscapeUtils.unescapeJson(jmap.get(k).asString()));
                }
            }
        }

    });

}
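
The helper above only builds the setter. A caller in the same class would presumably hand it to JdbcTemplate.batchUpdate together with an INSERT whose placeholders line up with the non-skipped keys; the jdbcTemplate field, SQL text, and column names in this sketch are assumptions for illustration, not part of the original source.

// Hypothetical call site for jsonBatchSetter (SQL and field names are illustrative).
String sql = "INSERT INTO parsed_data (field_one, field_two) VALUES (?, ?)";
int[] counts = jdbcTemplate.batchUpdate(sql, jsonBatchSetter(jsondata, keys));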

From source file:com.hp.avmon.config.service.AgentManageService.java

/**
 * Saves the normal AMP instance attributes submitted in the request.
 * @param request the HTTP request carrying agentId, ampInstId and the ampAttr JSON
 * @return a result map containing a "success" flag
 */
public Map saveNormalAmpAttr(HttpServletRequest request) {
    Map<String, Object> map = new HashMap<String, Object>();
    final String agentId = request.getParameter("agentId");
    final String ampInstId = request.getParameter("ampInstId");
    String ampAttrJson = request.getParameter("ampAttr");
    Map<String, String> jsonMap = JackJson.fromJsonToObject(ampAttrJson, Map.class);
    List<String> nameList = new LinkedList<String>();
    List<String> valueLis = new LinkedList<String>();
    for (Map.Entry<String, String> tempMap : jsonMap.entrySet()) {
        nameList.add(tempMap.getKey());
        valueLis.add(tempMap.getValue().equals("") ? "" : tempMap.getValue());
    }
    // Remove any existing attributes for this AMP instance and agent
    String delSql = "DELETE FROM TD_AVMON_AMP_INST_ATTR WHERE AMP_INST_ID = ? AND AGENT_ID = ?";
    Object[] delParams = new Object[] { ampInstId, agentId };
    int delCout = jdbcTemplate.update(delSql, delParams);

    // Insert the new attribute name/value pairs
    String newAttrSql = "INSERT INTO TD_AVMON_AMP_INST_ATTR(AMP_INST_ID,AGENT_ID,NAME,VALUE)VALUES(?,?,?,?)";
    final List<String> nameListF = nameList;
    final List<String> valueLisF = valueLis;
    BatchPreparedStatementSetter batchPreparedStatementSetter = new BatchPreparedStatementSetter() {
        @Override
        public void setValues(PreparedStatement preparedStatement, int i) throws SQLException {
            preparedStatement.setString(1, ampInstId);
            preparedStatement.setString(2, agentId);
            preparedStatement.setString(3, nameListF.get(i));
            preparedStatement.setString(4, valueLisF.get(i));
        }

        @Override
        public int getBatchSize() {
            return nameListF.size();
        }
    };
    int[] newCount = jdbcTemplate.batchUpdate(newAttrSql, batchPreparedStatementSetter);

    logger.debug("del count: " + delCout);
    logger.debug("insert count:" + newCount.length);
    map.put("success", true);
    return map;
}
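
Note that the int[] returned by batchUpdate holds one update count per batch entry (some JDBC drivers report Statement.SUCCESS_NO_INFO instead of a row count), so logging newCount.length reports the batch size rather than the number of rows actually written. If the summed row count is wanted, a small sketch along these lines (not part of the original source) would work:

// Sum the per-statement update counts returned by batchUpdate.
int rowsInserted = java.util.Arrays.stream(newCount).sum();
logger.debug("insert count: " + rowsInserted);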

From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaDropDao.java

/**
 * Populates the river_tag_trends table.
 * 
 * @param drops
 * @param dropIndex
 * @param riverDropChannelList
 * @throws Exception
 */
private void insertRiverTagTrends(List<Drop> drops, Map<Long, Integer> dropIndex,
        List<Map<String, Long>> riverDropChannelList) throws Exception {

    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd H:00:00");
    Map<String, RiverTagTrend> trendsData = new HashMap<String, RiverTagTrend>();

    for (Map<String, Long> entry : riverDropChannelList) {
        Long dropletId = entry.get("dropletId");
        Long riverId = entry.get("riverId");

        River river = new River();
        river.setId(riverId);

        Drop drop = drops.get(dropIndex.get(dropletId));
        String datePublishedStr = dateFormat.format(drop.getDatePublished());
        Date datePublished = dateFormat.parse(datePublishedStr);

        // Tags
        if (drop.getTags() != null) {
            for (Tag tag : drop.getTags()) {
                String hash = MD5Util.md5Hex(riverId.toString(), datePublishedStr, tag.getTag(), tag.getType());

                RiverTagTrend tagTrend = trendsData.remove(hash);
                if (tagTrend == null) {
                    tagTrend = new RiverTagTrend();
                    tagTrend.setRiver(river);
                    tagTrend.setDatePublished(datePublished);
                    tagTrend.setTag(tag.getTag());
                    tagTrend.setTagType(tag.getType());
                    tagTrend.setHash(hash);
                    tagTrend.setCount(1L);
                } else {
                    tagTrend.setCount(tagTrend.getCount() + 1L);
                }

                trendsData.put(hash, tagTrend);
            }
        }

        // Places
        if (drop.getPlaces() != null) {
            for (Place place : drop.getPlaces()) {
                String hash = MD5Util.md5Hex(riverId.toString(), datePublishedStr, place.getPlaceName(),
                        "place");

                RiverTagTrend tagTrend = trendsData.remove(hash);
                if (tagTrend == null) {
                    tagTrend = new RiverTagTrend();
                    tagTrend.setRiver(river);
                    tagTrend.setDatePublished(datePublished);
                    tagTrend.setTag(place.getPlaceName());
                    tagTrend.setTagType("place");
                    tagTrend.setHash(hash);
                    tagTrend.setCount(1L);
                } else {
                    tagTrend.setCount(tagTrend.getCount() + 1L);
                }

                trendsData.put(hash, tagTrend);
            }
        }
    }

    if (trendsData.keySet().isEmpty())
        return;

    // Check for existing trends
    String sql = "SELECT `id`, `hash` FROM `river_tag_trends` WHERE `hash` IN (:hashes)";
    MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue("hashes", trendsData.keySet());

    // List of trend IDs whose count is to be updated
    final List<long[]> trendCountUpdate = new ArrayList<long[]>();
    for (Map<String, Object> row : namedJdbcTemplate.queryForList(sql, params)) {
        String hash = (String) row.get("hash");
        long trendId = ((Number) row.get("id")).longValue();

        RiverTagTrend tagTrend = trendsData.remove(hash);

        long[] counters = { trendId, tagTrend.getCount() };
        trendCountUpdate.add(counters);
    }

    // Update existing counters
    if (!trendCountUpdate.isEmpty()) {
        sql = "UPDATE `river_tag_trends` SET `count` = `count` + ? WHERE `id` = ?";

        jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {

            public void setValues(PreparedStatement ps, int i) throws SQLException {
                long[] updateIndex = trendCountUpdate.get(i);
                ps.setLong(1, updateIndex[1]);
                ps.setLong(2, updateIndex[0]);
            }

            public int getBatchSize() {
                return trendCountUpdate.size();
            }
        });
    }

    if (trendsData.isEmpty()) {
        return;
    }

    Sequence sequence = sequenceDao.findById("river_tag_trends");
    final long startKey = sequenceDao.getIds(sequence, trendsData.size());

    // SQL to update the river_tag_trends table
    sql = "INSERT INTO river_tag_trends(`id`, `hash`, `river_id`, `date_pub`, `tag`, "
            + "`tag_type`, `count`) VALUES(?, ?, ?, ?, ?, ?, ?)";

    final List<Entry<String, RiverTagTrend>> tagTrendsList = new ArrayList<Entry<String, RiverTagTrend>>();
    tagTrendsList.addAll(trendsData.entrySet());

    jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            long id = startKey + i;

            Entry<String, RiverTagTrend> entry = tagTrendsList.get(i);
            RiverTagTrend tagTrend = entry.getValue();

            ps.setLong(1, id);
            ps.setString(2, entry.getKey());
            ps.setLong(3, tagTrend.getRiver().getId());
            ps.setTimestamp(4, new java.sql.Timestamp(tagTrend.getDatePublished().getTime()));
            ps.setString(5, tagTrend.getTag());
            ps.setString(6, tagTrend.getTagType());
            ps.setLong(7, tagTrend.getCount());
        }

        public int getBatchSize() {
            return tagTrendsList.size();
        }
    });
}

From source file:gov.nih.nci.cabig.caaers.datamigrator.UserDataMigrator.java

/**
 * This method inserts appropriate records into site_rs_staff_roles table based on existing role_code.
 * @param map
 * @param groups
 * @param onOracleDB
 */
@SuppressWarnings("unchecked")
protected void insertIntoSiteResearchStaffRoles(final Map map, final List groups, final boolean onOracleDB) {
    String sql = getInsertSiteResearchStaffRoleSql(onOracleDB);
    BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

        public int getBatchSize() {
            return groups.size();
        }

        public void setValues(PreparedStatement ps, int index) throws SQLException {

            java.sql.Timestamp startDate = (java.sql.Timestamp) map.get("start_date");
            java.sql.Timestamp endDate = (java.sql.Timestamp) map.get("end_date");

            ps.setString(1, groups.get(index).toString());

            if (onOracleDB) {
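                // Oracle's JDBC driver returns NUMBER columns as BigDecimal, hence the split with the Integer path in the else branch.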
                BigDecimal siteResearchStaffId = (BigDecimal) map.get("site_research_staffs_id");
                ps.setBigDecimal(2, siteResearchStaffId);
            } else {
                int siteResearchStaffId = ((Integer) map.get("site_research_staffs_id")).intValue();
                ps.setInt(2, siteResearchStaffId);
            }
            ps.setTimestamp(3, startDate);
            ps.setTimestamp(4, endDate);
        }
    };
    getJdbcTemplate().batchUpdate(sql, setter);
}

From source file:architecture.common.spring.jdbc.core.ExtendedJdbcTemplate.java

public int[] batchUpdate(String sql, final List<ParameterMapping> parameterMappings,
        final List<Map<String, Object>> parameters) {
    return batchUpdate(sql, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Map<String, Object> row = parameters.get(i);
            for (ParameterMapping mapping : parameterMappings) {
                JdbcType jdbcType = mapping.getJdbcType();
                Object value = row.get(mapping.getProperty());
                Object valueToUse = value;

                if (valueToUse == null && mapping.getJavaType() == Date.class) {
                    valueToUse = new Date();
                }

                if (valueToUse instanceof Date && jdbcType == JdbcType.VARCHAR) {
                    valueToUse = DateFormatUtils.format((Date) valueToUse, mapping.getPattern());
                }

                if (valueToUse instanceof String && jdbcType == JdbcType.VARCHAR) {
                    String stringValue = (String) valueToUse;
                    if (!StringUtils.isEmpty(mapping.getEncoding())) {
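                        // An encoding hint of the form "from>to" encodes the string's bytes with the first
                        // charset and re-decodes them with the second; a single charset re-decodes the platform-default bytes.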
                        if (!StringUtils.isEmpty(stringValue)) {
                            String[] encoding = StringUtils.split(mapping.getEncoding(), ">");
                            try {
                                if (encoding.length == 2)
                                    valueToUse = new String(stringValue.getBytes(encoding[0]), encoding[1]);
                                else if (encoding.length == 1)
                                    valueToUse = new String(stringValue.getBytes(), encoding[0]);

                            } catch (UnsupportedEncodingException e) {
                                logger.error(e);
                            }
                        }
                    }
                }
                if (valueToUse == null)
                    ps.setNull(mapping.getIndex(), jdbcType.TYPE_CODE);
                else
                    ps.setObject(mapping.getIndex(), valueToUse, jdbcType.TYPE_CODE);
            }
        }

        public int getBatchSize() {
            return parameters.size();
        }
    });
}

From source file:architecture.user.dao.impl.ExternalJdbcUserDao.java

public void switchCompanies(long companyId, Set<Long> users) {

    final List<Long> userIdsToUse = Lists.newArrayListWithExpectedSize(users.size());
    for (Long userId : users) {
        userIdsToUse.add(userId);
    }

    final Long companyIdToUse = companyId;
    getExtendedJdbcTemplate().batchUpdate(getSql("UPDATE_USER_COMPANY"), new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            ps.setLong(1, companyIdToUse);
            ps.setLong(2, userIdsToUse.get(i));
        }

        public int getBatchSize() {
            return userIdsToUse.size();
        }
    });
}

From source file:com.hygenics.parser.getDAOTemplate.java

private BatchPreparedStatementSetter getJsonwithTableSetter(final ArrayList<Map<String, Json>> json,
        final ArrayList<String> keys) {
    return (new BatchPreparedStatementSetter() {

        @Override
        public int getBatchSize() {
            return json.size();
        }

        @Override
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Map<String, Json> jmap = json.get(i);
            int k = 1;

            for (String key : keys) {
                if (key.compareTo("table") != 0) {
                    ps.setString(k, jmap.get(key).asString().trim());
                    k++;
                }
            }

        }

    });
}

From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaDropDao.java

/**
 * Populates the buckets_droplets table.
 * 
 * @param drops
 */
private void insertBucketDrops(final List<Drop> drops) {
    // Stores the drop id against the destination bucket ids
    Map<Long, Set<Long>> dropBucketsMap = new HashMap<Long, Set<Long>>();

    // Stores the drop id against its index in the drops list
    final Map<Long, Integer> dropsIndex = new HashMap<Long, Integer>();
    int i = 0;
    for (Drop drop : drops) {
        if (drop.getBucketIds() == null)
            continue;

        Set<Long> bucketSet = new HashSet<Long>();
        bucketSet.addAll(drop.getBucketIds());
        dropBucketsMap.put(drop.getId(), bucketSet);
        dropsIndex.put(drop.getId(), i);
        i++;
    }

    if (dropsIndex.isEmpty())
        return;

    // Exclude existing drops
    String existsSQL = "SELECT `bucket_id`, `droplet_id` "
            + "FROM `buckets_droplets` WHERE `droplet_id` IN (:ids)";

    MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue("ids", dropsIndex.keySet());

    for (Map<String, Object> row : namedJdbcTemplate.queryForList(existsSQL, params)) {
        Long dropId = ((Number) row.get("droplet_id")).longValue();
        Long bucketId = ((Number) row.get("bucket_id")).longValue();

        if (dropBucketsMap.containsKey(dropId)) {
            Set<Long> bucketIdSet = dropBucketsMap.get(dropId);
            bucketIdSet.remove(bucketId);
        }
    }

    // List of arrays comprised of the drop id and bucket id
    final List<Long[]> bucketDropList = new ArrayList<Long[]>();
    for (Map.Entry<Long, Set<Long>> entry : dropBucketsMap.entrySet()) {
        for (Long bucketId : entry.getValue()) {
            Long[] bucketDrop = { bucketId, entry.getKey() };
            bucketDropList.add(bucketDrop);
        }
    }

    if (bucketDropList.isEmpty())
        return;

    // Store for the no. of drops inserted per bucket
    final Map<Long, Integer> bucketDropCount = new HashMap<Long, Integer>();

    // Query for populating TABLE buckets_droplets
    String insertSQL = "INSERT INTO `buckets_droplets` (`bucket_id`, `droplet_id`, `droplet_date_added`) "
            + "VALUES (?, ?, ?)";

    jdbcTemplate.batchUpdate(insertSQL, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int index) throws SQLException {
            Long[] bucketDrop = bucketDropList.get(index);
            Long bucketId = bucketDrop[0];

            ps.setLong(1, bucketId);
            ps.setLong(2, bucketDrop[1]);
            ps.setTimestamp(3, new java.sql.Timestamp(new Date().getTime()));

            Integer count = bucketDropCount.remove(bucketId);
            count = (count == null) ? 1 : count + 1;
            bucketDropCount.put(bucketId, count);
        }

        @Override
        public int getBatchSize() {
            return bucketDropList.size();
        }
    });

    // Update the drop count for the updated buckets
    final List<Entry<Long, Integer>> bucketDropCountList = new ArrayList<Map.Entry<Long, Integer>>();
    bucketDropCountList.addAll(bucketDropCount.entrySet());

    String updateSQL = "UPDATE `buckets` SET `drop_count` = `drop_count` + ? WHERE `id` = ?";
    jdbcTemplate.batchUpdate(updateSQL, new BatchPreparedStatementSetter() {

        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Entry<Long, Integer> entry = bucketDropCountList.get(i);
            ps.setLong(1, entry.getValue());
            ps.setLong(2, entry.getKey());
        }

        public int getBatchSize() {
            return bucketDropCountList.size();
        }
    });

}

From source file:anyframe.core.query.impl.QueryServiceImpl.java

/**
 * Executes Insert, Update, or Delete statements as a batch against a dynamic
 * SQL query using Spring JdbcTemplate's batchUpdate. (Used when the SQL's
 * isDynamic attribute is set to true.)
 * 
 * @param sql
 *            dynamic query statement.
 * @param targets
 *            object of class which is matched with specified table in XML
 *            files. is the List type of Object Array.
 * @return an array of the number of rows affected by each statement
 */
protected int[] batchDynamicExecutor(final String sql, final List targets) {
    // Use NamedParameterUtils to parse the named bind variables in the SQL
    // into a form usable with a PreparedStatement.
    final ParsedSql parsedSql = NamedParameterUtils.parseSqlStatement(sql);
    return jdbcTemplate.batchUpdate(parsedSql.getNewSql(), new BatchPreparedStatementSetter() {

        public int getBatchSize() {
            return targets.size();
        }

        // Callback method invoked by Spring JdbcTemplate for each batch entry.
        public void setValues(PreparedStatement ps, int index) throws SQLException {
            Map properties = new HashMap();
            properties.put("anyframe", targets.get(index));
            // Use NamedParameterUtils and the input map to build the bind
            // variable values for the dynamic SQL.
            Object[] args = NamedParameterUtils.buildValueArray(parsedSql,
                    new ExtMapSqlParameterSource(properties));
            // Set the value for the parameter
            for (int i = 0; i < args.length; i++) {
                StatementCreatorUtils.setParameterValue(ps, i + 1, SqlTypeValue.TYPE_UNKNOWN, null, args[i]);
            }
        }
    });
}

From source file:gov.nih.nci.cabig.caaers.datamigrator.UserDataMigrator.java

/**
 * This method inserts appropriate records into study personnel table based on existing role_code.
 * @param map
 * @param groups
 * @param onOracleDB
 */
@SuppressWarnings("unchecked")
protected void insertIntoStudyPersonnel(final Map map, final List groups, final boolean onOracleDB) {

    String sql = getInsertStudyPersonnelSql(onOracleDB);
    BatchPreparedStatementSetter setter = new BatchPreparedStatementSetter() {

        public int getBatchSize() {
            return groups.size();
        }

        public void setValues(PreparedStatement ps, int index) throws SQLException {

            java.sql.Timestamp startDate = (java.sql.Timestamp) map.get("start_date");
            java.sql.Timestamp endDate = (java.sql.Timestamp) map.get("end_date");

            if (onOracleDB) {
                BigDecimal studySiteId = (BigDecimal) map.get("study_sites_id");
                ps.setBigDecimal(1, studySiteId);
            } else {
                int studySiteId = ((Integer) map.get("study_sites_id")).intValue();
                ps.setInt(1, studySiteId);
            }
            ps.setString(2, groups.get(index).toString());
            if (onOracleDB) {
                BigDecimal retiredIndicator = (BigDecimal) map.get("retired_indicator");
                ps.setBigDecimal(3, retiredIndicator);
            } else {
                Boolean retiredIndicator = (Boolean) map.get("retired_indicator");
                ps.setBoolean(3, retiredIndicator);
            }

            ps.setTimestamp(4, startDate);
            ps.setTimestamp(5, endDate);

            if (onOracleDB) {
                BigDecimal siteResearchStaffId = (BigDecimal) map.get("site_research_staffs_id");
                ps.setBigDecimal(6, siteResearchStaffId);
            } else {
                int siteResearchStaffId = ((Integer) map.get("site_research_staffs_id")).intValue();
                ps.setInt(6, siteResearchStaffId);
            }

        }
    };
    getJdbcTemplate().batchUpdate(sql, setter);
}