List of usage examples for the org.springframework.jdbc.core.namedparam.MapSqlParameterSource no-argument constructor
public MapSqlParameterSource()
From source file:ch.digitalfondue.npjt.QueryType.java
private static SqlParameterSource extractParameters(Method m, Object[] args, Collection<ParameterConverter> parameterConverters) { Annotation[][] parameterAnnotations = m.getParameterAnnotations(); if (parameterAnnotations == null || parameterAnnotations.length == 0) { return new EmptySqlParameterSource(); }//from w w w .j ava 2 s .com MapSqlParameterSource ps = new MapSqlParameterSource(); Class<?>[] parameterTypes = m.getParameterTypes(); for (int i = 0; i < args.length; i++) { String name = parameterName(parameterAnnotations[i]); if (name != null) { Object arg = args[i]; Class<?> parameterType = parameterTypes[i]; boolean hasAccepted = false; for (ParameterConverter parameterConverter : parameterConverters) { if (parameterConverter.accept(parameterType)) { hasAccepted = true; parameterConverter.processParameter(name, arg, parameterType, ps); break; } } if (!hasAccepted) { throw new IllegalStateException( "Was not able to find a ParameterConverter able to process object: " + arg + " with class " + parameterType); } } } return ps; }
From source file:com.joliciel.frenchTreebank.TreebankDaoImpl.java
/**
 * Inserts or updates a phrase unit row in {@code ftb_phrase_unit}.
 * <p>
 * For a new phrase unit, an id is first drawn from the
 * {@code ftb_phrase_unit_punit_id_seq} sequence and assigned back to the entity
 * after the INSERT; existing phrase units are updated by {@code punit_id}.
 * Zero-valued foreign-key ids are stored as SQL NULL.
 *
 * @param phraseUnit the phrase unit to persist; its id is set when newly inserted
 */
public void savePhraseUnit(PhraseUnitInternal phraseUnit) {
    NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource());
    MapSqlParameterSource paramSource = new MapSqlParameterSource();
    // A value of 0 means "no reference" for these ids, so persist NULL instead.
    paramSource.addValue("punit_word_id", phraseUnit.getWordId() == 0 ? null : phraseUnit.getWordId());
    paramSource.addValue("punit_phrase_id", phraseUnit.getPhraseId() == 0 ? null : phraseUnit.getPhraseId());
    paramSource.addValue("punit_position", phraseUnit.getPositionInSentence());
    paramSource.addValue("punit_lemma_id", phraseUnit.getLemmaId() == 0 ? null : phraseUnit.getLemmaId());
    paramSource.addValue("punit_cat_id", phraseUnit.getCategoryId() == 0 ? null : phraseUnit.getCategoryId());
    paramSource.addValue("punit_subcat_id",
            phraseUnit.getSubCategoryId() == 0 ? null : phraseUnit.getSubCategoryId());
    paramSource.addValue("punit_morph_id",
            phraseUnit.getMorphologyId() == 0 ? null : phraseUnit.getMorphologyId());
    paramSource.addValue("punit_compound", phraseUnit.isCompound());
    paramSource.addValue("punit_pos_in_phrase", phraseUnit.getPositionInPhrase());
    paramSource.addValue("punit_compound_next",
            phraseUnit.getNextCompoundPartId() == 0 ? null : phraseUnit.getNextCompoundPartId());
    paramSource.addValue("punit_guessed_postag_id",
            phraseUnit.getGuessedPosTagId() == 0 ? null : phraseUnit.getGuessedPosTagId());
    if (phraseUnit.isNew()) {
        String sql = "SELECT nextval('ftb_phrase_unit_punit_id_seq')";
        LOG.info(sql);
        // queryForInt is deprecated (removed in Spring 4); queryForObject is the
        // supported replacement and behaves identically for a single-row result.
        int phraseUnitId = jt.queryForObject(sql, paramSource, Integer.class);
        paramSource.addValue("punit_id", phraseUnitId);
        sql = "INSERT INTO ftb_phrase_unit (punit_id, punit_word_id, punit_phrase_id, punit_position, punit_lemma_id, punit_cat_id"
                + ", punit_subcat_id, punit_morph_id, punit_compound, punit_pos_in_phrase, punit_compound_next, punit_guessed_postag_id) "
                + "VALUES (:punit_id, :punit_word_id, :punit_phrase_id, :punit_position, :punit_lemma_id, :punit_cat_id"
                + ", :punit_subcat_id, :punit_morph_id, :punit_compound, :punit_pos_in_phrase, :punit_compound_next, :punit_guessed_postag_id)";
        LOG.info(sql);
        TreebankDaoImpl.LogParameters(paramSource);
        jt.update(sql, paramSource);
        phraseUnit.setId(phraseUnitId);
    } else {
        paramSource.addValue("punit_id", phraseUnit.getId());
        String sql = "UPDATE ftb_phrase_unit" + " SET punit_word_id = :punit_word_id"
                + ", punit_phrase_id=:punit_phrase_id" + ", punit_position=:punit_position"
                + ", punit_lemma_id=:punit_lemma_id" + ", punit_cat_id=:punit_cat_id"
                + ", punit_subcat_id=:punit_subcat_id" + ", punit_morph_id=:punit_morph_id"
                + ", punit_compound=:punit_compound" + ", punit_pos_in_phrase=:punit_pos_in_phrase"
                + ", punit_compound_next=:punit_compound_next"
                + ", punit_guessed_postag_id=:punit_guessed_postag_id" + " WHERE punit_id=:punit_id";
        LOG.info(sql);
        TreebankDaoImpl.LogParameters(paramSource);
        jt.update(sql, paramSource);
    }
}
From source file:com.joliciel.lefff.LefffDaoImpl.java
public void saveCategory(CategoryInternal category) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); MapSqlParameterSource paramSource = new MapSqlParameterSource(); paramSource.addValue("category_code", category.getCode()); paramSource.addValue("category_description", category.getDescription()); if (category.isNew()) { String sql = "SELECT nextval('seq_category_id')"; LOG.info(sql);//from ww w. java2s . c om int categoryId = jt.queryForInt(sql, paramSource); paramSource.addValue("category_id", categoryId); sql = "INSERT INTO lef_category (category_id, category_code, category_description) VALUES (:category_id, :category_code, :category_description)"; LOG.info(sql); LefffDaoImpl.LogParameters(paramSource); jt.update(sql, paramSource); category.setId(categoryId); } else { String sql = "UPDATE lef_category" + " SET category_code = :category_code" + ", category_description = :category_description" + " WHERE category_id = :category_id"; paramSource.addValue("category_id", category.getId()); LOG.info(sql); LefffDaoImpl.LogParameters(paramSource); jt.update(sql, paramSource); } }
From source file:com.joliciel.talismane.terminology.postgres.PostGresTerminologyBase.java
@Override public List<Term> getTermsByText(final String searchText) { MONITOR.startTask("getTermsByText"); try {//w w w . j a v a2s . co m NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); String sql = "SELECT " + SELECT_TERM + " FROM term" + " INNER JOIN text ON term_text_id=text_id" + " WHERE text_text LIKE :term_text" + " AND term_project_id = :term_project_id" + " ORDER BY text_text"; MapSqlParameterSource paramSource = new MapSqlParameterSource(); paramSource.addValue("term_text", searchText + "%"); paramSource.addValue("term_project_id", this.getCurrentProjectId()); LOG.trace(sql); LogParameters(paramSource); @SuppressWarnings("unchecked") List<Term> terms = jt.query(sql, paramSource, new TermMapper()); return terms; } finally { MONITOR.endTask("getTermsByText"); } }
From source file:org.owasp.proxy.http.dao.JdbcMessageDAO.java
/**
 * Returns the stored content size for the message with the given id.
 *
 * @param id the message id
 * @return the content size in bytes, or {@code -1} if no such message exists
 * @throws DataAccessException on any database error other than an empty result
 */
public int getMessageContentSize(int id) throws DataAccessException {
    try {
        MapSqlParameterSource params = new MapSqlParameterSource();
        params.addValue(ID, id, Types.INTEGER);
        // SimpleJdbcTemplate is deprecated (removed in Spring 4); the
        // NamedParameterJdbcTemplate already available here runs the same query.
        Integer size = getNamedParameterJdbcTemplate()
                .queryForObject(SELECT_CONTENT_SIZE, params, Integer.class);
        return size == null ? -1 : size.intValue();
    } catch (EmptyResultDataAccessException erdae) {
        // No row for this id: preserve the original "-1 means absent" contract.
        return -1;
    }
}
From source file:Implement.DAO.CommonDAOImpl.java
@Override public AdvancedSearchForm liveSearch(String searchText) { simpleJdbcCall = new SimpleJdbcCall(dataSource).withProcedureName("liveSearch"); simpleJdbcCall.returningResultSet("rs1", new RowMapper<String>() { @Override//from ww w .ja v a 2 s . c o m public String mapRow(ResultSet rs, int i) throws SQLException { return rs.getString("Keyword"); } }).returningResultSet("rs2", PopularPackageMapper.getInstance()).returningResultSet("rs3", LocationDTOMapper.getInstance()); SqlParameterSource in = new MapSqlParameterSource().addValue("searchText", searchText); Map<String, Object> record = simpleJdbcCall.execute(in); List<String> keywords = (List<String>) record.get("rs1"); List<LocationDTO> locations = (List<LocationDTO>) record.get("rs3"); List<PopularPackageDTO> popularPackages = (List<PopularPackageDTO>) record.get("rs2"); return new AdvancedSearchForm(keywords, locations, popularPackages); }
From source file:com.joliciel.jochre.graphics.GraphicsDaoJdbc.java
@Override public void loadOriginalImage(JochreImageInternal jochreImage) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); String sql = "SELECT image_image FROM ocr_image WHERE image_id=:image_id"; MapSqlParameterSource paramSource = new MapSqlParameterSource(); paramSource.addValue("image_id", jochreImage.getId()); LOG.debug(sql);//from w ww . j a va2 s .c o m logParameters(paramSource); byte[] pixels = (byte[]) jt.query(sql, paramSource, new ResultSetExtractor() { @Override public Object extractData(ResultSet rs) throws SQLException, DataAccessException { if (rs.next()) { byte[] pixels = rs.getBytes("image_image"); return pixels; } else { return null; } } }); ByteArrayInputStream is = new ByteArrayInputStream(pixels); BufferedImage image; try { image = ImageIO.read(is); is.close(); } catch (IOException e) { throw new RuntimeException(e); } jochreImage.setOriginalImageDB(image); }
From source file:org.terasoluna.gfw.tutorial.selenium.DBLogAssertOperations.java
/**
 * Asserts that exactly one row in the {@code logging_event_exception} table has a
 * stack-trace line matching the given regular expression.
 * <p>
 * NOTE(review): the original javadoc here was mojibake (garbled non-ASCII text);
 * it has been rewritten in English from the visible code. The assertion expects
 * exactly one matching row ({@code count == 1}).
 *
 * @param stackTracePattern regular expression applied to {@code TRACE_LINE} via the
 *        SQL {@code REGEXP} operator
 */
public void assertContainsByRegexStackTrace(String stackTracePattern) {
    StringBuilder sql = new StringBuilder();
    StringBuilder where = new StringBuilder();
    sql.append("SELECT COUNT(e.*) FROM logging_event_exception e");
    where.append(" WHERE e.TRACE_LINE REGEXP :stackTrace");
    sql.append(where);
    MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue("stackTrace", stackTracePattern);
    Long count = jdbcOperations.queryForObject(sql.toString(), params, Long.class);
    assertThat(count, is(1L));
}
From source file:com.ushahidi.swiftriver.core.api.service.DropIndexService.java
/** * Sets the <code>bucketIds</code> property for each {@link Drop} * in <code>drops</code>/*from w ww .j a v a 2s .c o m*/ * @param drops */ private void populateBucketIds(List<Drop> drops) { // Store the drop index Map<Long, Integer> dropIndex = new HashMap<Long, Integer>(); List<Long> dropIds = new ArrayList<Long>(); int index = 0; for (Drop drop : drops) { dropIds.add(drop.getId()); dropIndex.put(drop.getId(), index); index++; } String sql = "SELECT `droplet_id`, `bucket_id` " + "FROM `buckets_droplets` WHERE `droplet_id` IN (:dropIds)"; MapSqlParameterSource paramMap = new MapSqlParameterSource(); paramMap.addValue("dropIds", dropIds); for (Map<String, Object> row : namedJdbcTemplate.queryForList(sql, paramMap)) { Long dropId = ((Number) row.get("droplet_id")).longValue(); Long bucketId = ((Number) row.get("bucket_id")).longValue(); Drop drop = drops.get(dropIndex.get(dropId)); if (drop.getBucketIds() == null) { drop.setBucketIds(new ArrayList<Long>()); } drop.getBucketIds().add(bucketId); } }
From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaDropDao.java
/**
 * Populate the rivers_droplets table.
 * <p>
 * For each drop, resolves its destination rivers via active river channels,
 * skips (drop, river) pairs that already exist in rivers_droplets, batch-inserts
 * the new associations, and then updates the per-river and per-channel drop
 * counters and the tag-trend statistics.
 *
 * @param drops the drops to post to their destination rivers
 */
private void insertRiverDrops(final List<Drop> drops) {
    // Get a lock on rivers_droplets
    Sequence seq = sequenceDao.findById("rivers_droplets");

    // Mapping of drop id to list index position
    final Map<Long, Integer> dropIndex = new HashMap<Long, Integer>();

    // List of rivers in a drop
    Map<Long, Set<Long>> dropRiversMap = new HashMap<Long, Set<Long>>();
    Map<Long, Set<Long>> dropChannelsMap = new HashMap<Long, Set<Long>>();

    // Registry for all channels and rivers
    Set<Long> allChannelIds = new HashSet<Long>();

    int i = 0;
    for (Drop drop : drops) {
        if (drop.getRiverIds() == null || drop.getChannelIds() == null) {
            logger.debug("No rivers or channels for drop {}", drop.getId());
            continue;
        }

        Set<Long> rivers = new HashSet<Long>();
        Set<Long> channels = new HashSet<Long>();

        rivers.addAll(drop.getRiverIds());
        channels.addAll(drop.getChannelIds());

        dropRiversMap.put(drop.getId(), rivers);
        dropChannelsMap.put(drop.getId(), channels);

        allChannelIds.addAll(channels);

        dropIndex.put(drop.getId(), i++);
    }

    // No rivers found, exit
    if (dropIndex.size() == 0)
        return;

    // Find already existing rivers_droplets
    String sql = "SELECT droplet_id, river_id FROM rivers_droplets WHERE droplet_id in (:ids)";

    MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue("ids", dropIndex.keySet());

    List<Map<String, Object>> results = this.namedJdbcTemplate.queryForList(sql, params);

    logger.debug("Skipping {} entries from rivers_droplets", results.size());

    // Remove existing rivers_droplets entries from our Set
    for (Map<String, Object> row : results) {
        Long dropletId = ((Number) row.get("droplet_id")).longValue();
        Long riverId = ((Number) row.get("river_id")).longValue();

        Set<Long> riverSet = dropRiversMap.remove(dropletId);
        if (riverSet != null) {
            riverSet.remove(riverId);

            // Only add back the destination rivers if the set is non empty
            if (!riverSet.isEmpty()) {
                dropRiversMap.put(dropletId, riverSet);
            }
        }
    }

    // If all drops are duplicates, return early
    if (dropRiversMap.isEmpty()) {
        logger.info("No drops to add to the rivers");
        return;
    }

    // Associate the channels with active rivers
    sql = "SELECT rc.id, rc.river_id " + "FROM river_channels rc "
            + "INNER JOIN rivers r ON (rc.river_id = r.id) " + "WHERE rc.id IN (:channelIds) "
            + "AND r.river_active = 1";

    MapSqlParameterSource channelParams = new MapSqlParameterSource();
    channelParams.addValue("channelIds", allChannelIds);

    Map<Long, Long> riverChannelsMap = new HashMap<Long, Long>();
    for (Map<String, Object> row : namedJdbcTemplate.queryForList(sql, channelParams)) {
        Long channelId = ((Number) row.get("id")).longValue();
        Long riverId = ((Number) row.get("river_id")).longValue();

        riverChannelsMap.put(channelId, riverId);
    }

    // Map to hold the association between a drop, river and channel
    // During the association, we verify that the river is in the drop's
    // destination river list
    final List<Map<String, Long>> riverDropChannelList = new ArrayList<Map<String, Long>>();
    Set<RiverDropKey> riverDropKeySet = new HashSet<JpaDropDao.RiverDropKey>();
    for (Long dropletId : dropChannelsMap.keySet()) {
        for (Long channelId : dropChannelsMap.get(dropletId)) {
            if (riverChannelsMap.containsKey(channelId)) {
                Long riverId = riverChannelsMap.get(channelId);

                // Does the river drop key already exist?
                RiverDropKey riverDropKey = new RiverDropKey(riverId, dropletId);
                if (riverDropKeySet.contains(riverDropKey))
                    continue;

                // Does not exist. Add to the in-memory registry
                riverDropKeySet.add(riverDropKey);

                if (dropRiversMap.containsKey(dropletId) && dropRiversMap.get(dropletId).contains(riverId)) {
                    Map<String, Long> entry = new HashMap<String, Long>();
                    entry.put("dropletId", dropletId);
                    entry.put("channelId", channelId);
                    entry.put("riverId", riverId);
                    riverDropChannelList.add(entry);
                }
            }
        }
    }

    logger.debug("Posting drops to rivers");

    // Insert the remaining items in the set into the DB
    sql = "INSERT INTO `rivers_droplets` (`id`, `droplet_id`, `river_id`, "
            + "`river_channel_id`, `droplet_date_pub`) " + "VALUES (?, ?, ?, ?, ?)";

    // Reserve a contiguous block of ids from the sequence for this batch.
    final long startKey = sequenceDao.getIds(seq, riverDropChannelList.size());

    // Map to hold to hold the no. of drops created per channel
    final Map<Long, Long> channelDropCountMap = new HashMap<Long, Long>();

    // A map to hold the new max_drop_id and drop_count per river
    final Map<Long, long[]> riverDropsMap = new HashMap<Long, long[]>();

    jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Map<String, Long> dropEntry = riverDropChannelList.get(i);
            long id = startKey + i;

            Long dropletId = dropEntry.get("dropletId");
            Long riverId = dropEntry.get("riverId");
            Long channelId = dropEntry.get("channelId");
            Drop drop = drops.get(dropIndex.get(dropletId));

            ps.setLong(1, id);
            ps.setLong(2, dropletId);
            ps.setLong(3, riverId);
            ps.setLong(4, channelId);
            ps.setTimestamp(5, new java.sql.Timestamp(drop.getDatePublished().getTime()));

            // Get updated max_drop_id and drop_count for the rivers table
            long[] update = riverDropsMap.get(riverId);
            if (update == null) {
                long[] u = { id, 1 };
                riverDropsMap.put(riverId, u);
            } else {
                update[0] = Math.max(update[0], id);
                update[1] = update[1] + 1;
            }

            // Update the drop count for the channel
            Long channelDropCount = channelDropCountMap.remove(channelId);
            channelDropCount = (channelDropCount == null) ? 1L
                    : Long.valueOf(channelDropCount.longValue() + 1);
            channelDropCountMap.put(channelId, channelDropCount);
        }

        public int getBatchSize() {
            return riverDropChannelList.size();
        }
    });
    logger.debug("Drops successfully posted to rivers");

    // Update river max_drop_id and drop_count
    logger.debug("Updating river drop counters");
    sql = "UPDATE rivers SET max_drop_id = ?, drop_count = drop_count + ? WHERE id = ?";
    final List<Entry<Long, long[]>> riverUpdate = new ArrayList<Entry<Long, long[]>>();
    riverUpdate.addAll(riverDropsMap.entrySet());

    this.jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Entry<Long, long[]> entry = riverUpdate.get(i);
            ps.setLong(1, entry.getValue()[0]);
            ps.setLong(2, entry.getValue()[1]);
            ps.setLong(3, entry.getKey());
        }

        public int getBatchSize() {
            return riverUpdate.size();
        }
    });
    logger.debug("{} rivers successfully updated", riverUpdate.size());

    // Update the drop_count in TABLE `river_channels`
    logger.debug("Updating river channel statistics");
    sql = "UPDATE river_channels SET drop_count = drop_count + ? WHERE id = ?";
    final List<Entry<Long, Long>> riverChannelUpdate = new ArrayList<Entry<Long, Long>>();
    riverChannelUpdate.addAll(channelDropCountMap.entrySet());

    this.jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() {
        public void setValues(PreparedStatement ps, int i) throws SQLException {
            Entry<Long, Long> entry = riverChannelUpdate.get(i);
            ps.setLong(1, entry.getValue());
            ps.setLong(2, entry.getKey());
        }

        @Override
        public int getBatchSize() {
            return riverChannelUpdate.size();
        }
    });
    logger.debug("{} channels updated", riverChannelUpdate.size());

    // Insert the trend data
    logger.debug("Updating trend statistics");
    try {
        insertRiverTagTrends(drops, dropIndex, riverDropChannelList);
    } catch (Exception e) {
        // Trend statistics are best-effort: log and continue rather than fail the insert.
        logger.error("An error occurred while inserting the trend data", e);
    }
}