List of usage examples for org.springframework.jdbc.core.namedparam MapSqlParameterSource addValue
public MapSqlParameterSource addValue(String paramName, @Nullable Object value)
From source file:com.joliciel.jochre.graphics.GraphicsDaoJdbc.java
@Override public void saveOriginalImage(JochreImage jochreImage) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); MapSqlParameterSource paramSource = new MapSqlParameterSource(); paramSource.addValue("image_id", jochreImage.getId()); ByteArrayOutputStream os = new ByteArrayOutputStream(); try {//from ww w. ja va 2 s .c o m ImageIO.write(jochreImage.getOriginalImage(), "png", os); os.flush(); paramSource.addValue("image_image", os.toByteArray()); os.close(); } catch (IOException e) { throw new RuntimeException(e); } String sql = "UPDATE ocr_image SET image_image = :image_image" + " WHERE image_id = :image_id"; LOG.debug(sql); logParameters(paramSource); jt.update(sql, paramSource); }
From source file:com.joliciel.lefff.LefffDaoImpl.java
@Override public Map<String, List<LexicalEntry>> findEntryMap(List<String> categories) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); String sql = "SELECT " + SELECT_ENTRY + "," + SELECT_WORD + "," + SELECT_LEMMA + "," + SELECT_CATEGORY + "," + SELECT_PREDICATE + "," + SELECT_ATTRIBUTE + " FROM lef_entry" + " INNER JOIN lef_word ON entry_word_id = word_id" + " INNER JOIN lef_lemma ON entry_lemma_id = lemma_id" + " INNER JOIN lef_category ON entry_category_id = category_id" + " INNER JOIN lef_predicate ON entry_predicate_id = predicate_id" + " INNER JOIN lef_attribute ON entry_morph_id = attribute_id" + " WHERE entry_status < 3"; if (categories != null && categories.size() > 0) { sql += " AND category_code in (:categoryCodes)"; }// w w w .j a v a 2 s . co m sql += " ORDER BY entry_status, entry_id"; MapSqlParameterSource paramSource = new MapSqlParameterSource(); if (categories != null && categories.size() > 0) { paramSource.addValue("categoryCodes", categories); } LOG.info(sql); LefffDaoImpl.LogParameters(paramSource); double requiredCapacity = 500000; Map<String, List<LexicalEntry>> entryMap = new HashMap<String, List<LexicalEntry>>( ((int) Math.ceil(requiredCapacity / 0.75))); EntryMapper entryMapper = new EntryMapper(this.lefffServiceInternal); WordMapper wordMapper = new WordMapper(this.lefffServiceInternal); CategoryMapper categoryMapper = new CategoryMapper(this.lefffServiceInternal); LemmaMapper lemmaMapper = new LemmaMapper(this.lefffServiceInternal); PredicateMapper predicateMapper = new PredicateMapper(this.lefffServiceInternal); AttributeMapper attributeMapper = new AttributeMapper(this.lefffServiceInternal); SqlRowSet rowSet = jt.queryForRowSet(sql, paramSource); Map<Integer, Category> categoryMap = new HashMap<Integer, Category>(); Map<Integer, Predicate> predicateMap = new HashMap<Integer, Predicate>(); Map<Integer, Attribute> attributeMap = new HashMap<Integer, Attribute>(); Map<Integer, Lemma> lemmaMap = new 
HashMap<Integer, Lemma>(); while (rowSet.next()) { LefffEntryInternal entry = entryMapper.mapRow(rowSet); WordInternal word = wordMapper.mapRow(rowSet); entry.setWord(word); int categoryId = rowSet.getInt("category_id"); Category category = categoryMap.get(categoryId); if (category == null) { category = categoryMapper.mapRow(rowSet); categoryMap.put(categoryId, category); } entry.setCategory(category); int predicateId = rowSet.getInt("predicate_id"); Predicate predicate = predicateMap.get(predicateId); if (predicate == null) { predicate = predicateMapper.mapRow(rowSet); predicateMap.put(predicateId, predicate); } entry.setPredicate(predicate); int lemmaId = rowSet.getInt("lemma_id"); Lemma lemma = lemmaMap.get(lemmaId); if (lemma == null) { lemma = lemmaMapper.mapRow(rowSet); lemmaMap.put(lemmaId, lemma); } entry.setLemma(lemma); int attributeId = rowSet.getInt("attribute_id"); Attribute attribute = attributeMap.get(attributeId); if (attribute == null) { attribute = attributeMapper.mapRow(rowSet); attributeMap.put(attributeId, attribute); } entry.setMorphology(attribute); List<LexicalEntry> entries = entryMap.get(word.getText()); if (entries == null) { entries = new ArrayList<LexicalEntry>(); entryMap.put(word.getText(), entries); } entries.add(entry); } for (String word : entryMap.keySet()) { List<LexicalEntry> entries = entryMap.get(word); ArrayList<LexicalEntry> entriesArrayList = (ArrayList<LexicalEntry>) entries; entriesArrayList.trimToSize(); } return entryMap; }
From source file:com.joliciel.jochre.graphics.GraphicsDaoJdbc.java
@Override public void loadOriginalImage(JochreImageInternal jochreImage) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); String sql = "SELECT image_image FROM ocr_image WHERE image_id=:image_id"; MapSqlParameterSource paramSource = new MapSqlParameterSource(); paramSource.addValue("image_id", jochreImage.getId()); LOG.debug(sql);//from w ww .j a va2 s .co m logParameters(paramSource); byte[] pixels = (byte[]) jt.query(sql, paramSource, new ResultSetExtractor() { @Override public Object extractData(ResultSet rs) throws SQLException, DataAccessException { if (rs.next()) { byte[] pixels = rs.getBytes("image_image"); return pixels; } else { return null; } } }); ByteArrayInputStream is = new ByteArrayInputStream(pixels); BufferedImage image; try { image = ImageIO.read(is); is.close(); } catch (IOException e) { throw new RuntimeException(e); } jochreImage.setOriginalImageDB(image); }
From source file:com.joliciel.lefff.LefffDaoImpl.java
/**
 * Loads a single word by its exact text. A null argument is treated as the
 * empty string.
 *
 * @param text the word text to look up (null is coerced to "")
 * @return the matching Word, or null if no such word exists
 */
public Word loadWord(String text) {
    NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource());
    MapSqlParameterSource paramSource = new MapSqlParameterSource();
    if (text == null)
        text = "";
    String sql = "SELECT " + SELECT_WORD + " FROM lef_word WHERE word_text=:word_text";
    paramSource.addValue("word_text", text);
    LOG.info(sql);
    LefffDaoImpl.LogParameters(paramSource);
    try {
        return (Word) jt.queryForObject(sql, paramSource, new WordMapper(this.lefffServiceInternal));
    } catch (EmptyResultDataAccessException ex) {
        // A missing word is an expected outcome, not an error. The old code
        // called ex.hashCode() as a pointless pseudo-use of the exception.
        return null;
    }
}
From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaDropDao.java
/** * Populates the river_tag_trends table//from ww w .ja v a 2 s . c om * * @param drops * @param dropIndex * @param riverDropChannelList * @throws Exception */ private void insertRiverTagTrends(List<Drop> drops, Map<Long, Integer> dropIndex, List<Map<String, Long>> riverDropChannelList) throws Exception { DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd H:00:00"); Map<String, RiverTagTrend> trendsData = new HashMap<String, RiverTagTrend>(); for (Map<String, Long> entry : riverDropChannelList) { Long dropletId = entry.get("dropletId"); Long riverId = entry.get("riverId"); River river = new River(); river.setId(riverId); Drop drop = drops.get(dropIndex.get(dropletId)); String datePublishedStr = dateFormat.format(drop.getDatePublished()); Date datePublished = dateFormat.parse(datePublishedStr); // Tags if (drop.getTags() != null) { for (Tag tag : drop.getTags()) { String hash = MD5Util.md5Hex(riverId.toString(), datePublishedStr, tag.getTag(), tag.getType()); RiverTagTrend tagTrend = trendsData.remove(hash); if (tagTrend == null) { tagTrend = new RiverTagTrend(); tagTrend.setRiver(river); tagTrend.setDatePublished(datePublished); tagTrend.setTag(tag.getTag()); tagTrend.setTagType(tag.getType()); tagTrend.setHash(hash); tagTrend.setCount(1L); } else { Long count = new Long(tagTrend.getCount() + 1L); tagTrend.setCount(count); } trendsData.put(hash, tagTrend); } } // Places if (drop.getPlaces() != null) { for (Place place : drop.getPlaces()) { String hash = MD5Util.md5Hex(riverId.toString(), datePublishedStr, place.getPlaceName(), "place"); RiverTagTrend tagTrend = trendsData.remove(hash); if (tagTrend == null) { tagTrend = new RiverTagTrend(); tagTrend.setRiver(river); tagTrend.setDatePublished(datePublished); tagTrend.setTag(place.getPlaceName()); tagTrend.setTagType("place"); tagTrend.setHash(hash); tagTrend.setCount(1L); } else { Long count = new Long(tagTrend.getCount() + 1L); tagTrend.setCount(count); } trendsData.put(hash, tagTrend); } } } if 
(trendsData.keySet().isEmpty()) return; // Check for existing trends String sql = "SELECT `id`, `hash` FROM `river_tag_trends` WHERE `hash` IN (:hashes)"; MapSqlParameterSource params = new MapSqlParameterSource(); params.addValue("hashes", trendsData.keySet()); // List of trend IDs whose count is to be updated final List<long[]> trendCountUpdate = new ArrayList<long[]>(); for (Map<String, Object> row : namedJdbcTemplate.queryForList(sql, params)) { String hash = (String) row.get("hash"); long trendId = ((Number) row.get("id")).longValue(); RiverTagTrend tagTrend = trendsData.remove(hash); long[] counters = { trendId, tagTrend.getCount() }; trendCountUpdate.add(counters); } // Update existing counters if (!trendCountUpdate.isEmpty()) { sql = "UPDATE `river_tag_trends` SET `count` = `count` + ? WHERE `id` = ?"; jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() { public void setValues(PreparedStatement ps, int i) throws SQLException { long[] updateIndex = trendCountUpdate.get(i); ps.setLong(1, updateIndex[1]); ps.setLong(2, updateIndex[0]); } public int getBatchSize() { return trendCountUpdate.size(); } }); } if (trendsData.isEmpty()) { return; } Sequence sequence = sequenceDao.findById("river_tag_trends"); final long startKey = sequenceDao.getIds(sequence, trendsData.size()); // SQL to update the river_tag_trends table sql = "INSERT INTO river_tag_trends(`id`, `hash`, `river_id`, `date_pub`, `tag`, " + "`tag_type`, `count`) VALUES(?, ?, ?, ?, ?, ?, ?)"; final List<Entry<String, RiverTagTrend>> tagTrendsList = new ArrayList<Entry<String, RiverTagTrend>>(); tagTrendsList.addAll(trendsData.entrySet()); jdbcTemplate.batchUpdate(sql, new BatchPreparedStatementSetter() { public void setValues(PreparedStatement ps, int i) throws SQLException { long id = startKey + i; Entry<String, RiverTagTrend> entry = tagTrendsList.get(i); RiverTagTrend tagTrend = entry.getValue(); ps.setLong(1, id); ps.setString(2, entry.getKey()); ps.setLong(3, 
tagTrend.getRiver().getId()); ps.setTimestamp(4, new java.sql.Timestamp(tagTrend.getDatePublished().getTime())); ps.setString(5, tagTrend.getTag()); ps.setString(6, tagTrend.getTagType()); ps.setLong(7, tagTrend.getCount()); } public int getBatchSize() { return tagTrendsList.size(); } }); }
From source file:com.joliciel.jochre.graphics.GraphicsDaoJdbc.java
/**
 * Deletes a JochreImage and all of its dependent rows (splits, shapes, groups,
 * rows, paragraphs), working from the deepest child table up to ocr_image so
 * that no foreign-key constraint is violated.
 *
 * @param image the image to delete; only its id is used
 */
@Override
public void deleteJochreImage(JochreImage image) {
    NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource());
    MapSqlParameterSource paramSource = new MapSqlParameterSource();
    paramSource.addValue("image_id", image.getId());
    // Statements are ordered child-first; all share the single :image_id parameter.
    String[] statements = new String[] {
            "delete from ocr_split where split_shape_id in (" + " select shape_id from ocr_shape"
                    + " inner join ocr_group on shape_group_id = group_id"
                    + " inner join ocr_row on group_row_id = row_id"
                    + " inner join ocr_paragraph on row_paragraph_id = paragraph_id"
                    + " WHERE paragraph_image_id = :image_id)",
            "delete from ocr_shape where shape_group_id in (" + " select group_id from ocr_group"
                    + " inner join ocr_row on group_row_id = row_id"
                    + " inner join ocr_paragraph on row_paragraph_id = paragraph_id"
                    + " WHERE paragraph_image_id = :image_id)",
            "delete from ocr_group where group_row_id in (" + " select row_id from ocr_row"
                    + " inner join ocr_paragraph on row_paragraph_id = paragraph_id"
                    + " WHERE paragraph_image_id = :image_id)",
            "delete from ocr_row where row_paragraph_id in (" + " select paragraph_id from ocr_paragraph"
                    + " WHERE paragraph_image_id = :image_id)",
            "delete from ocr_paragraph" + " where paragraph_image_id = :image_id",
            "delete from ocr_image" + " WHERE image_id = :image_id" };
    for (String sql : statements) {
        LOG.debug(sql);
        logParameters(paramSource);
        jt.update(sql, paramSource);
    }
}
From source file:com.joliciel.jochre.graphics.GraphicsDaoJdbc.java
@Override public List<Shape> findShapesToSplit(Locale locale) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); String sql = "SELECT " + SELECT_SHAPE + ", count(split_id) as the_count FROM ocr_shape" + " LEFT JOIN ocr_split on shape_id = split_shape_id" + " LEFT JOIN ocr_group ON shape_group_id = group_id" + " LEFT JOIN ocr_row ON group_row_id = row_id" + " LEFT JOIN ocr_paragraph ON row_paragraph_id = paragraph_id" + " LEFT JOIN ocr_image ON paragraph_image_id = image_id" + " WHERE length(shape_letter)>1" + " AND shape_letter not like '%|'" + " AND shape_letter not like '|%'" + " AND shape_letter not in (:dual_character_letters)" + " AND image_imgstatus_id in (:image_imgstatus_id)" + " GROUP BY " + SELECT_SHAPE + " ORDER BY the_count, shape_letter, shape_id"; MapSqlParameterSource paramSource = new MapSqlParameterSource(); Linguistics linguistics = Linguistics.getInstance(locale); paramSource.addValue("dual_character_letters", linguistics.getDualCharacterLetters()); List<Integer> imageStatusList = new ArrayList<Integer>(); imageStatusList.add(ImageStatus.TRAINING_VALIDATED.getId()); imageStatusList.add(ImageStatus.TRAINING_HELD_OUT.getId()); imageStatusList.add(ImageStatus.TRAINING_TEST.getId()); paramSource.addValue("image_imgstatus_id", imageStatusList); LOG.debug(sql);// ww w . j a v a2s .c o m logParameters(paramSource); @SuppressWarnings("unchecked") List<Shape> shapes = jt.query(sql, paramSource, new ShapeMapper(this.getGraphicsServiceInternal())); return shapes; }
From source file:com.joliciel.jochre.graphics.GraphicsDaoJdbc.java
@Override public void saveParagraph(Paragraph paragraph) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); MapSqlParameterSource paramSource = new MapSqlParameterSource(); ParagraphInternal iParagraph = (ParagraphInternal) paragraph; paramSource.addValue("paragraph_image_id", paragraph.getImageId()); paramSource.addValue("paragraph_index", paragraph.getIndex()); String sql = null;// www . j a v a 2 s . c o m if (paragraph.isNew()) { sql = "SELECT nextval('ocr_paragraph_id_seq')"; LOG.debug(sql); int paragraphId = jt.queryForInt(sql, paramSource); paramSource.addValue("paragraph_id", paragraphId); sql = "INSERT INTO ocr_paragraph (paragraph_id, paragraph_image_id, paragraph_index) " + "VALUES (:paragraph_id, :paragraph_image_id, :paragraph_index)"; LOG.debug(sql); logParameters(paramSource); jt.update(sql, paramSource); iParagraph.setId(paragraphId); } else { paramSource.addValue("paragraph_id", paragraph.getId()); sql = "UPDATE ocr_paragraph" + " SET paragraph_image_id = :paragraph_image_id" + ", paragraph_index = :paragraph_index" + " WHERE paragraph_id = :paragraph_id"; LOG.debug(sql); logParameters(paramSource); jt.update(sql, paramSource); } }
From source file:com.joliciel.jochre.graphics.GraphicsDaoJdbc.java
/**
 * Inserts a new row into ocr_group when the group is new, otherwise updates
 * the existing row. On insert, the id generated from the sequence is assigned
 * back onto the group.
 *
 * @param group the group of shapes to persist
 */
@Override
public void saveGroupOfShapes(GroupOfShapes group) {
    NamedParameterJdbcTemplate jdbcTemplate = new NamedParameterJdbcTemplate(this.getDataSource());
    GroupOfShapesInternal internalGroup = (GroupOfShapesInternal) group;
    MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue("group_row_id", group.getRowId());
    params.addValue("group_index", group.getIndex());
    params.addValue("group_hard_hyphen", group.isHardHyphen());
    params.addValue("group_broken_word", group.isBrokenWord());
    params.addValue("group_segment_problem", group.isSegmentationProblem());
    params.addValue("group_skip", group.isSkip());
    if (group.isNew()) {
        // New group: reserve the next sequence value, then insert.
        String idSql = "SELECT nextval('ocr_group_id_seq')";
        LOG.debug(idSql);
        int groupId = jdbcTemplate.queryForInt(idSql, params);
        params.addValue("group_id", groupId);
        String insertSql = "INSERT INTO ocr_group (group_id, group_row_id, group_index, group_hard_hyphen, group_broken_word, group_segment_problem, group_skip) "
                + "VALUES (:group_id, :group_row_id, :group_index, :group_hard_hyphen, :group_broken_word, :group_segment_problem, :group_skip)";
        LOG.debug(insertSql);
        logParameters(params);
        jdbcTemplate.update(insertSql, params);
        internalGroup.setId(groupId);
    } else {
        params.addValue("group_id", group.getId());
        String updateSql = "UPDATE ocr_group" + " SET group_row_id = :group_row_id"
                + ", group_index = :group_index" + ", group_hard_hyphen = :group_hard_hyphen"
                + ", group_broken_word = :group_broken_word"
                + ", group_segment_problem = :group_segment_problem" + ", group_skip = :group_skip"
                + " WHERE group_id = :group_id";
        LOG.debug(updateSql);
        logParameters(params);
        jdbcTemplate.update(updateSql, params);
    }
}
From source file:info.raack.appliancelabeler.data.JDBCDatabase.java
@Override public Map<UserAppliance, ApplianceStateTransition> getLatestApplianceStatesForUserAppliances( List<UserAppliance> apps, final ApplianceEnergyConsumptionDetectionAlgorithm algorithm) { List<ApplianceStateTransition> latestTransitions = new ArrayList<ApplianceStateTransition>(); // query will not execute properly if there are no appliance ids in the "in" list if (apps.size() > 0) { List<Integer> applianceIds = new ArrayList<Integer>(); for (UserAppliance userAppliance : apps) { applianceIds.add(userAppliance.getId()); }//from w w w . j a va2 s . c o m MapSqlParameterSource parameters = new MapSqlParameterSource(); parameters.addValue("user_appliance_ids", applianceIds); parameters.addValue("detection_algorithm", algorithm.getId()); NamedParameterJdbcTemplate template = new NamedParameterJdbcTemplate(jdbcTemplate); ApplianceStateTransitionMapper applianceStateTransitionMapper = new ApplianceStateTransitionMapper( algorithm); template.query( "select ast.id as ast_id, ast.start_on as ast_start_on, ast.time as ast_time, ua.id as ua_id, ua.name as ua_name, ua.algorithm_generated as ua_algorithm_generated from appliance_state_transitions ast, (select user_appliance_id, max(time) as maxdatetime from appliance_state_transitions where user_appliance_id in (:user_appliance_ids) and detection_algorithm = :detection_algorithm group by user_appliance_id) groupedast, user_appliances ua where ua.id = ast.user_appliance_id and ast.user_appliance_id = groupedast.user_appliance_id and ast.time = groupedast.maxdatetime order by ast_time", parameters, applianceStateTransitionMapper); } Map<UserAppliance, ApplianceStateTransition> latestApplianceStateTransitions = new HashMap<UserAppliance, ApplianceStateTransition>(); for (ApplianceStateTransition transition : latestTransitions) { latestApplianceStateTransitions.put(transition.getUserAppliance(), transition); } return latestApplianceStateTransitions; }