Usage examples for the no-argument constructor org.springframework.jdbc.core.namedparam.MapSqlParameterSource()
public MapSqlParameterSource()
public MapSqlParameterSource()
From source file:org.inbio.modeling.core.dao.impl.LayerDAOImpl.java
/**
 * Looks up a single layer by its exact name.
 *
 * @param name the layer name to match
 * @return the first matching {@code Layer} row
 */
@Override
public Layer findByName(String name) {
    // LIMIT 1 guarantees at most one row even if names are not unique.
    final String sqlStatement = "SELECT * FROM " + this.table + "  WHERE name = :name limit 1 ";
    final MapSqlParameterSource args = new MapSqlParameterSource();
    args.addValue("name", name);
    return getSimpleJdbcTemplate().queryForObject(sqlStatement, new LayerRowMapper(), args);
}
From source file:com.joliciel.frenchTreebank.TreebankDaoImpl.java
public Category loadCategory(int categoryId) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); String sql = "SELECT " + SELECT_CATEGORY + " FROM ftb_category WHERE cat_id=:cat_id"; MapSqlParameterSource paramSource = new MapSqlParameterSource(); paramSource.addValue("cat_id", categoryId); LOG.info(sql);//from w w w.j a v a 2 s . c om TreebankDaoImpl.LogParameters(paramSource); Category category = null; try { category = (Category) jt.queryForObject(sql, paramSource, new CategoryMapper()); } catch (EmptyResultDataAccessException ex) { ex.hashCode(); } return category; }
From source file:airport.database.services.statistics.StatisticsDaoImpl.java
/**
 * Adds the given number of seconds to the user's recorded online time.
 *
 * @param user          the user whose counter is incremented
 * @param amountSeconds the number of seconds to add
 */
@Override
public void incAmountSecondsOnline(User user, int amountSeconds) {
    // MapSqlParameterSource.addValue returns this, so the parameters chain.
    final MapSqlParameterSource parameterSource = new MapSqlParameterSource()
            .addValue("amountSeconds", amountSeconds)
            .addValue("id", user.getId());
    jdbcTemplate.update(SQL_QUERY_INC_SECONDS_ONLINE, parameterSource);
}
From source file:com.joliciel.jochre.security.SecurityDaoJdbc.java
@Override public void saveUserInternal(UserInternal user) { NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource()); MapSqlParameterSource paramSource = new MapSqlParameterSource(); paramSource.addValue("user_username", user.getUsername()); paramSource.addValue("user_password", user.getPassword()); paramSource.addValue("user_first_name", user.getFirstName()); paramSource.addValue("user_last_name", user.getLastName()); paramSource.addValue("user_role", user.getRole().getId()); paramSource.addValue("user_failed_logins", user.getFailedLoginCount()); paramSource.addValue("user_logins", user.getLoginCount()); String sql = null;//from w ww. j a v a2 s. c o m if (user.isNew()) { sql = "SELECT nextval('ocr_user_id_seq')"; LOG.info(sql); int userId = jt.queryForInt(sql, paramSource); paramSource.addValue("user_id", userId); sql = "INSERT INTO ocr_user (user_id, user_username, user_password" + ", user_first_name, user_last_name, user_role, user_failed_logins, user_logins) " + "VALUES (:user_id, :user_username, :user_password" + ", :user_first_name, :user_last_name, :user_role, :user_failed_logins, :user_logins)"; LOG.info(sql); logParameters(paramSource); jt.update(sql, paramSource); user.setId(userId); } else { paramSource.addValue("user_id", user.getId()); sql = "UPDATE ocr_user" + " SET user_username = :user_username" + ", user_password = :user_password" + ", user_first_name = :user_first_name" + ", user_last_name = :user_last_name" + ", user_role = :user_role" + ", user_failed_logins = :user_failed_logins" + ", user_logins = :user_logins" + " WHERE user_id = :user_id"; LOG.info(sql); logParameters(paramSource); jt.update(sql, paramSource); } }
From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaLinkDao.java
/** * For the given list of new drops, find those that the link hash already * exists in the db and update the drop entry with the existing id. Also * remove the hash from the new link index for those that already exist. * /* w ww . jav a 2 s.c o m*/ * @param newLinkIndex * @param drops */ private void updateNewLinkIndex(Map<String, List<int[]>> newLinkIndex, List<Drop> drops) { // First find and update existing drops with their ids. String sql = "SELECT id, hash FROM links WHERE hash IN (:hashes)"; MapSqlParameterSource params = new MapSqlParameterSource(); params.addValue("hashes", newLinkIndex.keySet()); List<Map<String, Object>> results = this.namedJdbcTemplate.queryForList(sql, params); // Update id for the drops that were found for (Map<String, Object> result : results) { String hash = (String) result.get("hash"); Long id = ((Number) result.get("id")).longValue(); List<int[]> indexes = newLinkIndex.get(hash); for (int[] index : indexes) { drops.get(index[0]).getLinks().get(index[1]).setId(id); } // Hash is not for a new drop so remove it newLinkIndex.remove(hash); } }
From source file:com.perry.infrastructure.call.CallDaoServiceImpl.java
/**
 * Removes the truck assignment from the given call.
 *
 * @param callId the call to clear; its truck_id is reset to 0
 */
@Override
public void unAssignTruck(long callId) {
    // truck_id = 0 is the sentinel for "no truck assigned".
    final MapSqlParameterSource params = new MapSqlParameterSource().addValue("callId", callId);
    namedParameterJdbcTemplate.update("update calls set truck_id = 0 where call_id = :callId", params);
}
From source file:com.joliciel.lefff.LefffDaoImpl.java
/**
 * Fetches all attributes linked to the given entry (via lef_entry_attribute),
 * ordered by attribute code then value for stable output.
 *
 * @param entry the entry whose attributes are loaded
 * @return the entry's attributes, possibly empty
 */
@SuppressWarnings("unchecked")
@Override
public List<Attribute> findAttributes(LefffEntryInternal entry) {
    final NamedParameterJdbcTemplate template = new NamedParameterJdbcTemplate(this.getDataSource());
    final String sql = "SELECT " + SELECT_ATTRIBUTE + " FROM lef_attribute, lef_entry_attribute"
            + " WHERE attribute_id = entatt_attribute_id AND entatt_entry_id = :entry_id"
            + " ORDER BY attribute_code, attribute_value";
    final MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue("entry_id", entry.getId());
    LOG.info(sql);
    LefffDaoImpl.LogParameters(params);
    return template.query(sql, params, new AttributeMapper(this.getLefffServiceInternal()));
}
From source file:com.joliciel.talismane.terminology.postgres.PostGresTerminologyBase.java
/**
 * Retrieves terms for the current project, filtered by frequency, search text
 * and marked status. When both {@code marked} and {@code markedExpansions} are
 * set, the frequency threshold is applied against each term's maximum
 * ancestor frequency (after loading parents) instead of in SQL.
 *
 * @param frequencyThreshold minimum frequency; 0 disables the filter
 * @param searchText         prefix to match against text_text; "" disables the filter
 * @param marked             restrict to marked terms
 * @param markedExpansions   include expansions of marked terms (changes filtering mode)
 * @return matching terms ordered by descending frequency, then text
 */
@Override
public List<Term> getTerms(int frequencyThreshold, String searchText, boolean marked, boolean markedExpansions) {
    MONITOR.startTask("getTerms");
    try {
        NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource());
        String sql = "SELECT " + SELECT_TERM + " FROM term" + " INNER JOIN text ON term_text_id=text_id"
                + " WHERE term_project_id = :term_project_id";
        // Each appended clause must stay in sync with the matching addValue
        // calls below: a clause without its parameter (or vice versa) breaks
        // the query.
        if (marked && markedExpansions) {
            // Expansion mode: frequency is NOT filtered in SQL here; it is
            // applied later via getMaxAncestorFrequency.
            sql += " AND term_marked = :term_marked";
            if (searchText.length() > 0)
                sql += " AND text_text LIKE :term_text";
        } else {
            if (frequencyThreshold > 0)
                sql += " AND term_frequency >= :term_frequency";
            if (searchText.length() > 0)
                sql += " AND text_text LIKE :term_text";
            if (marked)
                sql += " AND term_marked = :term_marked";
        }
        sql += " ORDER BY term_frequency DESC, text_text";
        MapSqlParameterSource paramSource = new MapSqlParameterSource();
        if (frequencyThreshold > 0)
            paramSource.addValue("term_frequency", frequencyThreshold);
        if (searchText.length() > 0)
            // Prefix match: searchText anchored at the start, "%" wildcard after.
            paramSource.addValue("term_text", searchText + "%");
        if (marked)
            paramSource.addValue("term_marked", true);
        paramSource.addValue("term_project_id", this.getCurrentProjectId());
        LOG.trace(sql);
        LogParameters(paramSource);
        @SuppressWarnings("unchecked")
        List<Term> terms = jt.query(sql, paramSource, new TermMapper());
        if (marked && markedExpansions) {
            this.addParents(terms);
            // Keep only terms whose strongest ancestor meets the threshold.
            List<Term> termsWithFrequency = new ArrayList<Term>();
            for (Term term : terms) {
                int maxAncestorFrequency = this.getMaxAncestorFrequency(term);
                if (maxAncestorFrequency >= frequencyThreshold)
                    termsWithFrequency.add(term);
            }
            terms = termsWithFrequency;
        }
        return terms;
    } finally {
        // Always close the monitoring task, even on exception.
        MONITOR.endTask("getTerms");
    }
}
From source file:com.ushahidi.swiftriver.core.api.dao.impl.JpaMediaDao.java
/** * For the given list of new drops, find those that the media hash already * exists in the db and update the drop entry with the existing id. Also * remove the hash from the new media index for those that already exist. * //from ww w . j a va 2s . c o m * @param newMediaIndex * @param drops */ private void updateNewMediaIndex(Map<String, List<int[]>> newMediaIndex, List<Drop> drops) { // First find and update existing drops with their ids. String sql = "SELECT id, hash FROM media WHERE hash IN (:hashes)"; MapSqlParameterSource params = new MapSqlParameterSource(); params.addValue("hashes", newMediaIndex.keySet()); List<Map<String, Object>> results = this.namedJdbcTemplate.queryForList(sql, params); // Update id for the drops that were found for (Map<String, Object> result : results) { String hash = (String) result.get("hash"); Long id = ((Number) result.get("id")).longValue(); List<int[]> indexes = newMediaIndex.get(hash); for (int[] index : indexes) { drops.get(index[0]).getMedia().get(index[1]).setId(id); } // Hash is not for a new drop so remove it newMediaIndex.remove(hash); } }
From source file:com.epam.catgenome.dao.DaoHelper.java
/** * Returns {@code List} which contains next values for sequence with the given name. * * @param sequenceName {@code String} specifies full-qualified name of sequence which * next values should be returned by a call * @param count int specifies the number of next values are should be retrieved * @return {@code List} list of next values for sequence; list.size() == count *///from w w w. j a v a2 s.c o m @Transactional(propagation = Propagation.MANDATORY) public List<Long> createIds(final String sequenceName, final int count) { Assert.isTrue(StringUtils.isNotBlank(sequenceName)); if (count == 0) { return Collections.emptyList(); } // creates a new temporary list: list.size() == count final List<Long> rows = LongStream.range(0L, count).collect(LinkedList::new, LinkedList::add, LinkedList::addAll); final Long listId = createTempLongList(rows); // generates next values for sequence with the given name final MapSqlParameterSource params = new MapSqlParameterSource(); params.addValue(HelperParameters.LIST_ID.name(), listId); params.addValue(HelperParameters.SEQUENCE_NAME.name(), sequenceName.trim()); final List<Long> list = getNamedParameterJdbcTemplate().queryForList(createIdsQuery, params, Long.class); // clears a temporary list clearTempList(listId); return list; }