Usage examples for org.springframework.jdbc.support.rowset.SqlRowSet.next()
boolean next() throws InvalidResultSetAccessException;
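next() moves the cursor to the next row of the row set and returns true as long as a valid row is available, so callers typically write if (rs.next()) to check for a single row and while (rs.next()) to iterate, as the examples below show. The following is a minimal sketch of that basic pattern; the DataSource wiring and the "users" table are assumptions made for illustration only and do not come from any of the projects listed below.

import javax.sql.DataSource;

import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.rowset.SqlRowSet;

public class SqlRowSetNextSketch {

    private final JdbcTemplate jdbcTemplate;

    public SqlRowSetNextSketch(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    /** Iterates a disconnected row set; next() returns false once the rows are exhausted. */
    public void printUsers() {
        // "users" is a hypothetical table used only for this sketch
        SqlRowSet rowSet = jdbcTemplate.queryForRowSet("SELECT id, name FROM users");
        while (rowSet.next()) {
            int id = rowSet.getInt("id");      // columns can be read by label
            String name = rowSet.getString(2); // or by 1-based index
            System.out.println(id + ": " + name);
        }
    }
}

Because the default SqlRowSet returned by JdbcTemplate is backed by a disconnected, cached row set, next() only walks rows already held in memory, and failures surface as the unchecked InvalidResultSetAccessException rather than a checked SQLException.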
From source file: com.hygenics.parser.getDAOTemplate.java

/**
 * Check for column.
 */
public boolean columnExists(String table, String column) {
    String[] data = table.split("\\.");
    if (data.length > 1) {
        String query = "SELECT count(column_name) FROM information_schema.columns WHERE table_name='".trim()
                + data[1].trim() + "' AND table_schema='" + data[0] + "' AND column_name='" + column.trim() + "'";
        SqlRowSet rs = this.jdbcTemplateObject.queryForRowSet(query);
        if (rs.next()) {
            if (rs.getInt(1) != 0) {
                return true;
            }
        }
    }
    return false;
}
From source file: com.krawler.spring.documents.documentDAOImpl.java

public boolean checkReloadDocumentIndex() {
    boolean doReload = false;
    String query = "select reloadindex from lucene_master where activity=? and reloadindex=?";
    SqlRowSet rs = queryForRowSetJDBC(query, new Object[] { "documents", 1 });
    if (rs.next()) {
        doReload = true;
    }
    return doReload;
}
From source file: com.joliciel.talismane.terminology.postgres.PostGresTerminologyBase.java

void addParents(List<Term> childTerms) {
    NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource());
    String sql = "SELECT " + SELECT_TERM + ", term_expansion_id FROM term"
            + " INNER JOIN text ON term_text_id=text_id"
            + " INNER JOIN term_expansions ON term_id = termexp_term_id"
            + " WHERE term_project_id = :term_project_id"
            + " AND termexp_expansion_id IN (:child_terms)";
    MapSqlParameterSource paramSource = new MapSqlParameterSource();
    paramSource.addValue("term_project_id", this.getCurrentProjectId());
    List<Integer> termIds = new ArrayList<Integer>();
    Map<Integer, PostGresTerm> childTermMap = new HashMap<Integer, PostGresTerm>();
    for (Term childTerm : childTerms) {
        PostGresTerm termInternal = (PostGresTerm) childTerm;
        if (termInternal.getParentsInternal() == null) {
            termIds.add(termInternal.getId());
            termInternal.setParentsInternal(new TreeSet<Term>());
            childTermMap.put(termInternal.getId(), termInternal);
        }
    }
    paramSource.addValue("child_terms", termIds);
    LOG.trace(sql);
    LogParameters(paramSource);
    SqlRowSet rs = jt.queryForRowSet(sql, paramSource);
    TermMapper termMapper = new TermMapper();
    List<Term> parentTerms = new ArrayList<Term>();
    while (rs.next()) {
        Term term = termMapper.mapRow(rs);
        parentTerms.add(term);
        int childId = rs.getInt("termexp_expansion_id");
        PostGresTerm childTerm = childTermMap.get(childId);
        childTerm.getParentsInternal().add(term);
    }
    if (parentTerms.size() > 0) {
        this.addParents(parentTerms);
    }
}
From source file: org.restsql.core.impl.AbstractSqlResourceMetaData.java

/**
 * Builds list of primary key column labels.
 *
 * @param Connection
 *            connection
 * @throws SqlResourceException
 *             if a database access error occurs
 */
private void buildPrimaryKeys() {
    SqlRowSet resultSet = null;
    for (final TableMetaData table : tables) {
        // statement.setString(1, table.getDatabaseName());
        // statement.setString(2, table.getTableName());
        resultSet = this.jdbcTemplate.queryForRowSet(getSqlPkQuery(), table.getDatabaseName(),
                table.getTableName());
        while (resultSet.next()) {
            final String columnName = resultSet.getString(1);
            for (final ColumnMetaData column : table.getColumns().values()) {
                if (columnName.equals(column.getColumnName())) {
                    ((ColumnMetaDataImpl) column).setPrimaryKey(true);
                    ((TableMetaDataImpl) table).addPrimaryKey(column);
                }
            }
        }
    }
}
From source file: org.restsql.core.impl.AbstractSqlResourceMetaData.java

/**
 * Builds sequence metadata for all columns.
 *
 * @param connection
 *            database connection
 * @throws SQLException
 *             if a database access error occurs
 */
private void buildSequenceMetaData() {
    SqlRowSet resultSet = null;
    for (final TableMetaData table : tables) {
        // statement.setString(1, table.getDatabaseName());
        // statement.setString(2, table.getTableName());
        resultSet = this.jdbcTemplate.queryForRowSet(getSqlColumnsQuery(), table.getDatabaseName(),
                table.getTableName());
        while (resultSet.next()) {
            final String columnName = resultSet.getString(1);
            for (ColumnMetaData column : table.getColumns().values()) {
                if (column.getColumnName().equals(columnName)) {
                    setSequenceMetaData((ColumnMetaDataImpl) column, resultSet);
                    break;
                }
            }
        }
    }
}
From source file: org.apereo.lap.services.output.CSVOutputHandler.java

@Override
public OutputResult writeOutput(Output output) {
    OutputResult result = new OutputResult(output);
    // make sure we can write the CSV
    File csv = new File(configuration.getOutputDirectory(), output.filename);
    boolean created;
    try {
        created = csv.createNewFile();
        if (logger.isDebugEnabled())
            logger.debug("CSV file created (" + created + "): " + csv.getAbsolutePath());
    } catch (IOException e) {
        throw new IllegalStateException("Exception creating CSV file: " + csv.getAbsolutePath() + ": " + e, e);
    }
    if (!created) {
        // created file is going to be a writeable file so no check needed
        if (csv.isFile() && csv.canRead() && csv.canWrite()) {
            // file exists and we can write to it
            if (logger.isDebugEnabled())
                logger.debug("CSV file is writeable: " + csv.getAbsolutePath());
        } else {
            throw new IllegalStateException("Cannot write to the CSV file: " + csv.getAbsolutePath());
        }
    }
    // make sure we can read from the temp data source
    try {
        int rows = storage.getTempJdbcTemplate().queryForObject(output.makeTempDBCheckSQL(), Integer.class);
        logger.info("Preparing to output " + rows + " from temp table " + output.from + " to " + output.filename);
    } catch (Exception e) {
        throw new RuntimeException("Failure while trying to count the output data rows: " + output.makeTempDBCheckSQL());
    }
    Map<String, String> sourceToHeaderMap = output.makeSourceTargetMap();
    String selectSQL = output.makeTempDBSelectSQL();
    // fetch the data to write to CSV
    SqlRowSet rowSet;
    try {
        // for really large data we probably need to use http://docs.spring.io/spring/docs/3.0.x/api/org/springframework/jdbc/core/RowCallbackHandler.html
        rowSet = storage.getTempJdbcTemplate().queryForRowSet(selectSQL);
    } catch (Exception e) {
        throw new RuntimeException("Failure while trying to retrieve the output data set: " + selectSQL);
    }
    // write data to the CSV file
    int lines = 0;
    PrintWriter pw = null;
    try {
        pw = new PrintWriter(new BufferedWriter(new FileWriter(csv, true)));
        CSVWriter writer = new CSVWriter(pw);
        // write out the header
        writer.writeNext(sourceToHeaderMap.values().toArray(new String[sourceToHeaderMap.size()]));
        // write out the rows
        while (rowSet.next()) {
            String[] rowVals = new String[sourceToHeaderMap.size()];
            for (int i = 0; i < sourceToHeaderMap.size(); i++) {
                rowVals[i] = (rowSet.wasNull() ? null : rowSet.getString(i + 1));
            }
            writer.writeNext(rowVals);
        }
        IOUtils.closeQuietly(writer);
    } catch (Exception e) {
        throw new RuntimeException("Failure writing output to CSV (" + csv.getAbsolutePath() + "): " + e, e);
    } finally {
        IOUtils.closeQuietly(pw);
    }
    result.done(lines, null);
    return result;
}
From source file: org.restsql.core.impl.AbstractSqlResourceMetaData.java

private void buildInvisibleForeignKeys() {
    SqlRowSet resultSet = null;
    for (final TableMetaData table : tables) {
        if (!table.isParent()) {
            // statement.setString(1, table.getDatabaseName());
            // statement.setString(2, table.getTableName());
            resultSet = this.jdbcTemplate.queryForRowSet(getSqlColumnsQuery(), table.getDatabaseName(),
                    table.getTableName());
            while (resultSet.next()) {
                final String columnName = resultSet.getString(1);
                if (!table.getColumns().containsKey(columnName)) {
                    TableMetaData mainTable;
                    switch (table.getTableRole()) {
                    case ChildExtension:
                        mainTable = childTable;
                        break;
                    default: // Child, ParentExtension, Unknown
                        mainTable = parentTable;
                    }
                    // Look for a pk on the main table with the same name
                    for (final ColumnMetaData pk : mainTable.getPrimaryKeys()) {
                        if (columnName.equals(pk.getColumnName())) {
                            final ColumnMetaDataImpl fkColumn = new ColumnMetaDataImpl(table.getDatabaseName(),
                                    table.getQualifiedTableName(), table.getTableName(), table.getTableRole(),
                                    columnName, pk.getColumnLabel(), resultSet.getString(2), this);
                            ((TableMetaDataImpl) table).addColumn(fkColumn);
                        }
                    }
                }
            }
        }
    }
}
From source file: net.freechoice.dao.impl.DaoUser.java

@Deprecated
@Override
public AvgUser getRoleByLoginName(final String loginName) {
    AvgUser role = null;
    SqlRowSet rowSet = getJdbcTemplate().queryForRowSet("select U.id, U.password, U.email, M.value "
            + " from FC_User as U left outer join FC_User_Meta as M "
            + " on U.id = M.id_user_ "
            + " where U.is_valid = true and name_login = " + quote(loginName)
            + " and M.key = " + quote(SuperUser.TIME_EXPIRE));
    if (rowSet.next()) {
        if (rowSet.isLast()) {
            /* Only super users have an expiry time. */
            String timeExpire = rowSet.getString(4);
            if (timeExpire == null || (timeExpire != null && DateUtil.hasExpired(timeExpire))) {
                role = new AvgUser();
            } else {
                role = new SuperUser();
            }
            role.id = rowSet.getInt(1);
            role.hashedPswWithSalt = rowSet.getString(2);
            role.email = rowSet.getString(3);
            role.name_login = loginName;
        } else {
            throw new RuntimeException("multiple users found, should be only one");
        }
    }
    return role;
}
From source file: com.joliciel.lefff.LefffDaoImpl.java

@Override
public Map<String, List<LexicalEntry>> findEntryMap(List<String> categories) {
    NamedParameterJdbcTemplate jt = new NamedParameterJdbcTemplate(this.getDataSource());
    String sql = "SELECT " + SELECT_ENTRY + "," + SELECT_WORD + "," + SELECT_LEMMA + "," + SELECT_CATEGORY + ","
            + SELECT_PREDICATE + "," + SELECT_ATTRIBUTE + " FROM lef_entry"
            + " INNER JOIN lef_word ON entry_word_id = word_id"
            + " INNER JOIN lef_lemma ON entry_lemma_id = lemma_id"
            + " INNER JOIN lef_category ON entry_category_id = category_id"
            + " INNER JOIN lef_predicate ON entry_predicate_id = predicate_id"
            + " INNER JOIN lef_attribute ON entry_morph_id = attribute_id"
            + " WHERE entry_status < 3";
    if (categories != null && categories.size() > 0) {
        sql += " AND category_code in (:categoryCodes)";
    }
    sql += " ORDER BY entry_status, entry_id";
    MapSqlParameterSource paramSource = new MapSqlParameterSource();
    if (categories != null && categories.size() > 0) {
        paramSource.addValue("categoryCodes", categories);
    }
    LOG.info(sql);
    LefffDaoImpl.LogParameters(paramSource);
    double requiredCapacity = 500000;
    Map<String, List<LexicalEntry>> entryMap = new HashMap<String, List<LexicalEntry>>(
            ((int) Math.ceil(requiredCapacity / 0.75)));
    EntryMapper entryMapper = new EntryMapper(this.lefffServiceInternal);
    WordMapper wordMapper = new WordMapper(this.lefffServiceInternal);
    CategoryMapper categoryMapper = new CategoryMapper(this.lefffServiceInternal);
    LemmaMapper lemmaMapper = new LemmaMapper(this.lefffServiceInternal);
    PredicateMapper predicateMapper = new PredicateMapper(this.lefffServiceInternal);
    AttributeMapper attributeMapper = new AttributeMapper(this.lefffServiceInternal);
    SqlRowSet rowSet = jt.queryForRowSet(sql, paramSource);
    Map<Integer, Category> categoryMap = new HashMap<Integer, Category>();
    Map<Integer, Predicate> predicateMap = new HashMap<Integer, Predicate>();
    Map<Integer, Attribute> attributeMap = new HashMap<Integer, Attribute>();
    Map<Integer, Lemma> lemmaMap = new HashMap<Integer, Lemma>();
    while (rowSet.next()) {
        LefffEntryInternal entry = entryMapper.mapRow(rowSet);
        WordInternal word = wordMapper.mapRow(rowSet);
        entry.setWord(word);
        int categoryId = rowSet.getInt("category_id");
        Category category = categoryMap.get(categoryId);
        if (category == null) {
            category = categoryMapper.mapRow(rowSet);
            categoryMap.put(categoryId, category);
        }
        entry.setCategory(category);
        int predicateId = rowSet.getInt("predicate_id");
        Predicate predicate = predicateMap.get(predicateId);
        if (predicate == null) {
            predicate = predicateMapper.mapRow(rowSet);
            predicateMap.put(predicateId, predicate);
        }
        entry.setPredicate(predicate);
        int lemmaId = rowSet.getInt("lemma_id");
        Lemma lemma = lemmaMap.get(lemmaId);
        if (lemma == null) {
            lemma = lemmaMapper.mapRow(rowSet);
            lemmaMap.put(lemmaId, lemma);
        }
        entry.setLemma(lemma);
        int attributeId = rowSet.getInt("attribute_id");
        Attribute attribute = attributeMap.get(attributeId);
        if (attribute == null) {
            attribute = attributeMapper.mapRow(rowSet);
            attributeMap.put(attributeId, attribute);
        }
        entry.setMorphology(attribute);
        List<LexicalEntry> entries = entryMap.get(word.getText());
        if (entries == null) {
            entries = new ArrayList<LexicalEntry>();
            entryMap.put(word.getText(), entries);
        }
        entries.add(entry);
    }
    for (String word : entryMap.keySet()) {
        List<LexicalEntry> entries = entryMap.get(word);
        ArrayList<LexicalEntry> entriesArrayList = (ArrayList<LexicalEntry>) entries;
        entriesArrayList.trimToSize();
    }
    return entryMap;
}
From source file: com.krawler.crm.reportBuilder.bizservice.ReportBuilderServiceImpl.java

public List getIdsList(SqlRowSet rs) {
    List<Object> ll = new ArrayList<Object>();
    List<String> idList = new ArrayList<String>();
    Boolean productColFlag = false;
    try {
        SqlRowSetMetaData rsmd = rs.getMetaData();
        while (rs.next()) {
            for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                String columnLabel = rsmd.getColumnLabel(i);
                if (columnLabel.equals(Constants.Crm_leadid) || columnLabel.equals(Constants.Crm_productid)
                        || columnLabel.equals(Constants.Crm_accountid) || columnLabel.equals(Constants.Crm_contactid)
                        || columnLabel.equals(Constants.Crm_caseid) || columnLabel.equals(Constants.Crm_opportunityid)) {
                    if (rs.getObject(i) != null) {
                        idList.add(rs.getObject(i).toString());
                    }
                } else if (columnLabel.equals(Constants.Crm_lead_product_key)
                        || columnLabel.equals(Constants.Crm_opportunity_product_key)
                        || columnLabel.equals(Constants.Crm_account_product_key)
                        || columnLabel.equals(Constants.Crm_case_product_key)) {
                    productColFlag = true;
                }
            }
        }
        rs.beforeFirst();
        ll.add(idList);
        ll.add(productColFlag);
    } catch (Exception ex) {
        LOGGER.warn(ex.getMessage(), ex);
    }
    return ll;
}