List of usage examples for org.springframework.jdbc.core JdbcTemplate queryForObject
@Override @Nullable public <T> T queryForObject(String sql, Class<T> requiredType) throws DataAccessException
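This two-argument overload maps the single column of the single result row to the requested type. It throws EmptyResultDataAccessException if the query returns no rows and IncorrectResultSizeDataAccessException if it returns more than one; it returns null only when the one value is SQL NULL. A minimal sketch of the basic call (the table and column names are hypothetical, used only for illustration):

import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;

public class QueryForObjectExample {

    private final JdbcTemplate jdbcTemplate;

    public QueryForObjectExample(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    // Single scalar value: the query must return exactly one row with one column.
    public long countUsers() {
        // "users" is a made-up table name for this sketch.
        return jdbcTemplate.queryForObject("SELECT count(*) FROM users", Long.class);
    }
}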
From source file:com.logsniffer.app.DataSourceAppConfig.java
/**
 * @return H2 pooled data source
 * @throws SQLException
 */
@Bean(destroyMethod = "dispose")
public DataSource dataSource() throws SQLException {
    final JdbcConnectionPool pool = JdbcConnectionPool.create(url, user, password);
    pool.setMaxConnections(maxPoolConnections);
    Connection con = null;
    con = pool.getConnection();
    final Flyway flyway = new Flyway();
    flyway.setLocations("classpath:sql/migration");
    flyway.setDataSource(pool);
    flyway.setSqlMigrationPrefix("VOS-");
    flyway.setIgnoreFailedFutureMigration(true);
    final JdbcTemplate tpl = new JdbcTemplate(pool);
    if (tpl.queryForObject("select count(*) from information_schema.tables where table_name = 'LOG_SOURCES'",
            int.class) == 0) {
        logger.info("H2 database not found, creating new schema and populate with default data");
        flyway.setBaselineVersion(MigrationVersion.fromVersion(DB_SETUP_VERSION));
        flyway.setBaselineOnMigrate(true);
        try {
            final ResourceDatabasePopulator dbPopulator = new ResourceDatabasePopulator();
            dbPopulator.addScript(new ClassPathResource("/sql/quartz/tables_h2.sql"));
            dbPopulator.addScript(new ClassPathResource("/sql/model/schema_h2.sql"));
            dbPopulator.populate(con);
            newSchema = true;
            logger.info("Established H2 connection pool with new database");
        } finally {
            if (con != null) {
                con.close();
            }
        }
    } else {
        logger.info("Established H2 connection pool with existing database");
        if (tpl.queryForObject(
                "select count(*) from information_schema.tables where table_name = 'schema_version'",
                int.class) == 0) {
            logger.info("Flyway's DB migration not setup in this version, set baseline version to 0.5.0");
            flyway.setBaselineVersion(MigrationVersion.fromVersion("0.5.0"));
            flyway.setBaselineOnMigrate(true);
        }
    }
    logger.debug("Migrating database, base version is: {}", flyway.getBaselineVersion());
    flyway.migrate();
    logger.debug("Database migrated from base version: {}", flyway.getBaselineVersion());
    return pool;
}
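The example above uses queryForObject twice as an existence probe against H2's information_schema before deciding whether to bootstrap the schema or only baseline Flyway. A stripped-down sketch of the same check, assuming only a DataSource and a hypothetical table name; it uses the vararg overload so the table name is bound rather than concatenated (H2 upper-cases unquoted table names by default):

import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;

public class SchemaProbe {

    // Returns true if the named table already exists in an H2 database.
    static boolean tableExists(DataSource dataSource, String tableName) {
        JdbcTemplate tpl = new JdbcTemplate(dataSource);
        Integer count = tpl.queryForObject(
                "select count(*) from information_schema.tables where table_name = ?",
                Integer.class, tableName);
        return count != null && count > 0;
    }
}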
From source file:org.cloudfoundry.test.ServiceController.java
@RequestMapping(value = "/mysql/char-set", method = RequestMethod.GET) public ResponseEntity<String> getMySQLDataSourceCharSet() { if (serviceHolder.getMySqlDataSource() == null) { return new ResponseEntity<String>(HttpStatus.NOT_FOUND); }/*from ww w.j a v a 2 s .c o m*/ JdbcTemplate jt = new JdbcTemplate(serviceHolder.getMySqlDataSource()); String characterEncoding = jt.queryForObject("SELECT charset('Dligt vder oroar semestersvensken')", String.class); return new ResponseEntity<String>(characterEncoding, HttpStatus.OK); }
From source file:com.rplt.studioMusik.member.MemberDAO.java
@Override
public String getGeneratedKodeMember() {
    String sql = "SELECT to_char(max(kode_member) + 1, 'FM099999') FROM member_studio_musik";
    JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
    String query = jdbcTemplate.queryForObject(sql, String.class);
    if (query == null) {
        return "000001";
    } else {
        return query;
    }
}
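Worth noting about this pattern: queryForObject returns null here only because max() on an empty table still produces one row containing SQL NULL; if the query returned no rows at all it would throw EmptyResultDataAccessException instead of returning null. A small sketch of the same key-generation idea with the fallback made explicit (table, column, and class names follow the example above but are otherwise illustrative):

import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;

public class SequentialCodeGenerator {

    private final JdbcTemplate jdbcTemplate;

    public SequentialCodeGenerator(DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    // max(...) on an empty table yields a single NULL value, so queryForObject
    // returns null rather than throwing; fall back to the first code in that case.
    public String nextCode() {
        String next = jdbcTemplate.queryForObject(
                "SELECT to_char(max(kode_member) + 1, 'FM099999') FROM member_studio_musik", String.class);
        return next != null ? next : "000001";
    }
}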
From source file:io.cloudslang.engine.data.SimpleHiloIdentifierGenerator.java
private void updateCurrentChunk() {
    if (logger.isDebugEnabled()) {
        logger.debug("Updating HILO chunk...");
    }
    long t = System.currentTimeMillis();
    try (Connection conn = dataSource.getConnection()) {
        conn.setAutoCommit(false);
        JdbcTemplate jdbcTemplate = new JdbcTemplate(new SingleConnectionDataSource(conn, true));
        jdbcTemplate.update(SQL_LOCK);
        currentChunk = jdbcTemplate.queryForObject(SQL_SELECT, Integer.class);
        if (logger.isDebugEnabled())
            logger.debug("Current chunk: " + currentChunk);
        jdbcTemplate.execute(SQL_UPDATE);
        jdbcTemplate.execute("commit");
        if (logger.isDebugEnabled()) {
            logger.debug("Updating HILO chunk done in " + (System.currentTimeMillis() - t) + " ms");
        }
        currentId = 0;
    } catch (SQLException e) {
        logger.error("Unable to update current chunk", e);
        throw new IllegalStateException("Unable to update current chunk");
    }
}
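The chunk fetched here would typically be combined with a local counter so that IDs are handed out without a database round trip per call. A hedged sketch of how such a nextId() might look in the same class, assuming a chunk-size constant and the currentChunk/currentId fields used above; none of this is confirmed by the snippet itself:

// Hypothetical companion to updateCurrentChunk(): hands out CHUNK_SIZE ids per database round trip.
private static final int CHUNK_SIZE = 100;

public synchronized long nextId() {
    if (currentId >= CHUNK_SIZE) {
        updateCurrentChunk();   // reserves the next chunk and resets currentId to 0
    }
    return (long) currentChunk * CHUNK_SIZE + currentId++;
}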
From source file:com.rplt.studioMusik.dataPersewaan.PersewaanStudioMusikDAO.java
@Override
public String getGeneratedKodeSewa() {
    String sql = "SELECT to_char(max(kode_sewa) + 1, 'FM099999') FROM persewaan_studio_musik";
    JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
    String query = jdbcTemplate.queryForObject(sql, String.class);
    if (query == null) {
        return "100001";
    } else {
        return query;
    }
}
From source file:com.github.viktornar.migration.schema.Schema.java
/**
 * Check if a row exists in the given table.
 *
 * @param template    The JDBC template to use.
 * @param whereClause The where clause.
 * @param table       The table in question.
 * @return Whether the row exists.
 */
protected boolean rowExists(JdbcTemplate template, String whereClause, String table) {
    assert template != null;
    assert whereClause != null && !whereClause.isEmpty();
    assert table != null && !table.isEmpty();
    try {
        Integer rowCount = template
                .queryForObject(format("select count(*) from %s where %s", table, whereClause), Integer.class);
        return rowCount > 0;
    } catch (Exception x) {
        return false;
    }
}
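A short usage sketch for the helper above, e.g. from a migration step; the table name and where clause here are hypothetical:

// Hypothetical caller: only insert the default role if it is not there yet.
JdbcTemplate template = new JdbcTemplate(dataSource);
if (!rowExists(template, "name = 'ROLE_USER'", "roles")) {
    template.update("insert into roles (name) values (?)", "ROLE_USER");
}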
From source file:dao.SearchDAO.java
public static ObjectNode getPagedCommentsByKeyword(String category, String keyword, int page, int size) {
    List<Dataset> pagedDatasets = new ArrayList<Dataset>();
    final JdbcTemplate jdbcTemplate = getJdbcTemplate();
    javax.sql.DataSource ds = jdbcTemplate.getDataSource();
    DataSourceTransactionManager tm = new DataSourceTransactionManager(ds);
    TransactionTemplate txTemplate = new TransactionTemplate(tm);
    ObjectNode result;
    result = txTemplate.execute(new TransactionCallback<ObjectNode>() {
        public ObjectNode doInTransaction(TransactionStatus status) {
            List<Map<String, Object>> rows = null;
            String query = SEARCH_DATASET_BY_COMMENTS_WITH_PAGINATION.replace("$keyword", keyword);
            rows = jdbcTemplate.queryForList(query, (page - 1) * size, size);
            for (Map row : rows) {
                Dataset ds = new Dataset();
                ds.id = (long) row.get(DatasetRowMapper.DATASET_ID_COLUMN);
                ds.name = (String) row.get(DatasetRowMapper.DATASET_NAME_COLUMN);
                ds.source = (String) row.get(DatasetRowMapper.DATASET_SOURCE_COLUMN);
                ds.urn = (String) row.get(DatasetRowMapper.DATASET_URN_COLUMN);
                ds.schema = (String) row.get(DatasetRowMapper.DATASET_SCHEMA_COLUMN);
                pagedDatasets.add(ds);
            }
            long count = 0;
            try {
                count = jdbcTemplate.queryForObject("SELECT FOUND_ROWS()", Long.class);
            } catch (EmptyResultDataAccessException e) {
                Logger.error("Exception = " + e.getMessage());
            }
            ObjectNode resultNode = Json.newObject();
            resultNode.put("count", count);
            resultNode.put("page", page);
            resultNode.put("category", category);
            resultNode.put("itemsPerPage", size);
            resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));
            resultNode.set("data", Json.toJson(pagedDatasets));
            return resultNode;
        }
    });
    return result;
}
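SELECT FOUND_ROWS() only reports the total ignoring LIMIT when the preceding paged query was issued with MySQL's SQL_CALC_FOUND_ROWS modifier on the same connection, which is presumably why the example wraps both calls in a TransactionTemplate so they share one connection. A minimal sketch of that pairing; the query text and table name are illustrative, not the constants used above:

import java.util.List;
import java.util.Map;
import org.springframework.jdbc.core.JdbcTemplate;

public class PagedSearch {

    // MySQL-specific pagination: SQL_CALC_FOUND_ROWS on the page query, FOUND_ROWS() for the total.
    // Both statements must run on the same connection, which plain JdbcTemplate calls only share
    // when they execute inside a transaction.
    public long pagedTotal(JdbcTemplate jdbcTemplate, String keyword, int offset, int limit) {
        // one page of results (discarded here; a real caller would map and return them)
        List<Map<String, Object>> rows = jdbcTemplate.queryForList(
                "SELECT SQL_CALC_FOUND_ROWS id, name FROM dict_dataset WHERE name LIKE ? LIMIT ?, ?",
                "%" + keyword + "%", offset, limit);
        // total number of rows the query would have matched without the LIMIT clause
        return jdbcTemplate.queryForObject("SELECT FOUND_ROWS()", Long.class);
    }
}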
From source file:uk.ac.kcl.partitioners.RealtimeTimeStampAndPKRangePartitioner.java
private ScheduledPartitionParams getParams(Timestamp jobStartTimeStamp, boolean inclusiveOfStart) {
    JdbcTemplate jdbcTemplate = new JdbcTemplate(sourceDataSource);
    String sql = "\n SELECT " + " MAX(" + column + ") AS max_id , \n" + " MIN(" + column + ") AS min_id , \n"
            + " MAX(" + timeStamp + ") AS max_time_stamp , \n" + " MIN(" + timeStamp + ") AS min_time_stamp \n"
            + " FROM ( \n" + " SELECT " + batchJobUtils.cleanSqlString(env.getProperty("partitionerPreFieldsSQL"))
            + " " + column + ", " + timeStamp + " FROM " + table + " ";
    Timestamp jobEndTimeStamp = getEndTimeStamp(jobStartTimeStamp);
    if (configuredFirstRunTimestamp != null && firstRun) {
        sql = getStartTimeInclusiveSqlString(sql, jobStartTimeStamp, jobEndTimeStamp);
    } else if (inclusiveOfStart) {
        sql = getStartTimeInclusiveSqlString(sql, jobStartTimeStamp, jobEndTimeStamp);
    } else if (!inclusiveOfStart) {
        sql = getStartTimeExclusiveSqlString(sql, jobStartTimeStamp, jobEndTimeStamp);
    } else {
        throw new RuntimeException("cannot determine parameters");
    }
    logger.info("This job SQL: " + sql);
    return (ScheduledPartitionParams) jdbcTemplate.queryForObject(sql, new PartitionParamsRowMapper());
}
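This example uses the RowMapper overload of queryForObject rather than a target class: the single aggregate row (min/max id and timestamp) is mapped into a ScheduledPartitionParams by PartitionParamsRowMapper, whose source is not shown here. A hedged sketch of what such a mapper might look like; the constructor signature and column types are assumptions, only the column aliases come from the SQL above. With a typed RowMapper<ScheduledPartitionParams> the cast in the snippet above would also be unnecessary.

import java.sql.ResultSet;
import java.sql.SQLException;
import org.springframework.jdbc.core.RowMapper;

// Hypothetical mapper: turns the single aggregate row into partition parameters.
public class PartitionParamsRowMapper implements RowMapper<ScheduledPartitionParams> {
    @Override
    public ScheduledPartitionParams mapRow(ResultSet rs, int rowNum) throws SQLException {
        return new ScheduledPartitionParams(
                rs.getBigDecimal("min_id"),
                rs.getBigDecimal("max_id"),
                rs.getTimestamp("min_time_stamp"),
                rs.getTimestamp("max_time_stamp"));
    }
}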
From source file:au.aurin.org.svc.GeodataFinder.java
public String FindApp(final String app_id) {
    LOGGER.info("FindApp, app_id {} ", app_id);
    try {
        final JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
        // Note: app_id is concatenated directly into the statement; a bind parameter would be safer.
        final String query = "Select appname || ': ' || appurl from application where app_id=" + app_id;
        LOGGER.info(" query FindApp is {} ", query);
        return jdbcTemplate.queryForObject(query, String.class);
    } catch (final Exception e) {
        LOGGER.info("FindApp failed, error is: {}", e.toString());
        return "";
    }
}
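Concatenating app_id into the statement works, but it executes an unparameterized query and is open to injection if the value is not trusted. queryForObject also has a vararg overload that binds arguments, which would be the safer shape here; a small sketch of that alternative against the same query:

// Parameterized variant: the argument is bound instead of concatenated into the SQL.
String name = jdbcTemplate.queryForObject(
        "select appname || ': ' || appurl from application where app_id = ?",
        String.class, app_id);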
From source file:dao.SearchDAO.java
public static ObjectNode getPagedDatasetByKeyword(String category, String keyword, String source, int page,
        int size) {
    List<Dataset> pagedDatasets = new ArrayList<Dataset>();
    final JdbcTemplate jdbcTemplate = getJdbcTemplate();
    javax.sql.DataSource ds = jdbcTemplate.getDataSource();
    DataSourceTransactionManager tm = new DataSourceTransactionManager(ds);
    TransactionTemplate txTemplate = new TransactionTemplate(tm);
    ObjectNode result;
    result = txTemplate.execute(new TransactionCallback<ObjectNode>() {
        public ObjectNode doInTransaction(TransactionStatus status) {
            List<Map<String, Object>> rows = null;
            if (StringUtils.isBlank(source) || source.toLowerCase().equalsIgnoreCase("all")) {
                String query = SEARCH_DATASET_WITH_PAGINATION.replace("$keyword", keyword);
                rows = jdbcTemplate.queryForList(query, (page - 1) * size, size);
            } else {
                String query = SEARCH_DATASET_BY_SOURCE_WITH_PAGINATION.replace("$keyword", keyword);
                rows = jdbcTemplate.queryForList(query, source, (page - 1) * size, size);
            }
            for (Map row : rows) {
                Dataset ds = new Dataset();
                ds.id = (Long) row.get(DatasetRowMapper.DATASET_ID_COLUMN);
                ds.name = (String) row.get(DatasetRowMapper.DATASET_NAME_COLUMN);
                ds.source = (String) row.get(DatasetRowMapper.DATASET_SOURCE_COLUMN);
                ds.urn = (String) row.get(DatasetRowMapper.DATASET_URN_COLUMN);
                ds.schema = (String) row.get(DatasetRowMapper.DATASET_SCHEMA_COLUMN);
                pagedDatasets.add(ds);
            }
            long count = 0;
            try {
                count = jdbcTemplate.queryForObject("SELECT FOUND_ROWS()", Long.class);
            } catch (EmptyResultDataAccessException e) {
                Logger.error("Exception = " + e.getMessage());
            }
            ObjectNode resultNode = Json.newObject();
            resultNode.put("count", count);
            resultNode.put("page", page);
            resultNode.put("category", category);
            resultNode.put("source", source);
            resultNode.put("itemsPerPage", size);
            resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));
            resultNode.set("data", Json.toJson(pagedDatasets));
            return resultNode;
        }
    });
    return result;
}