List of usage examples for the org.springframework.jdbc.core.JdbcTemplate constructor JdbcTemplate(DataSource)
public JdbcTemplate(DataSource dataSource)
From source file:org.wso2.carbon.metrics.jdbc.core.BaseReporterTest.java
@BeforeSuite protected static void init() throws Exception { if (logger.isInfoEnabled()) { logger.info("Initializing the data source and populating data"); }// ww w .j av a 2 s. c om // Setup datasource dataSource = JdbcConnectionPool.create("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1", "sa", ""); template = new JdbcTemplate(dataSource); ResourceDatabasePopulator populator = new ResourceDatabasePopulator(); populator.addScript(new ClassPathResource("dbscripts/h2.sql")); populator.populate(dataSource.getConnection()); // Create initial context System.setProperty(Context.INITIAL_CONTEXT_FACTORY, "org.apache.naming.java.javaURLContextFactory"); System.setProperty(Context.URL_PKG_PREFIXES, "org.apache.naming"); InitialContext ic = new InitialContext(); ic.createSubcontext("jdbc"); ic.bind("jdbc/WSO2MetricsDB", dataSource); if (logger.isInfoEnabled()) { logger.info("Creating Metrics"); } metrics = new Metrics(TestUtils.getConfigProvider("metrics.yaml")); metrics.activate(); metricService = metrics.getMetricService(); metricManagementService = metrics.getMetricManagementService(); }
From source file:com.univocity.app.utils.DatabaseImpl.java
/**
 * Creates an in-memory HSQLDB database named {@code databaseName} and then runs
 * the create-table scripts found in the given directory.
 *
 * @param databaseName              name of the in-memory database to create
 * @param dirWithCreateTableScripts directory containing the DDL scripts to execute
 * @throws IllegalStateException if the driver or data source cannot be set up
 */
public DatabaseImpl(String databaseName, File dirWithCreateTableScripts) {
    try {
        // Ensure the HSQLDB driver class is loaded before opening the connection.
        Class.forName("org.hsqldb.jdbcDriver");
        final String url = "jdbc:hsqldb:mem:" + databaseName;
        DataSource ds = new SingleConnectionDataSource(url, "sa", "", true);
        this.jdbcTemplate = new JdbcTemplate(ds);
    } catch (Exception cause) {
        throw new IllegalStateException("Error creating database " + databaseName, cause);
    }
    createTables(dirWithCreateTableScripts);
}
From source file:com.iucosoft.eavertizare.dao.impl.ConfiguratiiDaoImpl.java
/**
 * Persists the given configuration as a new row in {@code configuratii_db}.
 * The id column is bound to {@code null} so the database can assign it.
 *
 * @param config configuration to insert
 */
@Override
public void save(Configuratii config) {
    final String sql = "insert into configuratii_db (id, driver, url_db, username, password, tabela_clienti) values (?, ?, ?, ?, ?, ?)";
    Object[] params = { null, config.getDriver(), config.getUrlDb(), config.getUsername(),
            config.getPassword(), config.getTabelaClienti() };
    new JdbcTemplate(dataSource).update(sql, params);
}
From source file:com.mir00r.jdbc_dao.EmployeeDao.java
@Autowired public void setDataSource(DataSource dataSource) { // this.dataSource = dataSource; jdbctemplate = new JdbcTemplate(dataSource); namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource); }
From source file:com.company.project.service.dao.PersistentTokenDao.java
public Map<String, Object> get(String series) { JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); Map<String, Object> map = null; map = jdbcTemplate.queryForMap("SELECT * from persistent_logins where series = ?", series); return map;//from www . j a va 2 s. c o m }
From source file:com.beezas.dao.TicketDaoImpl.java
@Override public void saveOrUpdate(Ticket ticket) { // if (ticket.getTicketType()!=null) { if (ticket.getId() > 0) { System.out.println("update"); // update // String sql = "UPDATE ticket SET total_tickets=? WHERE ticket_type=?"; String sql = "UPDATE tickets SET total_tickets=?, ticket_type=? WHERE ticket_id=?"; jdbcTemplate = new JdbcTemplate(dataSource); jdbcTemplate.update(sql, ticket.getTotalTickets(), ticket.getTicketType(), ticket.getId()); } else {/*from w ww . ja v a2 s .co m*/ // String sql = "insert into ticket" + "(ticket_type,total_tickets ) VALUES (?, ?)"; String sql = "insert into tickets" + "(ticket_id,ticket_type,total_tickets ) VALUES (seq_ticket_id.nextval, ?, ?)"; jdbcTemplate = new JdbcTemplate(dataSource); // Creates an instance of JdbcTemplate and supply a data source object jdbcTemplate.update(sql, new Object[] { ticket.getTicketType(), ticket.getTotalTickets() }); } }
From source file:edu.wisc.jmeter.dao.JdbcMonitorDaoTest.java
/**
 * Builds an in-memory HSQLDB data source, loads the test schema and wires up
 * the DAO under test before each test method.
 *
 * @throws Exception if schema creation or DAO initialization fails
 */
@Before
public void setup() throws Exception {
    // Fresh in-memory database for the test run.
    this.ds = new SimpleDriverDataSource(new jdbcDriver(), "jdbc:hsqldb:mem:JdbcMonitorTest", "sa", "");
    this.jdbcTemplate = new JdbcTemplate(this.ds);
    // Create the schema; the trailing 'false' means do not continue on script errors.
    ClassPathResource schemaScript = new ClassPathResource("/tables_hsql.sql");
    SimpleJdbcTestUtils.executeSqlScript(new SimpleJdbcTemplate(this.jdbcTemplate), schemaScript, false);
    // NOTE(review): the two MAX_VALUE arguments presumably disable age-based
    // cleanup during tests — confirm against the JdbcMonitorDao constructor.
    this.jdbcMonitorDao = new JdbcMonitorDao(this.ds, Integer.MAX_VALUE, Integer.MAX_VALUE);
    this.jdbcMonitorDao.afterPropertiesSet();
}
From source file:com.univocity.examples.Tutorial004Advanced.java
/**
 * Tutorial example: registers a dataset producer ({@code FoodProcessor}) on the
 * "FOOD_DES" entity so that derived in-memory datasets ("food_names",
 * "food_name_details", "food_state_names", "food_state_details") can be mapped
 * into the destination schema alongside the original source entity, then runs
 * one mapping cycle and validates the printed output.
 */
@Test
public void example001DataSetProducer() {
    initializeEngine("Producer");
    DataIntegrationEngine engine = Univocity.getEngine("Producer");
    //##CODE_START

    //To keep it simple, we will just insert a locale to the database directly and use its ID in our mappings.
    new JdbcTemplate(dataSource)
            .update("INSERT INTO locale (acronym, description) VALUES ('en_US', 'American English')");

    int localeId = new JdbcTemplate(dataSource).queryForObject("SELECT id FROM locale WHERE acronym = 'en_US'",
            Integer.class);

    //Let's define the ID of the inserted locale as a constant which is accessible from anywhere.
    engine.setConstant("locale", localeId);

    //Here we add our dataset producer to the "FOOD_DES" entity and tell uniVocity to extract the columns used to generate the datasets.
    engine.addDatasetProducer(EngineScope.CYCLE, new FoodProcessor()).on("FOOD_DES", "Ndb_no", "Long_Desc");

    DataStoreMapping dsMapping = engine.map("csvDataStore", "newSchema");
    EntityMapping mapping;

    //The first mapping uses the "food_names" dataset that is produced with our FoodProcessor.
    mapping = dsMapping.map("food_names", "food_name");
    mapping.identity().associate("name").toGeneratedId("id");

    mapping = dsMapping.map("food_names", "newSchema.food_name_details");
    //Here we read the locale constant
    mapping.identity().associate("name", "{$locale}").to("id", "loc");
    mapping.reference().using("name").referTo("food_names", "food_name").on("id").directly().onMismatch()
            .abort();
    mapping.value().copy("name").to("description");

    //"food_name_details" exist in multiple data stores. In this case uniVocity cannot resolve what source entity to use,
    //it is necessary to prepend the data store name to resolve the ambiguity.
    //Datasets are part of a special data store named "<datasets>".
    mapping = dsMapping.map("<datasets>.food_name_details", "food");
    mapping.identity().associate("food_code").toGeneratedId("id");
    mapping.reference().using("name").referTo("food_names", "food_name").on("name_id").directly().onMismatch()
            .abort();

    mapping = dsMapping.map("food_state_names", "food_state");
    mapping.identity().associate("name").toGeneratedId("id");

    mapping = dsMapping.map("food_state_names", "newSchema.food_state_details");
    mapping.identity().associate("name", "{$locale}").to("id", "loc");
    mapping.reference().using("name").referTo("food_state_names", "food_state").on("id").directly().onMismatch()
            .abort();
    mapping.value().copy("name").to("description");

    mapping = dsMapping.map("<datasets>.food_state_details", "state_of_food");
    mapping.identity().associate("name", "food_code").to("state_id", "food_id");
    mapping.reference().using("name").referTo("food_state_names", "food_state").on("state_id").directly()
            .onMismatch().abort();
    mapping.reference().using("food_code").referTo("<datasets>.food_name_details", "food").on("food_id")
            .directly().onMismatch().abort();
    mapping.value().copy("order").to("sequence");

    //After mapping food names and food states, we still need to set the food properties.
    //This mapping will use the original source entity "FOOD_DES"
    mapping = dsMapping.map("FOOD_DES", "food");
    mapping.identity().associate("NDB_No").to("id");
    mapping.reference().using("NDB_No").referTo("<datasets>.food_name_details", "food").on("id").directly()
            .onMismatch().abort();
    mapping.value().copy("CHO_Factor", "Fat_Factor", "Pro_Factor", "N_Factor").to("carbohydrate_factor",
            "fat_factor", "protein_factor", "nitrogen_protein_factor");

    //The mapping defined above creates rows for "food", but they are updates to the records mapped from "<datasets>.food_name_details" to "food".
    //To avoid inserting these rows as new records, we use the "updateNewRows" insert option.
    mapping.persistence().usingMetadata().deleteDisabled().updateDisabled().updateNewRows();

    //Let's execute the mapping cycle and see the results
    engine.executeCycle();
    //##CODE_END

    StringBuilder output = new StringBuilder();

    println(output, printFoodNameTables());
    println(output, printFoodTable());
    println(output, printFoodStateTables());
    println(output, printStateOfFoodTable());

    //As the data is distributed across too many tables, it may be a bit hard to reason about how things are associated.
    //We thought it would be easier for you to see it coming from a query that produces an output similar to the input data:
    println(output, queryTablesAndPrintMigratedFoodData());

    printAndValidate(output);

    Univocity.shutdown("Producer");
}
From source file:com.springsource.greenhouse.groups.JdbcGroupRepositoryTest.java
/**
 * Builds the embedded test database and wires the repository under test.
 * member() and group() presumably add the corresponding tables, and
 * testData(getClass()) presumably loads fixture data keyed by this test
 * class — confirm in GreenhouseTestDatabaseBuilder.
 */
@Before public void setup() { db = new GreenhouseTestDatabaseBuilder().member().group().testData(getClass()).getDatabase(); jdbcTemplate = new JdbcTemplate(db); groupRepository = new JdbcGroupRepository(jdbcTemplate); }
From source file:com.dai.dao.JogadorEquipaAdversariaDaoImpl.java
/**
 * Deletes the {@code jogadorEquipaAdversaria} row with the given id.
 *
 * @param idJEA primary key of the row to delete
 */
@Override
public void apagaJEA(int idJEA) {
    // FIX: bind the id as a parameter instead of concatenating it into the SQL.
    // With an int the concatenation is not exploitable, but the bind form is the
    // idiomatic JdbcTemplate usage and stays safe if the key type ever changes.
    String sql = "delete from jogadorEquipaAdversaria where idJogadorEquipaAdversaria = ?";
    JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
    jdbcTemplate.update(sql, idJEA);
}