List of usage examples for org.apache.commons.dbutils.handlers.MapListHandler

public MapListHandler()
Creates a new instance of MapListHandler using a BasicRowProcessor for conversion.

From source file:com.gs.obevo.dbmetadata.impl.dialects.Db2MetadataDialect.java
@Override
public ImmutableCollection<DaSequence> searchSequences(final DaSchema schema, Connection conn) throws SQLException {
    QueryRunner query = new QueryRunner();

    // SEQTYPE <> 'I' is for identity columns; we don't want that when pulling user defined sequences
    ImmutableList<Map<String, Object>> maps = ListAdapter
            .adapt(query
                    .query(conn,
                            "select SEQNAME SEQUENCE_NAME from syscat.SEQUENCES\n"
                                    + "where SEQSCHEMA = '" + schema.getName() + "' AND SEQTYPE <> 'I'\n",
                            new MapListHandler()))
            .toImmutable();

    return maps.collect(new Function<Map<String, Object>, DaSequence>() {
        @Override
        public DaSequence valueOf(Map<String, Object> map) {
            return new DaSequenceImpl((String) map.get("SEQUENCE_NAME"), schema);
        }
    });
}
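All of the examples on this page follow the same basic pattern: hand a MapListHandler to QueryRunner.query(..) and get back a List<Map<String, Object>>, one Map per row keyed by column name. A minimal, self-contained sketch of that pattern (the connection, table, and column names below are placeholders, not taken from any of the projects above):

import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;

import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.MapListHandler;

public class MapListHandlerSketch {
    // Runs a parameterised query and prints each row as a Map keyed by column name.
    public static void printActiveUsers(Connection conn, boolean active) throws SQLException {
        QueryRunner runner = new QueryRunner();
        List<Map<String, Object>> rows = runner.query(conn,
                "select id, name from users where active = ?", new MapListHandler(), active);
        for (Map<String, Object> row : rows) {
            System.out.println(row.get("id") + " -> " + row.get("name"));
        }
    }
}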
From source file:at.molindo.dbcopy.Database.java
private static Map<String, Table> readTables(Connection connection) throws SQLException {
    Map<String, Table> tables = new HashMap<String, Table>();

    DatabaseMetaData meta = connection.getMetaData();
    String catalog = connection.getCatalog();

    // for each table in current catalog
    ResultSet rs = meta.getTables(catalog, null, null, null);
    try {
        while (rs.next()) {
            String tableName = rs.getString("TABLE_NAME");
            Table.Builder table = Table.builder(tableName);

            // columns
            String columnsQuery = "select COLUMN_NAME,COLLATION_NAME from information_schema.COLUMNS where TABLE_SCHEMA=? and TABLE_NAME=? order by ORDINAL_POSITION";
            Map<String, Column> columns = Utils.executePrepared(connection, columnsQuery, new ColumnHandler(),
                    catalog, tableName);
            if (columns.isEmpty()) {
                throw new IllegalStateException("table (" + tableName + ") without columns?");
            }
            table.addColumns(columns.values());

            // unique keys
            String uniqueKeysQuery = "show keys from `" + tableName + "` in `" + catalog
                    + "` where `Non_unique` = 0 and `Null` = ''";
            List<Map<String, Object>> uniqueKeyColumns = Utils.executePrepared(connection, uniqueKeysQuery,
                    new MapListHandler());

            ListMap<String, Column> uniqeKeys = new ListMap<String, Column>();
            for (Map<String, Object> keyColumn : uniqueKeyColumns) {
                String name = (String) keyColumn.get("INDEX_NAME");
                String columnName = (String) keyColumn.get("COLUMN_NAME");

                if (name == null) {
                    throw new IllegalStateException("KEY_NAME must not be null");
                }
                if (columnName == null) {
                    throw new IllegalStateException("COLUMN_NAME must not be null");
                }

                Column column = columns.get(columnName);
                if (column == null) {
                    throw new IllegalStateException("COLUMN_NAME unknown: " + columnName);
                }
                uniqeKeys.add(name, column);
            }
            for (Map.Entry<String, List<Column>> e : uniqeKeys.entrySet()) {
                table.addUniqueKey(e.getKey(), e.getValue());
            }

            if (uniqeKeys.isEmpty()) {
                log.warn("table without primary key not supported: " + tableName);
            } else {
                tables.put(tableName, table.build());
            }
        }
    } finally {
        Utils.close(rs);
    }

    return tables;
}
From source file:com.aw.core.dao.DAOSql.java
/**
 * Helper method used to retrieve a list of Maps using standard SQL syntax.
 *
 * @param sql        SQL query
 * @param filterKeys keys used to restrict the search
 * @return list of maps
 */
public List<Map> findListOfMaps(String sql, Object[] filterKeys) {
    return (List<Map>) executeQuery(sql, filterKeys, new MapListHandler());
}
From source file:com.paladin.sys.db.QueryHelper.java
/**
 * Query for a list of rows, similar to Spring's queryForList; each row is returned as a Map
 * built by getMapFromRs.
 *
 * @param _sql the SQL to execute
 * @param _par the query parameters
 * @return the result rows as a list of maps, or null if the query fails
 */
public static List<Map<String, Object>> queryList(String _sql, Object... _par) {
    MapListHandler handler = new MapListHandler() {
        @Override
        protected Map<String, Object> handleRow(ResultSet __rs) throws SQLException {
            return getMapFromRs(__rs);
        }
    };
    try {
        return RUNNER.query(getConnection(), _sql, handler, _par);
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        DBManager.closeConnection();
    }
    return null;
}
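The example above customises row conversion by overriding handleRow(..) in an anonymous subclass. The library also exposes a MapListHandler(RowProcessor) constructor for the same purpose; a sketch of that approach (the upper-casing of keys is purely illustrative, not behaviour taken from the example above):

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.commons.dbutils.BasicRowProcessor;
import org.apache.commons.dbutils.handlers.MapListHandler;

public class UpperCaseKeyRowProcessor extends BasicRowProcessor {
    // Copies the default row Map, upper-casing every column-name key.
    @Override
    public Map<String, Object> toMap(ResultSet rs) throws SQLException {
        Map<String, Object> row = new LinkedHashMap<String, Object>();
        for (Map.Entry<String, Object> entry : super.toMap(rs).entrySet()) {
            row.put(entry.getKey().toUpperCase(), entry.getValue());
        }
        return row;
    }
}

// usage: runner.query(conn, sql, new MapListHandler(new UpperCaseKeyRowProcessor()), params)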
From source file:com.odap.server.scheduler.JobManager.java
private synchronized void processJobs(List<Map<String, Object>> newJobList) {
    logger.info("Entering processJobs()");
    future_offset = 1;

    for (Map<String, Object> job : newJobList) {
        logger.info("Searching for job id:" + job.get("id"));

        Object jobExists = null;
        for (Map<String, Object> scheduledJob : scheduledMapList) {
            if (scheduledJob.get("id").toString().compareTo(job.get("id").toString()) == 0) {
                jobExists = scheduledJob;
                break;
            }
        }

        if (jobExists == null) {
            // Create new quartz job
            logger.info("Creating new job for " + job.get("id"));
            try {
                Class<? extends Job> jobClass = (Class<? extends Job>) Class
                        .forName(job.get("pre_processor").toString());
                JobDetail quartzJob = newJob(jobClass).withIdentity("job-" + job.get("id"), "jobman").build();
                quartzJob.getJobDataMap().put("server_id", job.get("server_id"));
                quartzJob.getJobDataMap().put("job_id", job.get("id"));
                quartzJob.getJobDataMap().put("account_id", job.get("account_id"));
                quartzJob.getJobDataMap().put("server_name", job.get("server_name"));
                quartzJob.getJobDataMap().put("row", job);

                QueryRunner qRunner = new QueryRunner();
                String sql = "select * from notifications n, joomla.cloud_users u, servers s, jobs j WHERE n.user_id = u.id and j.server_id = s.id AND job_id = "
                        + job.get("id");
                logger.info("Getting notifications attached to job:" + sql);
                quartzJob.getJobDataMap().put("notifications",
                        (List<Map<String, Object>>) qRunner.query(conn, sql, new MapListHandler()));

                // If the cron entry is empty/null then we schedule it as soon as possible to run
                // otherwise we schedule the cron job
                // TODO prevent flood of jobs on restart and workload management
                if (job.get("cron") == null || job.get("cron").toString().compareTo("") == 0) {
                    logger.debug("Scheduling one time execution for job " + job.get("id"));
                    SimpleTrigger trigger = (SimpleTrigger) newTrigger()
                            .withIdentity("trigger-" + job.get("id"), "onetime")
                            .startAt(futureDate(future_offset += 25, IntervalUnit.SECOND)) // some Date
                            .forJob("job-" + job.get("id"), "jobman") // identify job with name, group strings
                            .build();
                    sched.scheduleJob(quartzJob, trigger);
                } else {
                    sched.scheduleJob(quartzJob, newTrigger().withIdentity("trig-" + job.get("id"), "jobman")
                            .withSchedule(cronSchedule(job.get("cron").toString()))
                            .forJob("job-" + job.get("id"), "jobman").build());
                }
            } catch (Exception e) {
                logger.error("Unable to create job " + job.get("id"), e);
            }
        }
    }

    // Now deal with the case where jobs need to be descheduled as this is a second run
    for (Map<String, Object> scheduledJob : scheduledMapList) {
        boolean job_found = false;
        for (Map<String, Object> newjob : newJobList) {
            if (newjob.get("id").toString().compareTo(scheduledJob.get("id").toString()) == 0) {
                job_found = true;
                break;
            }
        }
        if (job_found == false && scheduledJob.get("cron") != null
                && scheduledJob.get("cron").toString().length() > 0) {
            logger.info("Need to unschedule job " + scheduledJob.get("id"));
            try {
                sched.deleteJob(new JobKey("job-" + scheduledJob.get("id"), "jobman"));
            } catch (SchedulerException e) {
                logger.error("Unable to remove job", e);
            }
        }
    }

    scheduledMapList = newJobList;
    logger.info("Exiting processJobs()");
}
From source file:com.gs.obevo.db.impl.core.deploy.CsvStaticDataDeployerTest.java
private void testPrimaryKeyOverride() {
    this.jdbc.execute(conn, "CREATE TABLE " + schema + "." + table + " (\n"
            + "AID INT NOT NULL,\n"
            + "BID INT NOT NULL,\n"
            + "STRINGFIELD VARCHAR(30)\tNULL,\n"
            + "TIMESTAMPFIELD TIMESTAMP\tNULL,\n"
            + "CID INT NULL,\n"
            + "UPDATETIMEFIELD TIMESTAMP NOT NULL, \n"
            + ")\n");

    DbEnvironment env = new DbEnvironment();
    env.setPlatform(PLATFORM);
    env.setNullToken("(null)");
    env.setDataDelimiter('^');

    Change artifact = mock(Change.class);
    when(artifact.getPhysicalSchema()).thenReturn(new PhysicalSchema(schema));
    when(artifact.getObjectName()).thenReturn(table);
    when(artifact.getMetadataAttribute(TextMarkupDocumentReader.ATTR_UPDATE_TIME_COLUMN))
            .thenReturn("UPDATETIMEFIELD");
    when(artifact.getMetadataAttribute("primaryKeys"))
            .thenReturn("AID,BID,STRINGFIELD,TIMESTAMPFIELD,CID,UPDATETIMEFIELD");

    String columnHeaders = "aId^bId^stringField^timestampField^cId";
    when(artifact.getConvertedContent()).thenReturn(columnHeaders + "\n"
            + "1^2^AB,C^2012-01-01 12:12:12^\n"
            + "2^3^(null)^2013-01-01 11:11:11.65432^9\n"
            + "3^4^ABC^(null)^9\n"
            + "3^4^ABC^(null)^9\n"
            + "4^4^0006^(null)^9\n"
            + "2^3^(null)^2013-01-01 11:11:11.65432^9\n");

    LocalDateTime preDeployTime = new LocalDateTime();
    CsvStaticDataDeployer csvStaticDataDeployer = new CsvStaticDataDeployer(env, getSqlExecutor(), this.ds,
            metadataManager, new H2DbPlatform());
    csvStaticDataDeployer.deployArtifact(artifact);

    List<Map<String, Object>> results = this.jdbc.query(conn,
            "select * from " + schema + "." + table + " order by AID", new MapListHandler());
    assertEquals(6, results.size());
    this.verifyRow(results.get(0), 1, 2, "AB,C", new LocalDateTime("2012-01-01T12:12:12"), null, preDeployTime, true);
    this.verifyRow(results.get(1), 2, 3, null, new LocalDateTime("2013-01-01T11:11:11.65432"), 9, preDeployTime, true);
    this.verifyRow(results.get(2), 2, 3, null, new LocalDateTime("2013-01-01T11:11:11.65432"), 9, preDeployTime, true);
    this.verifyRow(results.get(3), 3, 4, "ABC", null, 9, preDeployTime, true);
    this.verifyRow(results.get(4), 3, 4, "ABC", null, 9, preDeployTime, true);
    this.verifyRow(results.get(5), 4, 4, "0006", null, 9, preDeployTime, true);
}
From source file:com.ruihu.easyshop.order.dao.OrderDao.java
private void loadOrderItem(Order order) throws SQLException {
    /*
     * 1. select * from t_orderitem where oid=?
     * 2. get List<OrderItem>
     * 3. set Order Object
     */
    String sql = "select * from t_orderitem where oid=?";
    List<Map<String, Object>> mapList = qr.query(sql, new MapListHandler(), order.getOid());
    List<OrderItem> orderItemList = toOrderItemList(mapList);
    order.setOrderItemList(orderItemList);
}
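The toOrderItemList(..) helper is not part of the snippet above. A hypothetical sketch of that mapping step, written as another method of the same DAO class; the OrderItem property and column names used here (iid, count, subtotal) are guesses for illustration only, not taken from the project:

// Hypothetical mapper from MapListHandler output to domain objects; field names are assumptions.
private List<OrderItem> toOrderItemList(List<Map<String, Object>> mapList) {
    List<OrderItem> orderItemList = new ArrayList<OrderItem>();
    for (Map<String, Object> map : mapList) {
        OrderItem item = new OrderItem();
        item.setIid((String) map.get("iid"));
        item.setCount(((Number) map.get("count")).intValue());
        item.setSubtotal(((Number) map.get("subtotal")).doubleValue());
        orderItemList.add(item);
    }
    return orderItemList;
}

When the column names line up with bean property names, the library's BeanListHandler can replace a manual mapping step like this entirely.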
From source file:it.attocchi.db.DbUtilsConnector.java
public List<Map<String, Object>> executeMap(boolean keepConnOpen, String aQuery) throws Exception {
    List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();

    // No DataSource so we must handle Connections manually
    QueryRunner run = new QueryRunner();

    try {
        /*
         * It seems that LIKE does not work with the official parameter binding,
         * perhaps because the parameters are all Objects.
         */
        logger.debug(aQuery);
        result = run.query(getConnection(), aQuery, new MapListHandler());
    } finally {
        if (!keepConnOpen)
            close();
    }

    return result;
}
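The (translated) comment above notes trouble getting LIKE to work with bound parameters. The usual pattern is to put the wildcard into the bound value rather than into the SQL text; a sketch of that approach (the table and column names are placeholders, not from the source above):

import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;

import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.MapListHandler;

public class LikeQuerySketch {
    // Binds "prefix%" as the parameter value so the LIKE pattern stays out of the SQL string.
    public static List<Map<String, Object>> findByNamePrefix(Connection conn, String prefix)
            throws SQLException {
        QueryRunner run = new QueryRunner();
        return run.query(conn, "SELECT * FROM customer WHERE name LIKE ?",
                new MapListHandler(), prefix + "%");
    }
}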
From source file:com.eryansky.core.db.DbUtilsDao.java
/**
 * Query rows as Maps; each row Map is added to the returned List.
 *
 * @param sql    the SQL statement
 * @param params the query parameters (may be null)
 * @return the result rows as a list of maps
 */
public List<Map<String, Object>> find(String sql, Object[] params) throws DaoException {
    queryRunner = new QueryRunner(dataSource);
    List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
    try {
        if (params == null) {
            list = (List<Map<String, Object>>) queryRunner.query(sql, new MapListHandler());
        } else {
            list = (List<Map<String, Object>>) queryRunner.query(sql, new MapListHandler(), params);
        }
    } catch (SQLException e) {
        logger.error("Error occurred while attempting to query data", e);
        throw new DaoException(e);
    }
    return list;
}
From source file:com.gs.obevo.dbmetadata.impl.dialects.MsSqlMetadataDialect.java
@Override
public ImmutableCollection<DaRoutine> searchExtraRoutines(final DaSchema schema, String procedureName,
        Connection conn) throws SQLException {
    String nameClause = procedureName != null ? " and ROUTINE_NAME = '" + procedureName + "'\n" : " ";

    String query = "SELECT"
            + " ROUTINE_CATALOG,"
            + " ROUTINE_SCHEMA,"
            + " ROUTINE_NAME,"
            + " SPECIFIC_NAME,"
            + " ROUTINE_TYPE,"
            + " OBJECT_DEFINITION(OBJECT_ID(ROUTINE_CATALOG + '.' + ROUTINE_SCHEMA + '.' + ROUTINE_NAME)) AS ROUTINE_DEFINITION"
            + " FROM INFORMATION_SCHEMA.ROUTINES"
            + " WHERE ROUTINE_CATALOG = '" + schema.getName() + "'"
            + nameClause;

    QueryRunner qr = new QueryRunner(); // using queryRunner so that we can reuse the connection
    ImmutableList<Map<String, Object>> maps = ListAdapter.adapt(qr.query(conn, query, new MapListHandler()))
            .toImmutable();

    return maps.collect(new Function<Map<String, Object>, DaRoutine>() {
        @Override
        public DaRoutine valueOf(Map<String, Object> object) {
            DaRoutineType routineType = DaRoutineType
                    .valueOf(((String) object.get("ROUTINE_TYPE")).toLowerCase());
            return new DaRoutinePojoImpl((String) object.get("ROUTINE_NAME"), schema, routineType,
                    (String) object.get("SPECIFIC_NAME"), (String) object.get("ROUTINE_DEFINITION"));
        }
    });
}
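The query above splices the catalog and routine names directly into the SQL string. A generic rewrite of the same INFORMATION_SCHEMA.ROUTINES lookup using bound parameters instead; this is an illustrative sketch, not the obevo project's actual code, and it drops the OBJECT_DEFINITION column for brevity:

import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;

import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.MapListHandler;

public class RoutineLookupSketch {
    // Looks up routines by catalog and name with parameter binding; each row comes back as a Map.
    public static List<Map<String, Object>> findRoutines(Connection conn, String catalog, String name)
            throws SQLException {
        QueryRunner qr = new QueryRunner();
        return qr.query(conn,
                "SELECT ROUTINE_SCHEMA, ROUTINE_NAME, SPECIFIC_NAME, ROUTINE_TYPE"
                        + " FROM INFORMATION_SCHEMA.ROUTINES"
                        + " WHERE ROUTINE_CATALOG = ? AND ROUTINE_NAME = ?",
                new MapListHandler(), catalog, name);
    }
}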