Example usage for org.apache.commons.dbutils QueryRunner query

Introduction

This page collects example usages of org.apache.commons.dbutils QueryRunner query, drawn from the source files listed below.

Prototype

public <T> T query(String sql, ResultSetHandler<T> rsh, Object... params) throws SQLException 

Document

Executes the given SELECT SQL query and returns a result object.
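
Before the real-world examples, here is a minimal, self-contained sketch of this overload. It assumes a configured javax.sql.DataSource (named dataSource) and a hypothetical PERSON table with NAME and AGE columns; these names are illustrative, not from any of the projects below. When QueryRunner is constructed with a DataSource, it obtains and closes connections itself, so only the SQL, the handler, and the parameters are passed.

import java.sql.SQLException;
import java.util.List;
import java.util.Map;

import javax.sql.DataSource;

import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.MapListHandler;
import org.apache.commons.dbutils.handlers.ScalarHandler;

public class QueryRunnerSketch {

    // Assumed to be configured elsewhere (connection pool, JNDI, ...)
    private final DataSource dataSource;

    public QueryRunnerSketch(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    // The DataSource-backed QueryRunner opens and closes the connection itself.
    public List<Map<String, Object>> findAdults() throws SQLException {
        QueryRunner run = new QueryRunner(dataSource);
        return run.query("SELECT NAME, AGE FROM PERSON WHERE AGE >= ?",
                new MapListHandler(), 18);
    }

    // ScalarHandler returns the first column of the first row.
    public long countAdults() throws SQLException {
        QueryRunner run = new QueryRunner(dataSource);
        Number count = run.query("SELECT COUNT(*) FROM PERSON WHERE AGE >= ?",
                new ScalarHandler<Number>(), 18);
        return count.longValue();
    }
}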

Usage

From source file:azkaban.trigger.JdbcTriggerLoader.java

@Override
public List<Trigger> loadTriggers() throws TriggerLoaderException {
    logger.info("Loading all triggers from db.");
    Connection connection = getConnection();

    QueryRunner runner = new QueryRunner();
    ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();

    List<Trigger> triggers;

    try {
        triggers = runner.query(connection, GET_ALL_TRIGGERS, handler);
    } catch (SQLException e) {
        logger.error(GET_ALL_TRIGGERS + " failed.");

        throw new TriggerLoaderException("Loading triggers from db failed. ", e);
    } finally {
        DbUtils.closeQuietly(connection);
    }

    logger.info("Loaded " + triggers.size() + " triggers.");

    return triggers;
}
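
When the caller supplies the Connection, as above, the caller is also responsible for closing it. On Java 7 and later, the finally/closeQuietly pair can be replaced with try-with-resources; a hedged sketch of the same shape, reusing the names from the example above, not the actual Azkaban source:

// Try-with-resources closes the connection automatically, replacing the
// finally { DbUtils.closeQuietly(connection); } idiom.
QueryRunner runner = new QueryRunner();
ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
try (Connection connection = getConnection()) {
    return runner.query(connection, GET_ALL_TRIGGERS, handler);
} catch (SQLException e) {
    throw new TriggerLoaderException("Loading triggers from db failed. ", e);
}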

From source file:it.attocchi.db.DbUtilsConnector.java

public <T> T executeTop1(boolean keepConnOpen, String aTop1Query, Class<T> clazz) throws Exception {
    T result = null;

    // No DataSource so we must handle Connections manually
    QueryRunner run = new QueryRunner();

    try {

        /*
         * LIKE does not seem to work with the official parameter binding,
         * possibly because the parameters are all passed as Object
         */
        logger.debug("Executing: " + aTop1Query);
        List<T> listTop1 = run.query(getConnection(), aTop1Query, getResultSetHandler(clazz));

        if (listTop1.size() > 0) {
            logger.debug(String.format("Record Trovati: %s", listTop1.size()));
            result = listTop1.get(0);
        }

    } finally {
        // Use this helper method so we don't have to check for null
        if (!keepConnOpen)
            close();
    }

    return result;
}

From source file:azkaban.scheduler.JdbcScheduleLoader.java

@Override
public List<Schedule> loadSchedules() throws ScheduleManagerException {
    logger.info("Loading all schedules from db.");
    Connection connection = getConnection();

    QueryRunner runner = new QueryRunner();
    ResultSetHandler<List<Schedule>> handler = new ScheduleResultHandler();

    List<Schedule> schedules;

    try {
        schedules = runner.query(connection, SELECT_ALL_SCHEDULES, handler);
    } catch (SQLException e) {
        logger.error(SELECT_ALL_SCHEDULES + " failed.");
        throw new ScheduleManagerException("Loading schedules from db failed. ", e);
    } finally {
        DbUtils.closeQuietly(connection);
    }

    logger.info("Now trying to update the schedules");

    // Filter the schedules. Use an explicit iterator so that removal during
    // iteration is safe; a for-each loop would throw
    // ConcurrentModificationException when an element is removed.
    Iterator<Schedule> scheduleIterator = schedules.iterator();
    while (scheduleIterator.hasNext()) {
        Schedule sched = scheduleIterator.next();
        if (!sched.updateTime()) {
            logger.info("Schedule " + sched.getScheduleName()
                    + " was scheduled before azkaban start, skipping it.");
            scheduleIterator.remove();
            removeSchedule(sched);
        } else {
            logger.info("Recurring schedule, need to update next exec time");
            try {
                updateNextExecTime(sched);
            } catch (Exception e) {
                logger.error("Update next execution time failed.", e);
                throw new ScheduleManagerException("Update next execution time failed.", e);
            }
            logger.info("Schedule " + sched.getScheduleName() + " loaded and updated.");
        }
    }

    logger.info("Loaded " + schedules.size() + " schedules.");

    return schedules;
}

From source file:azkaban.trigger.JdbcTriggerLoader.java

private synchronized void addTrigger(Connection connection, Trigger t, EncodingType encType)
        throws TriggerLoaderException {

    QueryRunner runner = new QueryRunner();

    long id;

    try {
        runner.update(connection, ADD_TRIGGER, DateTime.now().getMillis());
        connection.commit();
        id = runner.query(connection, LastInsertID.LAST_INSERT_ID, new LastInsertID());

        if (id == -1L) {
            logger.error("trigger id is not properly created.");
            throw new TriggerLoaderException("trigger id is not properly created.");
        }

        t.setTriggerId((int) id);
        updateTrigger(t);
        logger.info("uploaded trigger " + t.getDescription());
    } catch (SQLException e) {
        throw new TriggerLoaderException("Error creating trigger.", e);
    }

}
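
Note that DbUtils 1.6 and later also provide QueryRunner.insert, which executes the statement with generated-keys support and passes the generated-keys ResultSet to the handler, so the separate LAST_INSERT_ID round trip is not needed. A hedged sketch, with an illustrative table and column name rather than Azkaban's actual schema:

// QueryRunner.insert(...) hands the generated-keys ResultSet to the handler;
// ScalarHandler then returns the first generated key.
QueryRunner runner = new QueryRunner();
Number key = runner.insert(connection, "INSERT INTO triggers (modify_time) VALUES (?)",
        new ScalarHandler<Number>(), DateTime.now().getMillis());
long id = key.longValue();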

From source file:azkaban.migration.scheduler.JdbcScheduleLoader.java

@Override
public List<Schedule> loadSchedules() throws ScheduleManagerException {
    logger.info("Loading all schedules from db.");
    Connection connection = getConnection();

    QueryRunner runner = new QueryRunner();
    ResultSetHandler<List<Schedule>> handler = new ScheduleResultHandler();

    List<Schedule> schedules;

    try {
        schedules = runner.query(connection, SELECT_ALL_SCHEDULES, handler);
    } catch (SQLException e) {
        logger.error(SELECT_ALL_SCHEDULES + " failed.");
        throw new ScheduleManagerException("Loading schedules from db failed. ", e);
    } finally {
        DbUtils.closeQuietly(connection);
    }

    logger.info("Now trying to update the schedules");

    // filter the schedules
    Iterator<Schedule> scheduleIterator = schedules.iterator();
    while (scheduleIterator.hasNext()) {
        Schedule sched = scheduleIterator.next();
        if (!sched.updateTime()) {
            logger.info("Schedule " + sched.getScheduleName()
                    + " was scheduled before azkaban start, skipping it.");
            scheduleIterator.remove();
            removeSchedule(sched);
        } else {
            logger.info("Recurring schedule, need to update next exec time");
            try {
                updateNextExecTime(sched);
            } catch (Exception e) {
                logger.error("Update next execution time failed.", e);
                throw new ScheduleManagerException("Update next execution time failed.", e);
            }
            logger.info("Schedule " + sched.getScheduleName() + " loaded and updated.");
        }
    }

    logger.info("Loaded " + schedules.size() + " schedules.");

    return schedules;
}

From source file:com.fluke.database.dataservice.EODDao.java

public List<EODTicker> getEODTickers(String equity, Date fromDate, Date toDate) throws SQLException {
    QueryRunner run = new QueryRunner(DatabaseProperty.getDataSource());
    String sql = "select * from EOD  where equity = ? and date between '%s' and '%s' order by date";
    String from = Util.getDate(fromDate);
    String to = Util.getDate(toDate);
    sql = String.format(sql, from, to);
    Object[] params = new Object[] { equity };
    ResultSetHandler<List<EODTicker>> rsh = new BeanListHandler<EODTicker>(EODTicker.class);
    return filterDupicateDate(run.query(sql, rsh, params));
}
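
The date range above is spliced into the SQL with String.format rather than bound as parameters. Since the query(String, ResultSetHandler, Object...) overload accepts a varargs parameter list, all three values could be bound instead; a hedged sketch, assuming the EOD date column accepts java.sql.Date parameters:

// Bind the dates as parameters instead of formatting them into the SQL
// string, which avoids quoting problems and SQL injection.
QueryRunner run = new QueryRunner(DatabaseProperty.getDataSource());
String sql = "select * from EOD where equity = ? and date between ? and ? order by date";
List<EODTicker> tickers = run.query(sql,
        new BeanListHandler<EODTicker>(EODTicker.class),
        equity, new java.sql.Date(fromDate.getTime()), new java.sql.Date(toDate.getTime()));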

From source file:name.marcelomorales.siqisiqi.bonecp.DataSourceProviderTest.java

@Test
public void testStatistics() throws Exception {
    Config config = ConfigFactory.parseString("bonecp.url=\"jdbc:derby:memory:dbstats;create=true\"")
            .withFallback(ConfigFactory.load());

    DataSourceProvider dsp = new DataSourceProvider(config);

    assertNull(dsp.getStatistics());

    DataSource dataSource = dsp.get();

    Connection connection = dataSource.getConnection();

    connection.createStatement().execute("CREATE TABLE TABLETEST1 (ACOLUMN VARCHAR(10))");
    connection.commit();

    QueryRunner queryRunner = new QueryRunner();
    queryRunner.update(connection, "INSERT INTO TABLETEST1 VALUES ('AAA')");
    queryRunner.update(connection, "INSERT INTO TABLETEST1 VALUES (?)", "BBB");
    queryRunner.update(connection, "INSERT INTO TABLETEST1 VALUES (?)", "CCC");
    connection.commit();

    List<String> values = queryRunner.query(connection, "SELECT * FROM TABLETEST1 ORDER BY ACOLUMN ASC",
            new ColumnListHandler<String>());

    connection.commit();

    connection.close();

    assertEquals("AAA", values.get(0));
    assertEquals("BBB", values.get(1));
    assertEquals("CCC", values.get(2));

    Statistics statistics = dsp.getStatistics();
    assertTrue(statistics.getCacheHits() > 0);
    assertTrue(statistics.getCacheMiss() > 0);
    assertEquals(1, statistics.getConnectionsRequested());
    assertEquals(4, statistics.getStatementsPrepared());

    dsp.close();
}

From source file:com.gs.obevo.dbmetadata.impl.dialects.SybaseAseMetadataDialect.java

@Override
public ImmutableCollection<DaRule> searchRules(final DaSchema schema, Connection conn) throws SQLException {
    QueryRunner query = new QueryRunner(); // using queryRunner so that we can reuse the connection

    // Do not use ANSI JOIN as it does not work in Sybase 11.x - the SQL below works across all versions
    ImmutableList<Map<String, Object>> maps = ListAdapter.adapt(query.query(conn,
            "SELECT rul.name as RULE_NAME\n" + "FROM " + schema.getName() + "..sysobjects rul\n"
                    + "WHERE rul.type = 'R'\n" + "and not exists (\n"
                    + "\t-- Ensure that the entry is not attached to a table; otherwise, it is a regular table constraint, and will already be dropped when the table is dropped\n"
                    + "\tselect 1 from " + schema.getName() + "..sysconstraints c\n"
                    + "\twhere c.constrid = rul.id\n" + ")\n",
            new MapListHandler())).toImmutable();

    return maps.collect(new Function<Map<String, Object>, DaRule>() {
        @Override
        public DaRule valueOf(Map<String, Object> map) {
            return new DaRuleImpl((String) map.get("RULE_NAME"), schema);
        }
    });
}

From source file:it.attocchi.db.DbUtilsConnector.java

public <T> List<T> executeTrimedString(boolean keepConnOpen, String aQuery, Class<T> clazz) throws Exception {
    List<T> result = new ArrayList<T>();

    // No DataSource so we must handle Connections manually
    QueryRunner run = new QueryRunner() {
        protected ResultSet wrap(ResultSet rs) {
            return StringTrimmedResultSet.wrap(rs);
        }
    };

    try {

        /*
         * LIKE does not seem to work with the official parameter binding,
         * possibly because the parameters are all passed as Object
         */
        logger.debug(aQuery);
        result = run.query(getConnection(), aQuery, getResultSetHandler(clazz));

    } finally {
        if (!keepConnOpen)
            close();
    }

    return result;
}

From source file:com.gs.obevo.dbmetadata.impl.dialects.SybaseAseMetadataDialect.java

@Override
public ImmutableCollection<ExtraIndexInfo> searchExtraConstraintIndices(DaSchema schema, String tableName,
        Connection conn) throws SQLException {
    QueryRunner query = new QueryRunner(); // using queryRunner so that we can reuse the connection

    // Do not use ANSI JOIN as it does not work in Sybase 11.x - the SQL below works across all versions
    String tableClause = tableName == null ? "" : " AND tab.name = '" + tableName + "'";
    ImmutableList<Map<String, Object>> maps = ListAdapter.adapt(query.query(conn,
            "select tab.name TABLE_NAME, ind.name INDEX_NAME, status2 & 8 IS_CONSTRAINT, status2 & 512 IS_CLUSTERED "
                    + "from " + schema.getName() + "..sysindexes ind, " + schema.getName() + "..sysobjects tab "
                    + "where ind.id = tab.id " + tableClause,
            new MapListHandler())).toImmutable();

    return maps.collect(new Function<Map<String, Object>, ExtraIndexInfo>() {
        @Override
        public ExtraIndexInfo valueOf(Map<String, Object> map) {
            return new ExtraIndexInfo((String) map.get("TABLE_NAME"), (String) map.get("INDEX_NAME"),
                    (Integer) map.get("IS_CONSTRAINT") != 0, (Integer) map.get("IS_CLUSTERED") != 0);
        }
    });
}