Example usage for org.springframework.jdbc.core JdbcTemplate queryForObject

List of usage examples for org.springframework.jdbc.core JdbcTemplate queryForObject

Introduction

On this page you can find example usages of org.springframework.jdbc.core JdbcTemplate#queryForObject.

Prototype

@Override
    public <T> T queryForObject(String sql, Class<T> requiredType, @Nullable Object... args)
            throws DataAccessException 

Source Link

Usage

From source file:com.sfs.whichdoctor.dao.ReimbursementDAOImpl.java

/**
 * Get a ReimbursementBean for a specified reimbursementId with the supplied
 * load details. A boolean parameter identifies whether to use the default
 * reader connection or optional writer connection datasource.
 *
 * @param reimbursementId the reimbursement id
 * @param loadDetails the load details
 * @param useWriterConn true to query via the writer connection
 *
 * @return the reimbursement bean, or null when no single matching row exists
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
private ReimbursementBean load(final int reimbursementId, final BuilderBean loadDetails,
        final boolean useWriterConn) throws WhichDoctorDaoException {

    dataLogger.info("Reimbursement Id: " + reimbursementId + " requested");

    ReimbursementBean reimbursement = null;

    final String loadSQL = this.getSQL().getValue("reimbursement/load")
            + " AND reimbursement.ReimbursementId = ? " + "GROUP BY reimbursement.ReimbursementId";

    // Default to the reader datasource; switch to the writer when requested
    JdbcTemplate jdbcTemplate = this.getJdbcTemplateReader();

    if (useWriterConn) {
        jdbcTemplate = this.getJdbcTemplateWriter();
    }

    try {
        // Typed RowMapper removes the raw type and the unchecked cast
        reimbursement = jdbcTemplate.queryForObject(loadSQL,
                new Object[] { reimbursementId }, new RowMapper<ReimbursementBean>() {
                    @Override
                    public ReimbursementBean mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        return loadReimbursement(rs, loadDetails);
                    }
                });

    } catch (IncorrectResultSizeDataAccessException ie) {
        // Zero rows (or more than one): treat as "not found" and return null
        dataLogger.debug("No results found for search: " + ie.getMessage());
    }
    return reimbursement;
}

From source file:com.sfs.whichdoctor.dao.CreditDAOImpl.java

/**
 * Used to load a CreditBean with a specific name and supplied load options.
 * A boolean parameter identifies whether to use the default reader
 * connection or optional writer connection datasource.
 *
 * @param creditId the credit id
 * @param loadDetails the load details
 * @param useWriter true to query via the writer connection
 *
 * @return the credit bean, or null when no single matching row exists
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
private CreditBean load(final int creditId, final BuilderBean loadDetails, final boolean useWriter)
        throws WhichDoctorDaoException {

    CreditBean credit = null;

    dataLogger.info("Credit Id: " + creditId + " requested");

    final String loadSQL = getSQL().getValue("credit/load") + " AND credit.CreditId = ?";

    // Default to the reader datasource; switch to the writer when requested
    JdbcTemplate jdbcTemplate = this.getJdbcTemplateReader();
    if (useWriter) {
        jdbcTemplate = this.getJdbcTemplateWriter();
    }

    try {
        // Typed RowMapper removes the raw type and the unchecked cast
        credit = jdbcTemplate.queryForObject(loadSQL, new Object[] { creditId },
                new RowMapper<CreditBean>() {
                    @Override
                    public CreditBean mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        return loadCredit(rs, loadDetails);
                    }
                });

    } catch (IncorrectResultSizeDataAccessException ie) {
        // Zero rows (or more than one): treat as "not found" and return null
        dataLogger.debug("No results found for this search:" + ie.getMessage());
    }
    return credit;
}

From source file:com.sfs.whichdoctor.dao.ReceiptDAOImpl.java

/**
 * Used to get a ReceiptBean for a specified receiptId and supplied load
 * options. A boolean parameter identifies whether to use the default reader
 * connection or optional writer connection datasource.
 *
 * @param receiptId the receipt id
 * @param loadDetails the load details
 * @param useWriterConn true to query via the writer connection
 *
 * @return the receipt bean, or null when no single matching row exists
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
private ReceiptBean load(final int receiptId, final BuilderBean loadDetails, final boolean useWriterConn)
        throws WhichDoctorDaoException {

    ReceiptBean receipt = null;

    final String loadSQL = getSQL().getValue("receipt/load")
            + " AND receipt.ReceiptId = ? GROUP BY receipt.ReceiptId";

    // Default to the reader datasource; switch to the writer when requested
    JdbcTemplate jdbcTemplate = this.getJdbcTemplateReader();

    if (useWriterConn) {
        jdbcTemplate = this.getJdbcTemplateWriter();
    }

    try {
        // Typed RowMapper removes the raw type and the unchecked cast
        receipt = jdbcTemplate.queryForObject(loadSQL, new Object[] { receiptId },
                new RowMapper<ReceiptBean>() {

                    @Override
                    public ReceiptBean mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        return loadReceipt(rs, loadDetails);
                    }
                });

    } catch (IncorrectResultSizeDataAccessException ie) {
        // Zero rows (or more than one): treat as "not found" and return null
        dataLogger.debug("No results found for this search:" + ie.getMessage());
    }
    return receipt;
}

From source file:com.sfs.whichdoctor.dao.GroupDAOImpl.java

/**
 * Load a GroupBean for the specified groupId and provided load details.
 *
 * @param groupId the group id/*from ww  w.  j a v a 2 s  .c  om*/
 * @param loadDetails the load details
 * @param useWriterConn use the writer connection
 *
 * @return the group bean
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
private GroupBean load(final int groupId, final BuilderBean loadDetails, final boolean useWriterConn)
        throws WhichDoctorDaoException {

    GroupBean group = null;

    JdbcTemplate jdbcTemplate = this.getJdbcTemplateReader();
    if (useWriterConn) {
        jdbcTemplate = this.getJdbcTemplateWriter();
    }

    final String loadSQL = getSQL().getValue("group/load") + " AND groups.GroupId = ? GROUP BY groups.GroupId";

    try {
        group = (GroupBean) jdbcTemplate.queryForObject(loadSQL, new Object[] { groupId }, new RowMapper() {
            public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                return loadGroup(rs, loadDetails);
            }
        });

    } catch (IncorrectResultSizeDataAccessException ie) {
        dataLogger.debug("No results found for the search: " + ie.getMessage());
    }
    return group;
}

From source file:com.taobao.tddl.common.jdbc.MetaDataQueryForMapHandler.java

/**
 * Queries a single row from the named table and converts it to a column/value map.
 *
 * @param jdbcTemplate the template used to execute the query
 * @param tableName the table to select from (lower-cased before the metadata lookup)
 * @param selectColumns comma-separated column list used when no cached metadata
 *        exists for the table; null selects all columns
 * @param whereSql the WHERE clause text, appended verbatim to the generated SQL
 * @param args positional bind arguments for the query; may be null
 *
 * @return the matching row as a column/value map, or null when no row matches
 * @throws DataAccessException for any failure other than an empty result
 */
public Map<String, Object> queryForMap(JdbcTemplate jdbcTemplate, String tableName, String selectColumns,
        String whereSql, Object[] args) {
    tableName = tableName.toLowerCase();
    TableMetaData tmd = tableMetaDatas.get(tableName);
    StringBuilder sql = new StringBuilder("select ");
    if (tmd == null) {
        // No cached metadata: fall back to the caller-supplied column list (or all columns)
        sql.append(selectColumns == null ? "*" : selectColumns);
    } else {
        // Cached metadata supplies the explicit column list for this table
        sql.append(tmd.commaColumnNames);
    }
    sql.append(" from ").append(tableName).append(" ").append(whereSql);

    // Null-safe rendering of the bind arguments: Arrays.asList(null) would throw NPE
    String argText = (args == null) ? "[]" : Arrays.asList(args).toString();

    if (log.isDebugEnabled()) {
        log.debug("sql=[" + sql.toString() + "], args=" + argText);
    }

    try {
        return convert(jdbcTemplate.queryForObject(sql.toString(), args, getRowMapper(tableName)));
    } catch (EmptyResultDataAccessException e) {
        // Callers expect null for "no row found" rather than an exception
        return null;
    } catch (DataAccessException e) {
        log.error("sql=[" + sql.toString() + "], args=" + argText, e);
        throw e;
    }
}

From source file:com.sfs.whichdoctor.dao.OrganisationDAOImpl.java

/**
 * Used to get a OrganisationBean for a specified organisationId with the
 * load details provided. A boolean parameter identifies whether to use the
 * default reader connection or optional writer connection datasource
 *
 * @param organisationId the organisation id
 * @param loadDetails the load details/*from w  w w. j av  a 2 s . com*/
 * @param useWriterConn the use writer conn
 *
 * @return the organisation bean
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
private OrganisationBean load(final int organisationId, final BuilderBean loadDetails,
        final boolean useWriterConn) throws WhichDoctorDaoException {

    OrganisationBean organisation = null;

    final String loadOrganisationId = getSQL().getValue("organisation/load")
            + " AND organisation.OrganisationId = ?";

    JdbcTemplate jdbcTemplate = this.getJdbcTemplateReader();
    if (useWriterConn) {
        jdbcTemplate = this.getJdbcTemplateWriter();
    }

    try {
        organisation = (OrganisationBean) jdbcTemplate.queryForObject(loadOrganisationId,
                new Object[] { organisationId }, new RowMapper() {
                    public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        return loadOrganisation(rs, loadDetails);
                    }
                });

    } catch (IncorrectResultSizeDataAccessException ie) {
        dataLogger.debug("No results found for this search: " + ie.getMessage());
    }
    return organisation;
}

From source file:au.aurin.org.svc.GeodataFinder.java

/**
 * Loads the user record matching the supplied email address together with the
 * user's roles, organisations, applications, access levels and agreements.
 *
 * @param email the email address identifying the user
 *
 * @return the populated userData bean, or null when no user matches or any
 *         query fails (failures are logged, not propagated)
 */
public userData getUser(final String email) {

    String query = "select * from users " + " where email=?";

    try {
        final JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);

        LOGGER.info("getUser: query is {} ", query);

        // Typed BeanPropertyRowMapper removes the raw type and the unchecked cast
        final userData user = jdbcTemplate.queryForObject(query, new String[] { email },
                new BeanPropertyRowMapper<userData>(userData.class));
        if (user != null) {
            final long user_id = user.getUser_id();
            LOGGER.info("user_id in  getUser query  is {} ", user_id);

            // All related collections are loaded with a bind variable for the user id
            // instead of concatenating it into the SQL (safer and cache-friendly).
            query = "select a.role_id,b.rolename from user_roles as a , roles as b where a.role_id = b.role_id "
                    + " and  a.user_id = ?";
            final List<roleData> roleuser = jdbcTemplate.query(query,
                    new BeanPropertyRowMapper<roleData>(roleData.class), user_id);
            user.setUserRoles(roleuser);

            query = "select a.org_id,b.orgname,b.orgcountry,b.orgstate,b.orglga,b.orgbounds,b.orgextent, b.orgcenter from user_orgs as a , organisations as b where a.org_id = b.org_id "
                    + " and  a.user_id = ?";
            final List<orgData> orguser = jdbcTemplate.query(query,
                    new BeanPropertyRowMapper<orgData>(orgData.class), user_id);
            user.setUserOrganisations(orguser);

            query = "select a.app_id,b.appname from user_apps as a , application as b where a.app_id = b.app_id "
                    + " and  a.user_id = ?";
            final List<appData> appuser = jdbcTemplate.query(query,
                    new BeanPropertyRowMapper<appData>(appData.class), user_id);
            user.setUserApplications(appuser);

            query = "select a.acclvl_id,b.acclvlname from user_acclvls as a , acclvls as b where a.acclvl_id = b.acclvl_id "
                    + " and  a.user_id = ?";
            final List<acclvlData> accuser = jdbcTemplate.query(query,
                    new BeanPropertyRowMapper<acclvlData>(acclvlData.class), user_id);
            user.setUserAccessLevels(accuser);

            query = "select a.agr_id,a.agreed, a.aggtime, b.lictext,b.licblob,b.licver,b.lic_id,c.orgname, d.appname from agreement as a , license as b, organisations as c, application as d "
                    + " where a.lic_id = b.lic_id  and b.org_id = c.org_id and a.app_id = d.app_id"
                    + " and  a.user_id = ?";
            final List<agreementData> agguser = jdbcTemplate.query(query,
                    new BeanPropertyRowMapper<agreementData>(agreementData.class), user_id);
            user.setUserAgreements(agguser);
        }

        return user;

    } catch (final Exception e) {

        // Preserves the original best-effort contract: log and return null
        LOGGER.info("error in  getUser is : {}", e.toString());

    }
    return null;

}

From source file:com.sfs.whichdoctor.dao.DebitDAOImpl.java

/**
 * Used to get a DebitBean for a specified debitId and supplied load
 * options. A boolean parameter identifies whether to use the default reader
 * connection or optional writer connection datasource.
 *
 * @param debitId the debit id
 * @param loadDetails the load details
 * @param useWriterConn true to query via the writer connection
 *
 * @return the debit bean, or null when no single matching row exists
 *
 * @throws WhichDoctorDaoException the which doctor dao exception
 */
private DebitBean load(final int debitId, final BuilderBean loadDetails, final boolean useWriterConn)
        throws WhichDoctorDaoException {

    DebitBean debit = null;

    dataLogger.info("DebitId: " + debitId + " requested");

    // Default to the reader datasource; switch to the writer when requested
    JdbcTemplate jdbcTemplate = this.getJdbcTemplateReader();
    if (useWriterConn) {
        jdbcTemplate = this.getJdbcTemplateWriter();
    }

    // NOTE(review): the predicate filters on invoice.InvoiceId — presumably the
    // "debit/load" SQL joins debits to invoices; confirm against the SQL resource.
    final String loadSQL = getSQL().getValue("debit/load") + " AND invoice.InvoiceId = ?";

    try {
        // Typed RowMapper removes the raw type and the unchecked cast
        debit = jdbcTemplate.queryForObject(loadSQL, new Object[] { debitId },
                new RowMapper<DebitBean>() {
                    @Override
                    public DebitBean mapRow(final ResultSet rs, final int rowNum) throws SQLException {
                        return loadDebit(rs, loadDetails);
                    }
                });

    } catch (IncorrectResultSizeDataAccessException ie) {
        // Zero rows (or more than one): treat as "not found" and return null
        dataLogger.debug("No results found for search: " + ie.getMessage());
    }
    return debit;
}

From source file:com.hexin.core.dao.BaseDaoSupport.java

/**
 * Runs a paged MySQL query: counts the total rows for the supplied SQL, then
 * fetches one page ordered and limited according to the page condition.
 *
 * @param jdbcTemplate the template used to execute both queries
 * @param sql the base SELECT statement (without LIMIT)
 * @param dtoClass the DTO type each row is mapped to
 * @param pageCondition page number, page size and sort settings; may be null,
 *        in which case page 1 with 10 rows and no explicit ordering is used
 * @param args positional bind arguments for the base SQL
 *
 * @return a populated Page containing the rows, totals and page index
 */
public <T> Page<T> findMySqlPage(JdbcTemplate jdbcTemplate, String sql, Class<T> dtoClass,
        PageCondition pageCondition, Object... args) {

    Page<T> page = new Page<T>();
    // StringBuilder: method-local, single-threaded — StringBuffer's locking is unnecessary
    StringBuilder countSqlBuf = new StringBuilder();
    int currentPage = 1;
    int pageSize = 10;
    String camelSortBy = "";
    String underlineSortBy = "";
    String orderBy = "";
    long total;
    long totalPage;
    List<T> resultList = null;

    // Build the count query by wrapping the base SQL with the trailing ORDER BY
    // stripped. NOTE(review): assumes "order " only appears in the final ORDER BY
    // clause of the base SQL — confirm for queries with "order" in identifiers.
    countSqlBuf.append("select count(*) from (");
    countSqlBuf.append(StringUtils.substringBeforeLast(sql, "order "));
    countSqlBuf.append(") tmp_table");
    debugSql(countSqlBuf.toString(), args);

    total = jdbcTemplate.queryForObject(countSqlBuf.toString(), Long.class, args);
    page.setTotal(total);

    StringBuilder pageSqlBuf = new StringBuilder();
    pageSqlBuf.append("select * from (");
    pageSqlBuf.append(sql);
    pageSqlBuf.append(") t ");

    if (pageCondition != null) {
        currentPage = pageCondition.getPage();
        pageSize = pageCondition.getRows();
        camelSortBy = pageCondition.getSort();
        orderBy = pageCondition.getOrder();

        // Sort column arrives in camelCase; the database columns use underscores
        underlineSortBy = IcpObjectUtil.camelToUnderline(camelSortBy);
    }

    // Guard against a zero/negative page size (division by zero below) and a
    // non-positive page number (negative LIMIT offset)
    if (pageSize <= 0) {
        pageSize = 10;
    }
    if (currentPage < 1) {
        currentPage = 1;
    }

    if (StringUtils.isNotEmpty(underlineSortBy) && StringUtils.isNotEmpty(orderBy)) {
        pageSqlBuf.append(" order by ");
        pageSqlBuf.append(underlineSortBy).append(" ").append(orderBy).append(" ");
    }
    pageSqlBuf.append(" limit ");
    pageSqlBuf.append((currentPage - 1) * pageSize);
    pageSqlBuf.append(" ,");
    pageSqlBuf.append(pageSize);
    pageSqlBuf.append(" ");

    debugSql(pageSqlBuf.toString(), args);

    RowMapper<T> rowMapper = new BeanPropertyRowMapper<T>(dtoClass);
    resultList = jdbcTemplate.query(pageSqlBuf.toString(), rowMapper, args);

    // Ceiling division: a partial final page still counts as a page
    long mod = total % pageSize;
    if (mod == 0) {
        totalPage = total / pageSize;
    } else {
        totalPage = total / pageSize + 1;
    }

    page.setRows(resultList);
    page.setCurrentPage(currentPage);
    page.setPageSize(pageSize);
    page.setTotalPage(totalPage);
    page.setPageIndex(PageIndex.getPageIndex(Constants.PAGE_RANGE, pageSize, totalPage));

    return page;
}

From source file:uta.ak.usttmp.dmcore.task.QuartzMiningJob.java

/**
 * Quartz job entry point: runs one mining iteration for the configured task.
 * Loads the MiningTask row, preprocesses raw texts for the current time window,
 * mines topics from them, optionally tracks topic evolution against the previous
 * iteration, and updates the task's execution count/status. Phase durations are
 * logged in the finally block regardless of outcome.
 */
@Override
public void execute(JobExecutionContext jec) {

    //====== statistic time ======
    // Each timestamp is overwritten when its phase completes; differences between
    // them give per-phase durations in the finally block.
    Date statisticStartTime = new Date();
    Date preprocessTime = new Date();
    Date miningTime = new Date();
    Date trackingTime = new Date();

    long statistic_TotalNum = 0;

    // Marks progress/completion of this iteration on the task row
    String updateSql = "UPDATE `c_miningtask`  " + "SET `qrtz_job_name` = ?,  "
            + " `qrtz_job_exec_count` = ?,  " + " `status` = ?  " + "WHERE  " + "   `mme_eid` = ?;";

    DataSource ds = null;
    JdbcTemplate jt = null;

    int nextExecCount = 0;
    int totalExecCount = 0;

    // Whether optional preprocess/tracking components are configured for this task
    boolean hasPC = false;
    boolean hasTC = false;

    MiningTask mt;

    try {

        miningTaskId = Long.parseLong((String) jec.getMergedJobDataMap().get("miningTaskId"));
        jobName = (String) jec.getMergedJobDataMap().get("jobName");

        // NOTE(review): a fresh Spring context is built on every job run — confirm
        // this is intentional; it is comparatively expensive.
        applicationContext = new ClassPathXmlApplicationContext("applicationContext.xml");

        ds = (DataSource) applicationContext.getBean("dataSource");
        jt = new JdbcTemplate(ds);

        //Load Miningtask
        String querySql = "select * from c_miningtask where mme_eid=?";
        mt = (MiningTask) jt.queryForObject(querySql, new Object[] { miningTaskId }, new MiningTaskRowMapper());

        // Task already finished all scheduled iterations: nothing to do
        if (mt.getQrtzJobExecCount() == mt.getQrtzJobTotalCount()) {
            return;
        }
        totalExecCount = mt.getQrtzJobTotalCount();

        // A component name of null or "NONE" (any case) means the phase is disabled
        if (null != mt.getPreprocessComponent() && !"NONE".equals(mt.getPreprocessComponent().toUpperCase())) {
            hasPC = true;
        }
        if (null != mt.getTrackingComponent() && !"NONE".equals(mt.getTrackingComponent().toUpperCase())) {
            hasTC = true;
        }

        List<Text> textList;
        List<Topic> topicList;

        if (hasPC) {
            preprocessComponent = (PreprocessComponent) applicationContext.getBean(mt.getPreprocessComponent());
        }

        miningComponent = (MiningComponent) applicationContext.getBean(mt.getMiningComponent());

        if (hasTC) {
            trackingComponent = (TrackingComponent) applicationContext.getBean(mt.getTrackingComponent());
        }

        miningTaskService = (MiningTaskService) applicationContext.getBean("miningTaskService");

        topicMiningService = (TopicMiningService) applicationContext.getBean("topicMiningService");

        nextExecCount = mt.getQrtzJobExecCount() + 1;

        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        //Calculate the time period
        // Window = [start + interval * execCount, start + interval * (execCount + 1)] hours
        Calendar cal = Calendar.getInstance();
        cal.setTime(mt.getStartTime());
        cal.add(Calendar.HOUR_OF_DAY, mt.getMiningInterval() * mt.getQrtzJobExecCount());
        Date startTime = cal.getTime();
        cal.setTime(mt.getStartTime());
        cal.add(Calendar.HOUR_OF_DAY, mt.getMiningInterval() * nextExecCount);
        Date endTime = cal.getTime();

        /**************************************************************
         * 1. Preprocess text data                                    *
         **************************************************************/
        // Processed texts are tagged "<taskId>_<tag>" to distinguish them from raw texts
        String sTag = mt.getId() + "_" + mt.getTag();

        //Clear the text table if there is existed text.
        String clearTextSQL = "DELETE " + "FROM " + "   c_text " + "WHERE " + "   ( "
                + "       text_createdate " + "      BETWEEN ? " + "      AND ? " + "   ) " + "AND tag = ?";
        jt.update(clearTextSQL, formatter.format(startTime), formatter.format(endTime), sTag);

        System.out.println("Query raw text records...");
        String querySQL = "SELECT " + "   * " + "FROM " + "   c_rawtext " + "WHERE " + "   ( "
                + "       text_createdate " + "      BETWEEN ? " + "      AND ? " + "   ) " + "AND tag = ?";

        List<RawText> rawTextList = jt.query(querySQL,
                new Object[] { formatter.format(startTime), formatter.format(endTime), mt.getTag() },
                new RawTextRowMapper());

        //if no raw text to be mined, then return
        if (null == rawTextList || rawTextList.isEmpty()) {
            int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                    : MiningTask.STATUS_RUNNING;
            //Update task status
            jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);
            return;
        }

        statistic_TotalNum = (null != rawTextList) ? rawTextList.size() : 0;

        if (hasPC) {

            textList = preprocessComponent.preprocess(mt, rawTextList);
        } else {

            // No preprocess component: copy raw texts straight into Text beans
            textList = new ArrayList<>();
            for (RawText rt : rawTextList) {
                Text tx = new Text();
                tx.setCreateTime(rt.getCreateTime());
                tx.setRawTextId(rt.getId());
                tx.setTag(sTag);
                tx.setText(rt.getText());
                tx.setTitle(rt.getTitle());

                textList.add(tx);
            }
        }

        //if no text to be mined, then return
        if (null == textList || textList.isEmpty()) {
            int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                    : MiningTask.STATUS_RUNNING;
            //Update task status
            jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);
            return;
        }

        // Batch-insert the processed texts
        List<Object[]> text_lines = new ArrayList<>();
        for (Text tx : textList) {
            Object[] ojarr = new Object[] { tx.getTitle(), tx.getText(), sTag,
                    String.valueOf(tx.getRawTextId()), formatter.format(tx.getCreateTime()) };
            text_lines.add(ojarr);
        }

        String insertSQL = "INSERT INTO c_text(mme_lastupdate, mme_updater, title, text, tag, rawtext_id, text_createdate) "
                + "VALUES (NOW(), \"USTTMP\", ?, ?, ?, ?, ?)";

        System.out.println("Start to insert text records...");
        jt.batchUpdate(insertSQL, text_lines);

        //statistic time
        preprocessTime = new Date();

        /**************************************************************
         * 2. Mining topics                                           *
         **************************************************************/

        //Clear the existed topics
        // Makes a re-run of the same iteration idempotent

        String clearTopicSQL = "DELETE " + "FROM " + "   c_topic " + "WHERE "
                + "miningtask_id=? AND seq_no = ?";
        jt.update(clearTopicSQL, miningTaskId, nextExecCount);

        topicList = miningComponent.generateTopics(mt, textList);

        String insertTpSQL = "INSERT INTO `c_topic` ( " + "   `mme_lastupdate`, " + "   `mme_updater`, "
                + "   `name`, " + "   `content`, " + "   `remark`, " + "   `miningtask_id`, " + "   `seq_no` "
                + ") " + "VALUES " + "   (NOW(), 'USTTMP' ,?,?,?,?,?)";

        List<Object[]> tpArgsList = new ArrayList<>();

        for (Topic tm : topicList) {
            Object[] objarr = new Object[] { tm.getName(), tm.toString(),
                    (null != tm.getRemark()) ? tm.getRemark() : "", miningTaskId, nextExecCount };
            tpArgsList.add(objarr);
        }

        System.out.println("Inserting records into the c_topic table...");
        jt.batchUpdate(insertTpSQL, tpArgsList);

        //statistic time
        miningTime = new Date();

        /**************************************************************
         * 3. Evolution tracking                                      *
         **************************************************************/

        // Tracking compares this iteration's topics with the previous iteration's,
        // so it only runs from the second iteration onwards
        if (hasTC) {
            if (nextExecCount > 1) {
                if (null != topicList && !topicList.isEmpty()) {

                    int preTopicSeq = nextExecCount - 1;
                    int nextTopicSeq = nextExecCount;

                    //Clear existed topic evolution rela
                    String clearEvSQL = "DELETE " + "FROM " + "   c_topicevolutionrela " + "WHERE "
                            + "   miningtask_id =? " + "AND pre_topic_seq =? " + "AND next_topic_seq =?";
                    jt.update(clearEvSQL, miningTaskId, preTopicSeq, nextTopicSeq);

                    List<Topic> preTopics = topicMiningService.getTopics(miningTaskId, preTopicSeq);
                    List<Topic> nextTopics = topicMiningService.getTopics(miningTaskId, nextTopicSeq);

                    // Either side missing means evolution cannot be computed — abort
                    // this iteration via the typed exception handled below
                    if (null == preTopics || preTopics.isEmpty()) {

                        UsttmpProcessException upe = new UsttmpProcessException(
                                UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION);
                        throw upe;
                    }
                    if (null == nextTopics || nextTopics.isEmpty()) {
                        UsttmpProcessException upe = new UsttmpProcessException(
                                UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION);
                        throw upe;
                    }

                    List<EvolutionRelationship> evRelaList = trackingComponent
                            .getTopicEvolutionRelationships(mt, preTopics, nextTopics);

                    String insertEvSql = "INSERT INTO `c_topicevolutionrela` (  " + "   `pre_topic_id`,  "
                            + "   `next_topic_id`,  " + "   `rank_against_pre_topic_in_next_group`,  "
                            + "   `rank_against_next_topic_in_pre_group`,  " + "   `similarity`  ,"
                            + "   `miningtask_id`  ," + "   `pre_topic_seq`  ," + "   `next_topic_seq`  "
                            + ")  " + "VALUES  " + "   (?, ?, ?, ?, ?, ?, ?, ?)";

                    List<Object[]> argsList = new ArrayList<>();

                    for (EvolutionRelationship er : evRelaList) {
                        Object[] objarr = new Object[] { er.getPreTopic().getId(), er.getNextTopic().getId(),
                                er.getRankAgainstPreTopicInNextGroup(), er.getRankAgainstNextTopicInPreGroup(),
                                er.getSimilarity(), miningTaskId, preTopicSeq, nextTopicSeq };
                        argsList.add(objarr);
                    }
                    jt.batchUpdate(insertEvSql, argsList);
                }
            }
        }

        int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                : MiningTask.STATUS_RUNNING;
        //Update task status
        jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);

        //statistic time
        trackingTime = new Date();

    } catch (UsttmpProcessException e) {

        // An evolution-tracking failure still advances the task's exec count so
        // the job does not retry the same window forever
        if (UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION.equals(e.getMessage())) {

            //Update task status
            int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                    : MiningTask.STATUS_RUNNING;
            jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);

        } else {

        }

        e.printStackTrace();
        StringWriter errors = new StringWriter();
        e.printStackTrace(new PrintWriter(errors));
        logger.error(errors.toString());
        //log exception table
        miningTaskService.logMiningTask(MiningTaskService.LOG_TYPE_EXCEPTION, miningTaskId, errors.toString());

        //statistic time
        trackingTime = new Date();
    } catch (Exception e) {

        // Any other failure: record the stack trace in the log and the exception
        // table; the task row is deliberately left untouched
        e.printStackTrace();
        StringWriter errors = new StringWriter();
        e.printStackTrace(new PrintWriter(errors));
        logger.error(errors.toString());
        //log exception table
        miningTaskService.logMiningTask(MiningTaskService.LOG_TYPE_EXCEPTION, miningTaskId, errors.toString());

        //statistic time
        trackingTime = new Date();

    } finally {

        // Phase durations in whole seconds, derived from the checkpoint timestamps
        long statistic_PreprocessTime = (preprocessTime.getTime() - statisticStartTime.getTime()) / 1000;
        long statistic_MiningTime = (miningTime.getTime() - preprocessTime.getTime()) / 1000;
        ;
        long statistic_TrackingTime = (trackingTime.getTime() - miningTime.getTime()) / 1000;
        long statistic_TotalTime = (trackingTime.getTime() - statisticStartTime.getTime()) / 1000;

        logger.info("============================ " + "This is start log. " + "============================");

        logger.info("Total number of texts being processed is " + statistic_TotalNum + ".");

        logger.info("Preprocess time is " + statistic_PreprocessTime + " seconds.");

        logger.info("Mining time is " + statistic_MiningTime + " seconds.");

        logger.info("Tracking time is " + statistic_TrackingTime + " seconds.");

        logger.info("Total time is " + statistic_TotalTime + " seconds.");

        logger.info("============================ " + "This is end log. " + "============================");

    }
}