Example usage for org.apache.ibatis.session SqlSession flushStatements

List of usage examples for org.apache.ibatis.session SqlSession flushStatements

Introduction

On this page you can find example usage of org.apache.ibatis.session SqlSession flushStatements.

Prototype

List<BatchResult> flushStatements();

Document

Flushes batch statements.
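
The following is a minimal sketch of typical usage (not taken from the examples below). It assumes an existing SqlSessionFactory named sqlSessionFactory, a hypothetical UserMapper with an insertUser statement, a users collection, and the usual imports (java.util.List, java.util.Arrays, org.apache.ibatis.executor.BatchResult, org.apache.ibatis.session.ExecutorType). In BATCH executor mode, mapper calls are buffered; flushStatements() sends them to the database and returns one BatchResult per distinct statement.

SqlSession session = sqlSessionFactory.openSession(ExecutorType.BATCH, false);
try {
    UserMapper mapper = session.getMapper(UserMapper.class); // hypothetical mapper interface
    for (User user : users) {
        mapper.insertUser(user); // buffered by the BATCH executor, not yet executed
    }
    // Execute the buffered statements and inspect the per-statement update counts
    List<BatchResult> results = session.flushStatements();
    for (BatchResult result : results) {
        System.out.println(result.getSql() + " -> " + Arrays.toString(result.getUpdateCounts()));
    }
    session.commit();
} finally {
    session.close();
}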

Usage

From source file: ph.fingra.hadoop.dbms.parts.prerole.service.KeyLogServiceImpl.java

License: Apache License

public int renewalAppLogFirst(AppLogFirst new_vo, AppLogFirst old_vo) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    KeyLogDao dao = session.getMapper(KeyLogDao.class);

    boolean has_error = false;
    boolean is_modified = false;

    try {

        if (old_vo != null) {

            String old_date = old_vo.getYear() + "-" + old_vo.getMonth() + "-" + old_vo.getDay();
            String new_date = new_vo.getYear() + "-" + new_vo.getMonth() + "-" + new_vo.getDay();

            int days_num = DateTimeUtil.daysBetween(old_date, new_date, "yyyy-MM-dd");
            if (days_num < 0) {
                dao.updateAppLogFirst(new_vo);
                is_modified = true;
            } else {
                //do nothing
            }
        } else {
            dao.insertAppLogFirst(new_vo);
            is_modified = true;
        }

        if (is_modified) {
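            // In BATCH mode the insert/update above is only buffered; flushStatements() sends it to the database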
            List<BatchResult> results = session.flushStatements();
            results.clear();
        }
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.prerole.service.KeyLogServiceImpl.java

License: Apache License

public int renewalComponentLogFirst(ComponentLogFirst new_vo, ComponentLogFirst old_vo) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    KeyLogDao dao = session.getMapper(KeyLogDao.class);

    boolean has_error = false;
    boolean is_modified = false;

    try {

        if (old_vo != null) {

            String old_date = old_vo.getYear() + "-" + old_vo.getMonth() + "-" + old_vo.getDay();
            String new_date = new_vo.getYear() + "-" + new_vo.getMonth() + "-" + new_vo.getDay();

            int days_num = DateTimeUtil.daysBetween(old_date, new_date, "yyyy-MM-dd");
            if (days_num < 0) {
                dao.updateComponentLogFirst(new_vo);
                is_modified = true;
            } else {
                //do nothing
            }
        } else {
            dao.insertComponentLogFirst(new_vo);
            is_modified = true;
        }

        if (is_modified) {
            List<BatchResult> results = session.flushStatements();
            results.clear();
        }
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.prerole.service.LogsServiceImpl.java

License: Apache License

public int insertBatchLogsDay(List<LogsAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    LogsDao dao = session.getMapper(LogsDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<LogsAll> it = in_volist.iterator();

            while (it.hasNext()) {
                LogsAll insert = it.next();
                dao.insertLogsDay(insert);
            }
        }

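        // Send all buffered inserts to the database in one batch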
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.prerole.service.LogsServiceImpl.java

License: Apache License

public int deleteLogsDayByDate(String year, String month, String day) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    LogsDao dao = session.getMapper(LogsDao.class);

    boolean has_error = false;

    try {
        dao.deleteLogsDayByKey(year, month, day, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}