Example usage for org.apache.ibatis.session SqlSession flushStatements

List of usage examples for org.apache.ibatis.session SqlSession flushStatements

Introduction

On this page you can find example usage for org.apache.ibatis.session SqlSession flushStatements.

Prototype

List<BatchResult> flushStatements();

Source Link

Document

Flushes batch statements.

Usage

From source file:ph.fingra.hadoop.dbms.parts.performance.service.TimeServiceImpl.java

License:Apache License

/**
 * Inserts all rows in {@code in_volist} into the time-month table in a single
 * MyBatis batch session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception any persistence error, after rolling back the batch
 */
public int insertBatchTimeMonth(List<TimeAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    TimeDao dao = session.getMapper(TimeDao.class);

    try {
        for (TimeAll insert : in_volist) {
            dao.insertTimeMonth(insert);
        }

        // Execute the queued batch statements, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point for both success and failure paths; the original
        // closed the session in the catch block AND again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.TimeServiceImpl.java

License:Apache License

/**
 * Deletes the time-month rows matching the given year and month in a MyBatis
 * batch session.
 *
 * @param year  year key of the rows to delete
 * @param month month key of the rows to delete
 * @return 1 on success
 * @throws Exception any persistence error, after rolling back the batch
 */
public int deleteTimeMonthByDate(String year, String month) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    TimeDao dao = session.getMapper(TimeDao.class);

    try {
        dao.deleteTimeMonthByKey(year, month, "");
        // Execute the queued batch statement, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.UserServiceImpl.java

License:Apache License

/**
 * Inserts all rows in {@code in_volist} into the user-hour table in a single
 * MyBatis batch session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception any persistence error, after rolling back the batch
 */
public int insertBatchUserHour(List<UserAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    UserDao dao = session.getMapper(UserDao.class);

    try {
        for (UserAll insert : in_volist) {
            dao.insertUserHour(insert);
        }

        // Execute the queued batch statements, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.UserServiceImpl.java

License:Apache License

/**
 * Deletes the user-hour rows matching the given date keys in a MyBatis batch
 * session.
 *
 * @param year  year key of the rows to delete
 * @param month month key of the rows to delete
 * @param day   day key of the rows to delete
 * @param hour  hour key of the rows to delete
 * @return 1 on success
 * @throws Exception any persistence error, after rolling back the batch
 */
public int deleteUserHourByDate(String year, String month, String day, String hour) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    UserDao dao = session.getMapper(UserDao.class);

    try {
        dao.deleteUserHourByKey(year, month, day, hour, "");
        // Execute the queued batch statement, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.UserServiceImpl.java

License:Apache License

/**
 * Inserts all rows in {@code in_volist} into the user-day table in a single
 * MyBatis batch session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception any persistence error, after rolling back the batch
 */
public int insertBatchUserDay(List<UserAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    UserDao dao = session.getMapper(UserDao.class);

    try {
        for (UserAll insert : in_volist) {
            dao.insertUserDay(insert);
        }

        // Execute the queued batch statements, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.UserServiceImpl.java

License:Apache License

/**
 * Deletes the user-day rows matching the given date keys in a MyBatis batch
 * session.
 *
 * @param year  year key of the rows to delete
 * @param month month key of the rows to delete
 * @param day   day key of the rows to delete
 * @return 1 on success
 * @throws Exception any persistence error, after rolling back the batch
 */
public int deleteUserDayByDate(String year, String month, String day) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    UserDao dao = session.getMapper(UserDao.class);

    try {
        dao.deleteUserDayByKey(year, month, day, "");
        // Execute the queued batch statement, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.UserServiceImpl.java

License:Apache License

/**
 * Inserts all rows in {@code in_volist} into the user-week table in a single
 * MyBatis batch session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception any persistence error, after rolling back the batch
 */
public int insertBatchUserWeek(List<UserAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    UserDao dao = session.getMapper(UserDao.class);

    try {
        for (UserAll insert : in_volist) {
            dao.insertUserWeek(insert);
        }

        // Execute the queued batch statements, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.UserServiceImpl.java

License:Apache License

/**
 * Deletes the user-week rows matching the given year and week in a MyBatis
 * batch session.
 *
 * @param year year key of the rows to delete
 * @param week week key of the rows to delete
 * @return 1 on success
 * @throws Exception any persistence error, after rolling back the batch
 */
public int deleteUserWeekByDate(String year, String week) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    UserDao dao = session.getMapper(UserDao.class);

    try {
        dao.deleteUserWeekByKey(year, week, "");
        // Execute the queued batch statement, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.UserServiceImpl.java

License:Apache License

/**
 * Inserts all rows in {@code in_volist} into the user-month table in a single
 * MyBatis batch session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception any persistence error, after rolling back the batch
 */
public int insertBatchUserMonth(List<UserAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    UserDao dao = session.getMapper(UserDao.class);

    try {
        for (UserAll insert : in_volist) {
            dao.insertUserMonth(insert);
        }

        // Execute the queued batch statements, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.UserServiceImpl.java

License:Apache License

/**
 * Deletes the user-month rows matching the given year and month in a MyBatis
 * batch session.
 *
 * @param year  year key of the rows to delete
 * @param month month key of the rows to delete
 * @return 1 on success
 * @throws Exception any persistence error, after rolling back the batch
 */
public int deleteUserMonthByDate(String year, String month) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    UserDao dao = session.getMapper(UserDao.class);

    try {
        dao.deleteUserMonthByKey(year, month, "");
        // Execute the queued batch statement, then commit.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session both in the
        // catch block and again in finally.
        session.close();
    }

    // The error path always rethrows, so reaching here means success.
    return 1;
}