Example usage for org.apache.ibatis.session SqlSession flushStatements

List of usage examples for org.apache.ibatis.session SqlSession flushStatements

Introduction

On this page you can find example usage for org.apache.ibatis.session SqlSession flushStatements.

Prototype

List<BatchResult> flushStatements();

Source Link

Document

Flushes batch statements.

Usage

From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionServiceImpl.java

License:Apache License

/**
 * Inserts a batch of daily session rows in a single transaction.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception if an insert, flush, or commit fails; the batch is rolled back
 */
public int insertBatchSessionDay(List<SessionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    // Batch executor with auto-commit off so all inserts succeed or fail together.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        SessionDao dao = session.getMapper(SessionDao.class);
        for (SessionAll insert : in_volist) {
            dao.insertSessionDay(insert);
        }
        // Execute the queued batch statements before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionServiceImpl.java

License:Apache License

/**
 * Deletes the daily session rows for the given date in a single transaction.
 *
 * @param year  four-digit year key
 * @param month month key
 * @param day   day key
 * @return 1 on success
 * @throws Exception if the delete, flush, or commit fails; the batch is rolled back
 */
public int deleteSessionDayByDate(String year, String month, String day) throws Exception {

    // Batch executor with auto-commit off so the delete is transactional.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        SessionDao dao = session.getMapper(SessionDao.class);
        dao.deleteSessionDayByKey(year, month, day, "");
        // Execute the queued batch statement before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionServiceImpl.java

License:Apache License

/**
 * Inserts a batch of weekly session rows in a single transaction.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception if an insert, flush, or commit fails; the batch is rolled back
 */
public int insertBatchSessionWeek(List<SessionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    // Batch executor with auto-commit off so all inserts succeed or fail together.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        SessionDao dao = session.getMapper(SessionDao.class);
        for (SessionAll insert : in_volist) {
            dao.insertSessionWeek(insert);
        }
        // Execute the queued batch statements before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionServiceImpl.java

License:Apache License

/**
 * Deletes the weekly session rows for the given week in a single transaction.
 *
 * @param year four-digit year key
 * @param week week-of-year key
 * @return 1 on success
 * @throws Exception if the delete, flush, or commit fails; the batch is rolled back
 */
public int deleteSessionWeekByDate(String year, String week) throws Exception {

    // Batch executor with auto-commit off so the delete is transactional.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        SessionDao dao = session.getMapper(SessionDao.class);
        dao.deleteSessionWeekByKey(year, week, "");
        // Execute the queued batch statement before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionServiceImpl.java

License:Apache License

/**
 * Inserts a batch of monthly session rows in a single transaction.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception if an insert, flush, or commit fails; the batch is rolled back
 */
public int insertBatchSessionMonth(List<SessionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    // Batch executor with auto-commit off so all inserts succeed or fail together.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        SessionDao dao = session.getMapper(SessionDao.class);
        for (SessionAll insert : in_volist) {
            dao.insertSessionMonth(insert);
        }
        // Execute the queued batch statements before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionServiceImpl.java

License:Apache License

/**
 * Deletes the monthly session rows for the given month in a single transaction.
 *
 * @param year  four-digit year key
 * @param month month key
 * @return 1 on success
 * @throws Exception if the delete, flush, or commit fails; the batch is rolled back
 */
public int deleteSessionMonthByDate(String year, String month) throws Exception {

    // Batch executor with auto-commit off so the delete is transactional.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        SessionDao dao = session.getMapper(SessionDao.class);
        dao.deleteSessionMonthByKey(year, month, "");
        // Execute the queued batch statement before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.TimeServiceImpl.java

License:Apache License

/**
 * Inserts a batch of daily time rows in a single transaction.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception if an insert, flush, or commit fails; the batch is rolled back
 */
public int insertBatchTimeDay(List<TimeAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    // Batch executor with auto-commit off so all inserts succeed or fail together.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        TimeDao dao = session.getMapper(TimeDao.class);
        for (TimeAll insert : in_volist) {
            dao.insertTimeDay(insert);
        }
        // Execute the queued batch statements before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.TimeServiceImpl.java

License:Apache License

/**
 * Deletes the daily time rows for the given date in a single transaction.
 *
 * @param year  four-digit year key
 * @param month month key
 * @param day   day key
 * @return 1 on success
 * @throws Exception if the delete, flush, or commit fails; the batch is rolled back
 */
public int deleteTimeDayByDate(String year, String month, String day) throws Exception {

    // Batch executor with auto-commit off so the delete is transactional.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        TimeDao dao = session.getMapper(TimeDao.class);
        dao.deleteTimeDayByKey(year, month, day, "");
        // Execute the queued batch statement before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.TimeServiceImpl.java

License:Apache License

/**
 * Inserts a batch of weekly time rows in a single transaction.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is {@code null}
 * @throws Exception if an insert, flush, or commit fails; the batch is rolled back
 */
public int insertBatchTimeWeek(List<TimeAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    // Batch executor with auto-commit off so all inserts succeed or fail together.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        TimeDao dao = session.getMapper(TimeDao.class);
        for (TimeAll insert : in_volist) {
            dao.insertTimeWeek(insert);
        }
        // Execute the queued batch statements before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.TimeServiceImpl.java

License:Apache License

/**
 * Deletes the weekly time rows for the given week in a single transaction.
 *
 * @param year four-digit year key
 * @param week week-of-year key
 * @return 1 on success
 * @throws Exception if the delete, flush, or commit fails; the batch is rolled back
 */
public int deleteTimeWeekByDate(String year, String week) throws Exception {

    // Batch executor with auto-commit off so the delete is transactional.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);

    try {
        TimeDao dao = session.getMapper(TimeDao.class);
        dao.deleteTimeWeekByKey(year, week, "");
        // Execute the queued batch statement before committing.
        session.flushStatements();
        // Commit inside try so a commit failure is rolled back and the session
        // is still closed by finally (original committed in finally, risking a leak).
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point; the original closed the session twice on the error path.
        session.close();
    }
}