Example usage for org.apache.ibatis.session SqlSession flushStatements

Introduction

This page shows example usages of the org.apache.ibatis.session.SqlSession method flushStatements.

Prototype

List<BatchResult> flushStatements();

Document

Flushes batch statements.
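
With a BATCH executor, insert/update/delete calls are queued rather than executed immediately; flushStatements() sends everything queued so far to the database and returns one BatchResult per mapped statement, in execution order. The sketch below illustrates the basic flow. It assumes a UserMapper interface registered with the SqlSessionFactory's configuration; the mapper, its insertUser statement, and the users table are hypothetical placeholders, and only the SqlSession and annotation calls are MyBatis API.

import java.util.List;

import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class BatchFlushSketch {

    // Hypothetical mapper, for illustration only; it must be registered
    // in the MyBatis configuration for getMapper() to resolve it.
    public interface UserMapper {
        @Insert("INSERT INTO users (name) VALUES (#{name})")
        int insertUser(@Param("name") String name);
    }

    public static int insertNames(SqlSessionFactory factory, List<String> names) {
        try (SqlSession session = factory.openSession(ExecutorType.BATCH, false)) {
            UserMapper mapper = session.getMapper(UserMapper.class);
            for (String name : names) {
                mapper.insertUser(name); // queued, not executed yet
            }
            // Execute the queued batch and collect the JDBC results.
            List<BatchResult> results = session.flushStatements();
            session.commit();

            int total = 0;
            for (BatchResult result : results) {
                // One entry per queued execution; some JDBC drivers report
                // Statement.SUCCESS_NO_INFO (-2) instead of an exact count.
                for (int count : result.getUpdateCounts()) {
                    total += count;
                }
            }
            return total;
        }
    }
}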

Usage

From source file:ph.fingra.hadoop.dbms.parts.performance.service.NewuserServiceImpl.java

License:Apache License

public int insertBatchNewuserWeek(List<NewuserAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    NewuserDao dao = session.getMapper(NewuserDao.class);

    boolean has_error = false;

    try {

        // in_volist was already null-checked above, so iterate it directly
        for (NewuserAll insert : in_volist) {
            dao.insertNewuserWeek(insert);
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e; // no close() here; the finally block below closes the session
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}
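
Note that the example discards the flush results immediately (results.clear()). When the caller cares about how many rows the batch affected, the returned List<BatchResult> can be inspected first; each BatchResult carries the JDBC update counts for one mapped statement. A small sketch of such a check (the println is just illustrative):

    List<BatchResult> results = session.flushStatements();
    for (BatchResult result : results) {
        // Update counts arrive one per queued execution of this statement.
        int[] counts = result.getUpdateCounts();
        System.out.println(result.getMappedStatement().getId()
                + " executed " + counts.length + " batched statements");
    }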

From source file:ph.fingra.hadoop.dbms.parts.performance.service.NewuserServiceImpl.java

License:Apache License

public int deleteNewuserWeekByDate(String year, String week) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    NewuserDao dao = session.getMapper(NewuserDao.class);

    boolean has_error = false;

    try {
        dao.deleteNewuserWeekByKey(year, week, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.NewuserServiceImpl.java

License:Apache License

public int insertBatchNewuserMonth(List<NewuserAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    NewuserDao dao = session.getMapper(NewuserDao.class);

    boolean has_error = false;

    try {

        for (NewuserAll insert : in_volist) {
            dao.insertNewuserMonth(insert);
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.NewuserServiceImpl.java

License:Apache License

public int deleteNewuserMonthByDate(String year, String month) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    NewuserDao dao = session.getMapper(NewuserDao.class);

    boolean has_error = false;

    try {
        dao.deleteNewuserMonthByKey(year, month, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.PageviewServiceImpl.java

License:Apache License

public int insertBatchPageviewHour(List<PageviewAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    PageviewDao dao = session.getMapper(PageviewDao.class);

    boolean has_error = false;

    try {

        for (PageviewAll insert : in_volist) {
            dao.insertPageviewHour(insert);
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.PageviewServiceImpl.java

License:Apache License

public int deletePageviewHourByDate(String year, String month, String day, String hour) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    PageviewDao dao = session.getMapper(PageviewDao.class);

    boolean has_error = false;

    try {
        dao.deletePageviewHourByKey(year, month, day, hour, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.PageviewServiceImpl.java

License:Apache License

public int insertBatchPageviewDay(List<PageviewAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    PageviewDao dao = session.getMapper(PageviewDao.class);

    boolean has_error = false;

    try {

        for (PageviewAll insert : in_volist) {
            dao.insertPageviewDay(insert);
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.PageviewServiceImpl.java

License:Apache License

public int deletePageviewDayByDate(String year, String month, String day) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    PageviewDao dao = session.getMapper(PageviewDao.class);

    boolean has_error = false;

    try {
        dao.deletePageviewDayByKey(year, month, day, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.PageviewServiceImpl.java

License:Apache License

public int insertBatchPageviewWeek(List<PageviewAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    PageviewDao dao = session.getMapper(PageviewDao.class);

    boolean has_error = false;

    try {

        for (PageviewAll insert : in_volist) {
            dao.insertPageviewWeek(insert);
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file:ph.fingra.hadoop.dbms.parts.performance.service.PageviewServiceImpl.java

License:Apache License

public int deletePageviewWeekByDate(String year, String week) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    PageviewDao dao = session.getMapper(PageviewDao.class);

    boolean has_error = false;

    try {
        dao.deletePageviewWeekByKey(year, week, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}
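
All ten examples above share the same scaffolding: open a BATCH session with autoCommit=false, queue the statements, call flushStatements(), then commit or roll back. On recent MyBatis versions SqlSession implements java.io.Closeable, so the same pattern can be written with try-with-resources, which also guarantees the session is closed even if commit() itself throws (in the originals, a commit() failure inside finally would skip the close() that follows it). A sketch of that variant, reusing the project's own ConnectionFactory and NewuserDao for illustration:

public int insertBatchNewuserWeek(List<NewuserAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    // try-with-resources closes the session on every path; closing an
    // uncommitted session opened with autoCommit=false rolls the batch back.
    try (SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false)) {
        NewuserDao dao = session.getMapper(NewuserDao.class);
        for (NewuserAll insert : in_volist) {
            dao.insertNewuserWeek(insert);
        }
        session.flushStatements(); // execute the queued batch
        session.commit();
        return 1; // the has_error flag is unnecessary once exceptions propagate
    }
}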