Example usage for org.apache.ibatis.session SqlSession flushStatements

List of usage examples for org.apache.ibatis.session SqlSession flushStatements

Introduction

On this page you can find example usage for org.apache.ibatis.session SqlSession flushStatements.

Prototype

List<BatchResult> flushStatements();

Document

Flushes batch statements and returns the list of BatchResult objects for the updates that were executed.
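
With a session opened in ExecutorType.BATCH mode, insert, update, and delete calls made through a mapper are queued rather than executed immediately; flushStatements() sends the queued statements to the database and returns one BatchResult per batched statement, carrying the executed SQL and its update counts. The sketch below illustrates this pattern; sqlSessionFactory, UserMapper, User, and insertUser are hypothetical placeholder names and are not part of the examples that follow (BatchResult lives in org.apache.ibatis.executor; ExecutorType, SqlSession, and SqlSessionFactory in org.apache.ibatis.session).

public void insertUsersInBatch(SqlSessionFactory sqlSessionFactory, List<User> users) {

    // Open a batch session with auto-commit disabled; mapper calls are queued, not executed.
    try (SqlSession session = sqlSessionFactory.openSession(ExecutorType.BATCH, false)) {

        UserMapper mapper = session.getMapper(UserMapper.class);

        for (User user : users) {
            mapper.insertUser(user); // queued for the batch, not yet sent to the database
        }

        // Execute the queued batch; each BatchResult carries the executed SQL and its update counts.
        List<BatchResult> results = session.flushStatements();

        session.commit();
    }
}

Because auto-commit is off, nothing is persisted until commit() runs; try-with-resources then closes the session whether or not an exception was thrown.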

Usage

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java

License: Apache License

public int insertBatchLanguageDay(List<LanguageAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    LanguageDao dao = session.getMapper(LanguageDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<LanguageAll> it = in_volist.iterator();

            while (it.hasNext()) {
                LanguageAll insert = it.next();
                dao.insertLanguageDay(insert);
            }
        }

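        // Send the queued inserts to the database as a single batch.
        // The BatchResult list is not inspected here, so it is cleared right away.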
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java

License: Apache License

public int deleteLanguageDayByDate(String year, String month, String day) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    LanguageDao dao = session.getMapper(LanguageDao.class);

    boolean has_error = false;

    try {
        dao.deleteLanguageDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java

License: Apache License

public int insertBatchLanguageWeek(List<LanguageAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    LanguageDao dao = session.getMapper(LanguageDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<LanguageAll> it = in_volist.iterator();

            while (it.hasNext()) {
                LanguageAll insert = it.next();
                dao.insertLanguageWeek(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java

License: Apache License

public int deleteLanguageWeekByDate(String year, String week) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    LanguageDao dao = session.getMapper(LanguageDao.class);

    boolean has_error = false;

    try {
        dao.deleteLanguageWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java

License: Apache License

public int insertBatchLanguageMonth(List<LanguageAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    LanguageDao dao = session.getMapper(LanguageDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<LanguageAll> it = in_volist.iterator();

            while (it.hasNext()) {
                LanguageAll insert = it.next();
                dao.insertLanguageMonth(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java

License: Apache License

public int deleteLanguageMonthByDate(String year, String month) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    LanguageDao dao = session.getMapper(LanguageDao.class);

    boolean has_error = false;

    try {
        dao.deleteLanguageMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java

License: Apache License

public int insertBatchOsversionDay(List<OsversionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    OsversionDao dao = session.getMapper(OsversionDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<OsversionAll> it = in_volist.iterator();

            while (it.hasNext()) {
                OsversionAll insert = it.next();
                dao.insertOsversionDay(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java

License: Apache License

public int deleteOsversionDayByDate(String year, String month, String day) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    OsversionDao dao = session.getMapper(OsversionDao.class);

    boolean has_error = false;

    try {
        dao.deleteOsversionDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java

License: Apache License

public int insertBatchOsversionWeek(List<OsversionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    OsversionDao dao = session.getMapper(OsversionDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<OsversionAll> it = in_volist.iterator();

            while (it.hasNext()) {
                OsversionAll insert = it.next();
                dao.insertOsversionWeek(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java

License: Apache License

public int deleteOsversionWeekByDate(String year, String week) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    OsversionDao dao = session.getMapper(OsversionDao.class);

    boolean has_error = false;

    try {
        dao.deleteOsversionWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}