Example usage for org.apache.ibatis.session SqlSession flushStatements

List of usage examples for org.apache.ibatis.session SqlSession flushStatements

Introduction

On this page you can find example usages of org.apache.ibatis.session SqlSession flushStatements.

Prototype

List<BatchResult> flushStatements();

Document

Flushes batch statements and returns the resulting list of BatchResult objects for the updated records.
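
Below is a minimal, self-contained sketch of the usual pattern, assuming a hypothetical ExampleRow value object, a hypothetical ExampleDao mapper, and a SqlSessionFactory with that mapper registered (none of these appear in the collected examples that follow): open the session with ExecutorType.BATCH, queue inserts through the mapper, then call flushStatements() to execute the queued JDBC batch and read the update counts from the returned BatchResult list.

import java.util.List;

import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class FlushStatementsSketch {

    // Hypothetical value object, included only so the sketch is self-contained.
    public static class ExampleRow {
        public String name;
    }

    // Hypothetical mapper; the table and column names are assumptions.
    public interface ExampleDao {
        @Insert("INSERT INTO example_row (name) VALUES (#{name})")
        int insertRow(ExampleRow row);
    }

    public static int insertAll(SqlSessionFactory factory, List<ExampleRow> rows) {
        // With the BATCH executor, mapper calls are queued rather than executed immediately.
        try (SqlSession session = factory.openSession(ExecutorType.BATCH, false)) {
            ExampleDao dao = session.getMapper(ExampleDao.class);
            for (ExampleRow row : rows) {
                dao.insertRow(row);
            }
            // Sends the queued JDBC batch; one BatchResult per distinct prepared statement.
            List<BatchResult> results = session.flushStatements();
            session.commit();

            int total = 0;
            for (BatchResult result : results) {
                for (int count : result.getUpdateCounts()) {
                    total += count;
                }
            }
            return total;
        }
    }
}

With the BATCH executor nothing reaches the database until flushStatements() runs; commit() also flushes any remaining queued statements before committing.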

Usage

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java

License: Apache License

public int insertBatchOsversionMonth(List<OsversionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    OsversionDao dao = session.getMapper(OsversionDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<OsversionAll> it = in_volist.iterator();

            while (it.hasNext()) {
                OsversionAll insert = it.next();
                dao.insertOsversionMonth(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}
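
Note that in this example (and in the ones that follow) the BatchResult list returned by flushStatements() is simply discarded via results.clear(). With the BATCH executor the mapper calls above only queue the inserts; it is flushStatements(), followed by the commit() in the finally block, that actually executes them against the database.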

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java

License: Apache License

public int deleteOsversionMonthByDate(String year, String month) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    OsversionDao dao = session.getMapper(OsversionDao.class);

    boolean has_error = false;

    try {
        dao.deleteOsversionMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.ResolutionServiceImpl.java

License: Apache License

public int insertBatchResolutionDay(List<ResolutionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ResolutionDao dao = session.getMapper(ResolutionDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<ResolutionAll> it = in_volist.iterator();

            while (it.hasNext()) {
                ResolutionAll insert = it.next();
                dao.insertResolutionDay(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.ResolutionServiceImpl.java

License: Apache License

public int deleteResolutionDayByDate(String year, String month, String day) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ResolutionDao dao = session.getMapper(ResolutionDao.class);

    boolean has_error = false;

    try {
        dao.deleteResolutionDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.ResolutionServiceImpl.java

License: Apache License

public int insertBatchResolutionWeek(List<ResolutionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ResolutionDao dao = session.getMapper(ResolutionDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<ResolutionAll> it = in_volist.iterator();

            while (it.hasNext()) {
                ResolutionAll insert = it.next();
                dao.insertResolutionWeek(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.ResolutionServiceImpl.java

License: Apache License

public int deleteResolutionWeekByDate(String year, String week) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ResolutionDao dao = session.getMapper(ResolutionDao.class);

    boolean has_error = false;

    try {
        dao.deleteResolutionWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.ResolutionServiceImpl.java

License: Apache License

public int insertBatchResolutionMonth(List<ResolutionAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ResolutionDao dao = session.getMapper(ResolutionDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<ResolutionAll> it = in_volist.iterator();

            while (it.hasNext()) {
                ResolutionAll insert = it.next();
                dao.insertResolutionMonth(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.distribution.service.ResolutionServiceImpl.java

License: Apache License

public int deleteResolutionMonthByDate(String year, String month) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ResolutionDao dao = session.getMapper(ResolutionDao.class);

    boolean has_error = false;

    try {
        dao.deleteResolutionMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.performance.service.FrequencyServiceImpl.java

License: Apache License

public int insertBatchFrequencyHour(List<FrequencyAll> in_volist) throws Exception {

    if (in_volist == null) {
        return 0;
    }

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    FrequencyDao dao = session.getMapper(FrequencyDao.class);

    boolean has_error = false;

    try {

        if (in_volist != null) {

            Iterator<FrequencyAll> it = in_volist.iterator();

            while (it.hasNext()) {
                FrequencyAll insert = it.next();
                dao.insertFrequencyHour(insert);
            }
        }

        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}

From source file: ph.fingra.hadoop.dbms.parts.performance.service.FrequencyServiceImpl.java

License: Apache License

public int deleteFrequencyHourByDate(String year, String month, String day, String hour) throws Exception {

    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    FrequencyDao dao = session.getMapper(FrequencyDao.class);

    boolean has_error = false;

    try {
        dao.deleteFrequencyHourByKey(year, month, day, hour, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }

    return (has_error == false) ? 1 : 0;
}