List of usage examples for org.apache.ibatis.session SqlSession flushStatements
List<BatchResult> flushStatements();
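All of the examples below follow the same batch pattern: open a SqlSession with ExecutorType.BATCH and auto-commit disabled, queue inserts or deletes through a mapper, call flushStatements() to execute the queued batch, then commit on success or roll back on failure. The following is a minimal sketch of that pattern; it uses the standard MyBatis SqlSessionFactory API, and the UserDao mapper and User value object are hypothetical stand-ins for the application-specific classes in the real examples.

import java.util.List;
import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class FlushStatementsSketch {

    // Hypothetical mapper interface; insertUser would be backed by an <insert> mapping.
    public interface UserDao {
        int insertUser(User user);
    }

    // Hypothetical value object.
    public static class User {
        public String name;
    }

    public static int insertBatch(SqlSessionFactory factory, List<User> users) throws Exception {
        // Batch-mode session with auto-commit disabled: statements are queued, not executed.
        SqlSession session = factory.openSession(ExecutorType.BATCH, false);
        try {
            UserDao dao = session.getMapper(UserDao.class);
            for (User user : users) {
                dao.insertUser(user); // queued as part of the batch
            }
            // Execute every queued statement and collect the per-statement results.
            List<BatchResult> results = session.flushStatements();
            session.commit();
            return results.size();
        } catch (Exception e) {
            session.rollback();
            throw e;
        } finally {
            session.close();
        }
    }
}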
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthSectionServiceImpl.java
License: Apache License

public int insertBatchCountrySessionlengthSectionDay(List<CountrySessionlengthSectionAll> in_volist)
        throws Exception {
    if (in_volist == null) {
        return 0;
    }
    // Open a batch-mode session; statements are queued until flushStatements() is called.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthSectionDao dao = session.getMapper(CountrySessionlengthSectionDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountrySessionlengthSectionAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountrySessionlengthSectionAll insert = it.next();
                // Queued as part of the batch; not executed yet.
                dao.insertCountrySessionlengthSectionDay(insert);
            }
        }
        // Execute every queued insert and collect the per-statement results.
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthSectionServiceImpl.java
License: Apache License

public int deleteCountrySessionlengthSectionDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthSectionDao dao = session.getMapper(CountrySessionlengthSectionDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountrySessionlengthSectionDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthSectionServiceImpl.java
License: Apache License

public int insertBatchCountrySessionlengthSectionWeek(List<CountrySessionlengthSectionAll> in_volist)
        throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthSectionDao dao = session.getMapper(CountrySessionlengthSectionDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountrySessionlengthSectionAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountrySessionlengthSectionAll insert = it.next();
                dao.insertCountrySessionlengthSectionWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthSectionServiceImpl.java
License: Apache License

public int deleteCountrySessionlengthSectionWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthSectionDao dao = session.getMapper(CountrySessionlengthSectionDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountrySessionlengthSectionWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthSectionServiceImpl.java
License: Apache License

public int insertBatchCountrySessionlengthSectionMonth(List<CountrySessionlengthSectionAll> in_volist)
        throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthSectionDao dao = session.getMapper(CountrySessionlengthSectionDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountrySessionlengthSectionAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountrySessionlengthSectionAll insert = it.next();
                dao.insertCountrySessionlengthSectionMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthSectionServiceImpl.java
License: Apache License

public int deleteCountrySessionlengthSectionMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthSectionDao dao = session.getMapper(CountrySessionlengthSectionDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountrySessionlengthSectionMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthServiceImpl.java
License: Apache License

public int insertBatchCountrySessionlengthDay(List<CountrySessionlengthAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthDao dao = session.getMapper(CountrySessionlengthDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountrySessionlengthAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountrySessionlengthAll insert = it.next();
                dao.insertCountrySessionlengthDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthServiceImpl.java
License: Apache License

public int deleteCountrySessionlengthDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthDao dao = session.getMapper(CountrySessionlengthDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountrySessionlengthDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthServiceImpl.java
License: Apache License

public int insertBatchCountrySessionlengthWeek(List<CountrySessionlengthAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthDao dao = session.getMapper(CountrySessionlengthDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountrySessionlengthAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountrySessionlengthAll insert = it.next();
                dao.insertCountrySessionlengthWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountrySessionlengthServiceImpl.java
License: Apache License

public int deleteCountrySessionlengthWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountrySessionlengthDao dao = session.getMapper(CountrySessionlengthDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountrySessionlengthWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}