List of usage examples for org.apache.ibatis.session.SqlSession#flushStatements()
List<BatchResult> flushStatements();
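
flushStatements() forces any update statements queued by a BATCH-mode executor to be sent to the database and returns their results as a List<BatchResult>; it does not commit the transaction. The sketch below shows the core pattern that the examples on this page follow. It is a minimal illustration under stated assumptions: the SqlSessionFactory instance, the CountryMapper interface, the Country class, and the table it writes to are placeholders invented for this sketch (not part of the MyBatis API or of the projects listed below), and the mapper is assumed to be registered with the MyBatis configuration.

import java.util.List;

import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class FlushStatementsSketch {

    // Illustrative value object (placeholder).
    public static class Country {
        public String code;
        public String name;
    }

    // Illustrative mapper (placeholder); assumed to be registered with the configuration.
    public interface CountryMapper {
        @Insert("INSERT INTO country (code, name) VALUES (#{code}, #{name})")
        void insertCountry(Country row);
    }

    // Queue a batch of inserts, flush them as JDBC batches, then commit.
    public static List<BatchResult> insertAll(SqlSessionFactory factory, List<Country> rows) {
        // BATCH executor with manual commit; closing without commit discards the queued work.
        try (SqlSession session = factory.openSession(ExecutorType.BATCH, false)) {
            CountryMapper mapper = session.getMapper(CountryMapper.class);
            for (Country row : rows) {
                mapper.insertCountry(row); // queued by the batch executor, not yet executed
            }
            // Send the queued statements to the database and collect their results.
            List<BatchResult> results = session.flushStatements();
            session.commit();
            return results;
        }
    }
}

The examples below from the ph.fingra project follow the same shape, but manage commit, rollback, and close explicitly in try/catch/finally blocks instead of try-with-resources.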
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryPageviewServiceImpl.java
License: Apache License
public int insertBatchCountryPageviewWeek(List<CountryPageviewAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryPageviewDao dao = session.getMapper(CountryPageviewDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryPageviewAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryPageviewAll insert = it.next();
                dao.insertCountryPageviewWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryPageviewServiceImpl.java
License: Apache License
public int deleteCountryPageviewWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryPageviewDao dao = session.getMapper(CountryPageviewDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryPageviewWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryPageviewServiceImpl.java
License: Apache License
public int insertBatchCountryPageviewMonth(List<CountryPageviewAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryPageviewDao dao = session.getMapper(CountryPageviewDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryPageviewAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryPageviewAll insert = it.next();
                dao.insertCountryPageviewMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryPageviewServiceImpl.java
License: Apache License
public int deleteCountryPageviewMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryPageviewDao dao = session.getMapper(CountryPageviewDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryPageviewMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryServiceImpl.java
License: Apache License
public int insertBatchCountryDay(List<CountryAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryDao dao = session.getMapper(CountryDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryAll insert = it.next();
                dao.insertCountryDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryServiceImpl.java
License: Apache License
public int deleteCountryDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryDao dao = session.getMapper(CountryDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryServiceImpl.java
License: Apache License
public int insertBatchCountryWeek(List<CountryAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryDao dao = session.getMapper(CountryDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryAll insert = it.next();
                dao.insertCountryWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryServiceImpl.java
License: Apache License
public int deleteCountryWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryDao dao = session.getMapper(CountryDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryServiceImpl.java
License: Apache License
public int insertBatchCountryMonth(List<CountryAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryDao dao = session.getMapper(CountryDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryAll insert = it.next();
                dao.insertCountryMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.distribution.service.CountryServiceImpl.java
License: Apache License
public int deleteCountryMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryDao dao = session.getMapper(CountryDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}