List of usage examples for org.apache.ibatis.session SqlSession rollback
void rollback();
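SqlSession.rollback() discards the session's pending work, including statements queued in batch mode, and rolls the underlying database connection back. The examples below all follow the same pattern: open a BATCH-mode session with auto-commit disabled, run the mapper calls, flush the statements, and either commit or roll back before closing. The following is a minimal sketch of that pattern, not taken from the examples: sessionFactory, ExampleDao, ExampleRecord and insertRecord are hypothetical placeholders standing in for the project-specific ConnectionFactory and mapper classes used in the source files below.

import java.util.List;
import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

// Hypothetical sketch of the batch insert/rollback pattern used in the examples.
public int runBatch(SqlSessionFactory sessionFactory, List<ExampleRecord> records) throws Exception {
    SqlSession session = sessionFactory.openSession(ExecutorType.BATCH, false);
    try {
        ExampleDao dao = session.getMapper(ExampleDao.class); // ExampleDao is a placeholder mapper interface
        for (ExampleRecord record : records) {
            dao.insertRecord(record);                         // statements are only queued in BATCH mode
        }
        List<BatchResult> results = session.flushStatements(); // send the queued batch to the database
        session.commit();                                     // make the flushed changes permanent
        return results.size();
    } catch (Exception e) {
        session.rollback();   // discard pending batch statements and roll the connection back
        throw e;
    } finally {
        session.close();      // always release the session and its connection
    }
}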
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.AppversionServiceImpl.java
License:Apache License
public int deleteAppversionMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    AppversionDao dao = session.getMapper(AppversionDao.class);
    boolean has_error = false;
    try {
        dao.deleteAppversionMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryNewuserServiceImpl.java
License:Apache License
public int insertBatchCountryNewuserDay(List<CountryNewuserAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryNewuserDao dao = session.getMapper(CountryNewuserDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryNewuserAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryNewuserAll insert = it.next();
                dao.insertCountryNewuserDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryNewuserServiceImpl.java
License:Apache License
public int deleteCountryNewuserDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryNewuserDao dao = session.getMapper(CountryNewuserDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryNewuserDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryNewuserServiceImpl.java
License:Apache License
public int insertBatchCountryNewuserWeek(List<CountryNewuserAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryNewuserDao dao = session.getMapper(CountryNewuserDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryNewuserAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryNewuserAll insert = it.next();
                dao.insertCountryNewuserWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryNewuserServiceImpl.java
License:Apache License
public int deleteCountryNewuserWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryNewuserDao dao = session.getMapper(CountryNewuserDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryNewuserWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryNewuserServiceImpl.java
License:Apache License
public int insertBatchCountryNewuserMonth(List<CountryNewuserAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryNewuserDao dao = session.getMapper(CountryNewuserDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryNewuserAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryNewuserAll insert = it.next();
                dao.insertCountryNewuserMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryNewuserServiceImpl.java
License:Apache License
public int deleteCountryNewuserMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryNewuserDao dao = session.getMapper(CountryNewuserDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryNewuserMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryPageviewServiceImpl.java
License:Apache License
public int insertBatchCountryPageviewDay(List<CountryPageviewAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryPageviewDao dao = session.getMapper(CountryPageviewDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryPageviewAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryPageviewAll insert = it.next();
                dao.insertCountryPageviewDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryPageviewServiceImpl.java
License:Apache License
public int deleteCountryPageviewDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryPageviewDao dao = session.getMapper(CountryPageviewDao.class);
    boolean has_error = false;
    try {
        dao.deleteCountryPageviewDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.CountryPageviewServiceImpl.java
License:Apache License
public int insertBatchCountryPageviewWeek(List<CountryPageviewAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    CountryPageviewDao dao = session.getMapper(CountryPageviewDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CountryPageviewAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CountryPageviewAll insert = it.next();
                dao.insertCountryPageviewWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}