Usage examples for `org.apache.ibatis.session.SqlSession#rollback()`
void rollback();
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java
License:Apache License
public int deleteLanguageDayByDate(String year, String month, String day) throws Exception { SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); LanguageDao dao = session.getMapper(LanguageDao.class); boolean has_error = false; try {// ww w .ja v a2 s . co m dao.deleteLanguageDayByKey(year, month, day, "", ""); List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java
License:Apache License
public int insertBatchLanguageWeek(List<LanguageAll> in_volist) throws Exception { if (in_volist == null) { return 0; }// w w w . ja va 2 s. c o m SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); LanguageDao dao = session.getMapper(LanguageDao.class); boolean has_error = false; try { if (in_volist != null) { Iterator<LanguageAll> it = in_volist.iterator(); while (it.hasNext()) { LanguageAll insert = it.next(); dao.insertLanguageWeek(insert); } } List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java
License:Apache License
public int deleteLanguageWeekByDate(String year, String week) throws Exception { SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); LanguageDao dao = session.getMapper(LanguageDao.class); boolean has_error = false; try {//from w w w.j a v a 2 s . c om dao.deleteLanguageWeekByKey(year, week, "", ""); List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java
License:Apache License
public int insertBatchLanguageMonth(List<LanguageAll> in_volist) throws Exception { if (in_volist == null) { return 0; }//from w w w .j a v a2 s . c o m SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); LanguageDao dao = session.getMapper(LanguageDao.class); boolean has_error = false; try { if (in_volist != null) { Iterator<LanguageAll> it = in_volist.iterator(); while (it.hasNext()) { LanguageAll insert = it.next(); dao.insertLanguageMonth(insert); } } List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.LanguageServiceImpl.java
License:Apache License
/**
 * Deletes language-month rows for the given (year, month) key in a batch session.
 *
 * @param year  year component of the key
 * @param month month component of the key
 * @return 1 on success (0 is unreachable here; kept for interface compatibility)
 * @throws Exception if the delete or commit fails; the transaction is rolled back first
 */
public int deleteLanguageMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    try {
        LanguageDao dao = session.getMapper(LanguageDao.class);
        // Trailing "" args appear to be wildcard key parts — TODO confirm against the mapper XML.
        dao.deleteLanguageMonthByKey(year, month, "", "");
        session.flushStatements();
        // Commit inside try so a commit failure rolls back and the session is still
        // closed exactly once by finally. (The original committed in finally — a commit
        // exception skipped close() and leaked the session — and double-closed on error.)
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java
License:Apache License
/**
 * Inserts a list of osversion-day rows in one batch transaction.
 *
 * @param in_volist rows to insert; a null list is treated as a no-op
 * @return 1 on success, 0 when {@code in_volist} is null
 * @throws Exception if any insert or the commit fails; the transaction is rolled back first
 */
public int insertBatchOsversionDay(List<OsversionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    try {
        OsversionDao dao = session.getMapper(OsversionDao.class);
        // Enhanced-for replaces the manual Iterator; the original's second
        // in_volist != null check was dead code after the early return above.
        for (OsversionAll insert : in_volist) {
            dao.insertOsversionDay(insert);
        }
        session.flushStatements();
        // Commit inside try so a commit failure rolls back and the session is still
        // closed exactly once by finally. (The original committed in finally — a commit
        // exception skipped close() and leaked the session — and double-closed on error.)
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java
License:Apache License
public int deleteOsversionDayByDate(String year, String month, String day) throws Exception { SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); OsversionDao dao = session.getMapper(OsversionDao.class); boolean has_error = false; try {// w ww. j av a 2s . c o m dao.deleteOsversionDayByKey(year, month, day, "", ""); List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java
License:Apache License
/**
 * Inserts a list of osversion-week rows in one batch transaction.
 *
 * @param in_volist rows to insert; a null list is treated as a no-op
 * @return 1 on success, 0 when {@code in_volist} is null
 * @throws Exception if any insert or the commit fails; the transaction is rolled back first
 */
public int insertBatchOsversionWeek(List<OsversionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    try {
        OsversionDao dao = session.getMapper(OsversionDao.class);
        // Enhanced-for replaces the manual Iterator; the original's second
        // in_volist != null check was dead code after the early return above.
        for (OsversionAll insert : in_volist) {
            dao.insertOsversionWeek(insert);
        }
        session.flushStatements();
        // Commit inside try so a commit failure rolls back and the session is still
        // closed exactly once by finally. (The original committed in finally — a commit
        // exception skipped close() and leaked the session — and double-closed on error.)
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java
License:Apache License
/**
 * Deletes osversion-week rows for the given (year, week) key in a batch session.
 *
 * @param year year component of the key
 * @param week week component of the key
 * @return 1 on success (0 is unreachable here; kept for interface compatibility)
 * @throws Exception if the delete or commit fails; the transaction is rolled back first
 */
public int deleteOsversionWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    try {
        OsversionDao dao = session.getMapper(OsversionDao.class);
        // Trailing "" args appear to be wildcard key parts — TODO confirm against the mapper XML.
        dao.deleteOsversionWeekByKey(year, week, "", "");
        session.flushStatements();
        // Commit inside try so a commit failure rolls back and the session is still
        // closed exactly once by finally. (The original committed in finally — a commit
        // exception skipped close() and leaked the session — and double-closed on error.)
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.distribution.service.OsversionServiceImpl.java
License:Apache License
public int insertBatchOsversionMonth(List<OsversionAll> in_volist) throws Exception { if (in_volist == null) { return 0; }//from w ww . j ava 2s .c o m SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); OsversionDao dao = session.getMapper(OsversionDao.class); boolean has_error = false; try { if (in_volist != null) { Iterator<OsversionAll> it = in_volist.iterator(); while (it.hasNext()) { OsversionAll insert = it.next(); dao.insertOsversionMonth(insert); } } List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }