List of usage examples for org.apache.ibatis.session SqlSession rollback
void rollback();
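All of the examples below follow the same pattern: a batch-mode SqlSession is opened with auto-commit disabled, mapper calls are flushed, and rollback() is invoked when an exception occurs before the session is closed. A minimal sketch of that pattern, assuming a hypothetical UserMapper interface with an insertUser method and an already-configured SqlSessionFactory (both names are illustrative, not from the examples below):

import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class RollbackSketch {

    // Hypothetical mapper interface; any mapper registered with the factory works the same way.
    public interface UserMapper {
        int insertUser(String name);
    }

    public static void insertWithRollback(SqlSessionFactory factory, String name) throws Exception {
        // Open the session with auto-commit disabled so rollback() can discard pending work.
        SqlSession session = factory.openSession(ExecutorType.BATCH, false);
        try {
            UserMapper mapper = session.getMapper(UserMapper.class);
            mapper.insertUser(name);
            session.flushStatements(); // push batched statements to the database
            session.commit();          // keep the changes
        } catch (Exception e) {
            session.rollback();        // discard any uncommitted changes
            throw e;
        } finally {
            session.close();           // always release the underlying connection
        }
    }
}

Because the session is opened with auto-commit off, nothing is made permanent until commit() runs; if any statement in the batch fails, rollback() discards the pending work before the session is closed.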
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentCountryServiceImpl.java
License:Apache License
public int deleteComponentCountryMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentCountryDao dao = session.getMapper(ComponentCountryDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoCountryMonthByKey(year, month, "", "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentDeviceServiceImpl.java
License:Apache License
public int insertBatchComponentDeviceDay(List<CompoDeviceAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentDeviceDao dao = session.getMapper(ComponentDeviceDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoDeviceAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoDeviceAll insert = it.next();
                dao.insertCompoDeviceDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentDeviceServiceImpl.java
License:Apache License
public int deleteComponentDeviceDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentDeviceDao dao = session.getMapper(ComponentDeviceDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoDeviceDayByKey(year, month, day, "", "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentDeviceServiceImpl.java
License:Apache License
public int insertBatchComponentDeviceWeek(List<CompoDeviceAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentDeviceDao dao = session.getMapper(ComponentDeviceDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoDeviceAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoDeviceAll insert = it.next();
                dao.insertCompoDeviceWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentDeviceServiceImpl.java
License:Apache License
public int deleteComponentDeviceWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentDeviceDao dao = session.getMapper(ComponentDeviceDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoDeviceWeekByKey(year, week, "", "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentDeviceServiceImpl.java
License:Apache License
public int insertBatchComponentDeviceMonth(List<CompoDeviceAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentDeviceDao dao = session.getMapper(ComponentDeviceDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoDeviceAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoDeviceAll insert = it.next();
                dao.insertCompoDeviceMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentDeviceServiceImpl.java
License:Apache License
public int deleteComponentDeviceMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentDeviceDao dao = session.getMapper(ComponentDeviceDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoDeviceMonthByKey(year, month, "", "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentFrequencyServiceImpl.java
License:Apache License
public int insertBatchComponentFrequencyDay(List<CompoFrequencyAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentFrequencyDao dao = session.getMapper(ComponentFrequencyDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoFrequencyAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoFrequencyAll insert = it.next();
                dao.insertCompoFrequencyDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentFrequencyServiceImpl.java
License:Apache License
public int deleteComponentFrequencyDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentFrequencyDao dao = session.getMapper(ComponentFrequencyDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoFrequencyDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentFrequencyServiceImpl.java
License:Apache License
public int insertBatchComponentFrequencyWeek(List<CompoFrequencyAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentFrequencyDao dao = session.getMapper(ComponentFrequencyDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoFrequencyAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoFrequencyAll insert = it.next();
                dao.insertCompoFrequencyWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}