List of usage examples for org.apache.ibatis.session.SqlSession#flushStatements()
List<BatchResult> flushStatements();
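flushStatements() executes any statements queued on a batch-mode session (ExecutorType.BATCH) and returns the pending results as a list of BatchResult objects carrying the JDBC update counts. Nothing is sent to the database by the mapper calls themselves until the session is flushed or committed. The sketch below is a minimal, hypothetical usage of the same pattern shown in the examples that follow, using try-with-resources; UserMapper, User and insertUser are placeholder names and are not taken from the listed source files.

import java.util.List;

import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class BatchInsertExample {

    // UserMapper and User are hypothetical; substitute your own mapper and model.
    public int[] insertUsers(SqlSessionFactory factory, List<User> users) {
        try (SqlSession session = factory.openSession(ExecutorType.BATCH, false)) {
            UserMapper mapper = session.getMapper(UserMapper.class);
            for (User user : users) {
                mapper.insertUser(user); // queued on the batch, not yet executed
            }
            // Execute the queued JDBC batch and collect the results.
            List<BatchResult> results = session.flushStatements();
            session.commit();
            return results.isEmpty() ? new int[0] : results.get(0).getUpdateCounts();
        }
    }
}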
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentSessionServiceImpl.java
License:Apache License
public int insertBatchComponentSessionWeek(List<CompoSessionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentSessionDao dao = session.getMapper(ComponentSessionDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoSessionAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoSessionAll insert = it.next();
                dao.insertCompoSessionWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentSessionServiceImpl.java
License:Apache License
public int deleteComponentSessionWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentSessionDao dao = session.getMapper(ComponentSessionDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoSessionWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentSessionServiceImpl.java
License:Apache License
public int insertBatchComponentSessionMonth(List<CompoSessionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentSessionDao dao = session.getMapper(ComponentSessionDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoSessionAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoSessionAll insert = it.next();
                dao.insertCompoSessionMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentSessionServiceImpl.java
License:Apache License
public int deleteComponentSessionMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentSessionDao dao = session.getMapper(ComponentSessionDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoSessionMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentTimeServiceImpl.java
License:Apache License
public int insertBatchComponentTimeDay(List<CompoTimeAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentTimeDao dao = session.getMapper(ComponentTimeDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoTimeAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoTimeAll insert = it.next();
                dao.insertCompoTimeDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentTimeServiceImpl.java
License:Apache License
public int deleteComponentTimeDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentTimeDao dao = session.getMapper(ComponentTimeDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoTimeDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentTimeServiceImpl.java
License:Apache License
public int insertBatchComponentTimeWeek(List<CompoTimeAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentTimeDao dao = session.getMapper(ComponentTimeDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoTimeAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoTimeAll insert = it.next();
                dao.insertCompoTimeWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentTimeServiceImpl.java
License:Apache License
public int deleteComponentTimeWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentTimeDao dao = session.getMapper(ComponentTimeDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoTimeWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentTimeServiceImpl.java
License:Apache License
public int insertBatchComponentTimeMonth(List<CompoTimeAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentTimeDao dao = session.getMapper(ComponentTimeDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoTimeAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoTimeAll insert = it.next();
                dao.insertCompoTimeMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentTimeServiceImpl.java
License:Apache License
public int deleteComponentTimeMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentTimeDao dao = session.getMapper(ComponentTimeDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoTimeMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
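All of the examples above follow the same shape: open a session with ExecutorType.BATCH, queue mapper calls, call flushStatements() to execute the pending batch, and commit in the finally block. When the input list is very large it is also common to flush periodically so the pending JDBC batch stays bounded. The following is a hedged sketch of that variant, not taken from the source files above; RecordMapper, Record and the batch size of 500 are illustrative placeholders.

import java.util.List;

import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class ChunkedBatchExample {

    private static final int BATCH_SIZE = 500; // illustrative chunk size

    // RecordMapper and Record are placeholders for your own mapper and model.
    public void insertInChunks(SqlSessionFactory factory, List<Record> records) {
        try (SqlSession session = factory.openSession(ExecutorType.BATCH, false)) {
            RecordMapper mapper = session.getMapper(RecordMapper.class);
            int count = 0;
            for (Record record : records) {
                mapper.insert(record);
                if (++count % BATCH_SIZE == 0) {
                    // Periodically execute the pending batch so the statement
                    // queue and the driver's batch buffer stay bounded.
                    session.flushStatements();
                }
            }
            session.flushStatements(); // execute any remaining statements
            session.commit();
        }
    }
}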