List of usage examples for org.apache.ibatis.session.SqlSession.flushStatements()
List<BatchResult> flushStatements();
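Before the collected examples, here is a minimal sketch of the pattern they all follow: open a session with ExecutorType.BATCH, queue statements through a mapper, then call flushStatements() to execute the queued batch before committing. The SqlSessionFactory parameter and the UserMapper/insertUser names are hypothetical placeholders for illustration, not part of the source files listed below.

import java.util.List;
import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class BatchFlushSketch {

    // Hypothetical mapper; the insert statement would be defined in its XML or annotation mapping.
    public interface UserMapper {
        void insertUser(String name);
    }

    // Queue inserts on a BATCH session, then flush them as one batch and commit.
    public static int insertAll(SqlSessionFactory factory, List<String> names) {
        SqlSession session = factory.openSession(ExecutorType.BATCH, false);
        try {
            UserMapper mapper = session.getMapper(UserMapper.class);
            for (String name : names) {
                mapper.insertUser(name);                                // queued, not yet executed
            }
            List<BatchResult> results = session.flushStatements();     // send the queued batch to the database
            session.commit();
            return results.size();                                      // one BatchResult per distinct statement
        } catch (RuntimeException e) {
            session.rollback();
            throw e;
        } finally {
            session.close();
        }
    }
}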
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentNewuserServiceImpl.java
License:Apache License
public int insertBatchComponentNewuserDay(List<CompoNewuserAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentNewuserDao dao = session.getMapper(ComponentNewuserDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoNewuserAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoNewuserAll insert = it.next();
                dao.insertCompoNewuserDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentNewuserServiceImpl.java
License:Apache License
public int deleteComponentNewuserDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentNewuserDao dao = session.getMapper(ComponentNewuserDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoNewuserDayByKey(year, month, day, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentNewuserServiceImpl.java
License:Apache License
public int insertBatchComponentNewuserWeek(List<CompoNewuserAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentNewuserDao dao = session.getMapper(ComponentNewuserDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoNewuserAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoNewuserAll insert = it.next();
                dao.insertCompoNewuserWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentNewuserServiceImpl.java
License:Apache License
public int deleteComponentNewuserWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentNewuserDao dao = session.getMapper(ComponentNewuserDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoNewuserWeekByKey(year, week, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentNewuserServiceImpl.java
License:Apache License
public int insertBatchComponentNewuserMonth(List<CompoNewuserAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentNewuserDao dao = session.getMapper(ComponentNewuserDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoNewuserAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoNewuserAll insert = it.next();
                dao.insertCompoNewuserMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentNewuserServiceImpl.java
License:Apache License
public int deleteComponentNewuserMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentNewuserDao dao = session.getMapper(ComponentNewuserDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoNewuserMonthByKey(year, month, "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentOsversionServiceImpl.java
License:Apache License
public int insertBatchComponentOsversionDay(List<CompoOsversionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentOsversionDao dao = session.getMapper(ComponentOsversionDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoOsversionAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoOsversionAll insert = it.next();
                dao.insertCompoOsversionDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentOsversionServiceImpl.java
License:Apache License
public int deleteComponentOsversionDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentOsversionDao dao = session.getMapper(ComponentOsversionDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoOsversionDayByKey(year, month, day, "", "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentOsversionServiceImpl.java
License:Apache License
public int insertBatchComponentOsversionWeek(List<CompoOsversionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentOsversionDao dao = session.getMapper(ComponentOsversionDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<CompoOsversionAll> it = in_volist.iterator();
            while (it.hasNext()) {
                CompoOsversionAll insert = it.next();
                dao.insertCompoOsversionWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file:ph.fingra.hadoop.dbms.parts.component.service.ComponentOsversionServiceImpl.java
License:Apache License
public int deleteComponentOsversionWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    ComponentOsversionDao dao = session.getMapper(ComponentOsversionDao.class);
    boolean has_error = false;
    try {
        dao.deleteCompoOsversionWeekByKey(year, week, "", "", "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}