List of usage examples for org.apache.ibatis.session SqlSession flushStatements
List<BatchResult> flushStatements();
From source file:ph.fingra.hadoop.dbms.parts.performance.service.PageviewServiceImpl.java
License:Apache License
/**
 * Inserts a batch of monthly pageview rows using a MyBatis BATCH session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing-to-do
 * @return 1 on success, 0 when {@code in_volist} is null (an error propagates as an exception)
 * @throws Exception rethrown after rollback when any batch statement fails
 */
public int insertBatchPageviewMonth(List<PageviewAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    PageviewDao dao = session.getMapper(PageviewDao.class);
    try {
        for (PageviewAll insert : in_volist) {
            dao.insertPageviewMonth(insert);
        }
        // Execute the queued batch statements; the BatchResult list is not inspected.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Close exactly once, on both success and failure paths
        // (the original closed in catch AND finally, closing twice on error).
        session.close();
    }
    return 1;
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.PageviewServiceImpl.java
License:Apache License
/**
 * Deletes monthly pageview rows for the given year/month via a MyBatis BATCH session.
 *
 * @param year  target year key
 * @param month target month key
 * @return 1 on success (an error propagates as an exception)
 * @throws Exception rethrown after rollback when the delete fails
 */
public int deletePageviewMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    PageviewDao dao = session.getMapper(PageviewDao.class);
    try {
        // Empty string means "all values" for the trailing key column — TODO confirm against mapper XML.
        dao.deletePageviewMonthByKey(year, month, "");
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Close exactly once (the original also closed inside catch, causing a double close on error).
        session.close();
    }
    return 1;
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthSectionServiceImpl.java
License:Apache License
/**
 * Inserts a batch of hourly session-length-section rows using a MyBatis BATCH session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing-to-do
 * @return 1 on success, 0 when {@code in_volist} is null (an error propagates as an exception)
 * @throws Exception rethrown after rollback when any batch statement fails
 */
public int insertBatchSessionlengthSectionHour(List<SessionlengthSectionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthSectionDao dao = session.getMapper(SessionlengthSectionDao.class);
    try {
        for (SessionlengthSectionAll insert : in_volist) {
            dao.insertSessionlengthSectionHour(insert);
        }
        // Execute the queued batch statements; the BatchResult list is not inspected.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Close exactly once, on both success and failure paths.
        session.close();
    }
    return 1;
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthSectionServiceImpl.java
License:Apache License
public int deleteSessionlengthSectionHourByDate(String year, String month, String day, String hour) throws Exception { SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); SessionlengthSectionDao dao = session.getMapper(SessionlengthSectionDao.class); boolean has_error = false; try {//from w ww.ja v a2 s .c om dao.deleteSessionlengthSectionHourByKey(year, month, day, hour, ""); List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthSectionServiceImpl.java
License:Apache License
public int insertBatchSessionlengthSectionDay(List<SessionlengthSectionAll> in_volist) throws Exception { if (in_volist == null) { return 0; }//from w w w. j a v a 2 s .c o m SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); SessionlengthSectionDao dao = session.getMapper(SessionlengthSectionDao.class); boolean has_error = false; try { if (in_volist != null) { Iterator<SessionlengthSectionAll> it = in_volist.iterator(); while (it.hasNext()) { SessionlengthSectionAll insert = it.next(); dao.insertSessionlengthSectionDay(insert); } } List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthSectionServiceImpl.java
License:Apache License
public int deleteSessionlengthSectionDayByDate(String year, String month, String day) throws Exception { SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); SessionlengthSectionDao dao = session.getMapper(SessionlengthSectionDao.class); boolean has_error = false; try {//ww w .j a v a 2 s. co m dao.deleteSessionlengthSectionDayByKey(year, month, day, ""); List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthSectionServiceImpl.java
License:Apache License
/**
 * Inserts a batch of weekly session-length-section rows using a MyBatis BATCH session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing-to-do
 * @return 1 on success, 0 when {@code in_volist} is null (an error propagates as an exception)
 * @throws Exception rethrown after rollback when any batch statement fails
 */
public int insertBatchSessionlengthSectionWeek(List<SessionlengthSectionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthSectionDao dao = session.getMapper(SessionlengthSectionDao.class);
    try {
        for (SessionlengthSectionAll insert : in_volist) {
            dao.insertSessionlengthSectionWeek(insert);
        }
        // Execute the queued batch statements; the BatchResult list is not inspected.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Close exactly once, on both success and failure paths.
        session.close();
    }
    return 1;
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthSectionServiceImpl.java
License:Apache License
/**
 * Deletes weekly session-length-section rows for the given year/week via a MyBatis BATCH session.
 *
 * @param year target year key
 * @param week target week key
 * @return 1 on success (an error propagates as an exception)
 * @throws Exception rethrown after rollback when the delete fails
 */
public int deleteSessionlengthSectionWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthSectionDao dao = session.getMapper(SessionlengthSectionDao.class);
    try {
        // Empty string means "all values" for the trailing key column — TODO confirm against mapper XML.
        dao.deleteSessionlengthSectionWeekByKey(year, week, "");
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Close exactly once (the original also closed inside catch, causing a double close on error).
        session.close();
    }
    return 1;
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthSectionServiceImpl.java
License:Apache License
/**
 * Inserts a batch of monthly session-length-section rows using a MyBatis BATCH session.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing-to-do
 * @return 1 on success, 0 when {@code in_volist} is null (an error propagates as an exception)
 * @throws Exception rethrown after rollback when any batch statement fails
 */
public int insertBatchSessionlengthSectionMonth(List<SessionlengthSectionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthSectionDao dao = session.getMapper(SessionlengthSectionDao.class);
    try {
        for (SessionlengthSectionAll insert : in_volist) {
            dao.insertSessionlengthSectionMonth(insert);
        }
        // Execute the queued batch statements; the BatchResult list is not inspected.
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Close exactly once, on both success and failure paths.
        session.close();
    }
    return 1;
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthSectionServiceImpl.java
License:Apache License
/**
 * Deletes monthly session-length-section rows for the given year/month via a MyBatis BATCH session.
 *
 * @param year  target year key
 * @param month target month key
 * @return 1 on success (an error propagates as an exception)
 * @throws Exception rethrown after rollback when the delete fails
 */
public int deleteSessionlengthSectionMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthSectionDao dao = session.getMapper(SessionlengthSectionDao.class);
    try {
        // Empty string means "all values" for the trailing key column — TODO confirm against mapper XML.
        dao.deleteSessionlengthSectionMonthByKey(year, month, "");
        session.flushStatements();
        session.commit();
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Close exactly once (the original also closed inside catch, causing a double close on error).
        session.close();
    }
    return 1;
}