List of usage examples for org.apache.ibatis.session SqlSession flushStatements
List<BatchResult> flushStatements();
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthServiceImpl.java
License:Apache License
public int insertBatchSessionlengthHour(List<SessionlengthAll> in_volist) throws Exception { if (in_volist == null) { return 0; }//from w w w . j a v a2 s .c om SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); SessionlengthDao dao = session.getMapper(SessionlengthDao.class); boolean has_error = false; try { if (in_volist != null) { Iterator<SessionlengthAll> it = in_volist.iterator(); while (it.hasNext()) { SessionlengthAll insert = it.next(); dao.insertSessionlengthHour(insert); } } List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthServiceImpl.java
License:Apache License
/**
 * Deletes hourly session-length rows matching the given date key.
 *
 * @param year  4-digit year string
 * @param month month string
 * @param day   day string
 * @param hour  hour string
 * @return 1 on success
 * @throws Exception if the delete fails; the transaction is rolled back first
 */
public int deleteSessionlengthHourByDate(String year, String month, String day, String hour) throws Exception {
    // Batch executor with manual commit.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthDao dao = session.getMapper(SessionlengthDao.class);
    try {
        dao.deleteSessionlengthHourByKey(year, month, day, hour, "");
        // Execute the buffered batch statements before committing.
        session.flushStatements();
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point — the original closed in both catch and finally.
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthServiceImpl.java
License:Apache License
/**
 * Batch-inserts daily session-length rows.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is null
 * @throws Exception if any insert fails; the batch is rolled back first
 */
public int insertBatchSessionlengthDay(List<SessionlengthAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    // Batch executor with manual commit so all rows go in one transaction.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthDao dao = session.getMapper(SessionlengthDao.class);
    try {
        for (SessionlengthAll insert : in_volist) {
            dao.insertSessionlengthDay(insert);
        }
        // Execute the buffered batch statements before committing.
        session.flushStatements();
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point — the original closed in both catch and finally.
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthServiceImpl.java
License:Apache License
/**
 * Deletes daily session-length rows matching the given date key.
 *
 * @param year  4-digit year string
 * @param month month string
 * @param day   day string
 * @return 1 on success
 * @throws Exception if the delete fails; the transaction is rolled back first
 */
public int deleteSessionlengthDayByDate(String year, String month, String day) throws Exception {
    // Batch executor with manual commit.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthDao dao = session.getMapper(SessionlengthDao.class);
    try {
        dao.deleteSessionlengthDayByKey(year, month, day, "");
        // Execute the buffered batch statements before committing.
        session.flushStatements();
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point — the original closed in both catch and finally.
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthServiceImpl.java
License:Apache License
public int insertBatchSessionlengthWeek(List<SessionlengthAll> in_volist) throws Exception { if (in_volist == null) { return 0; }//from w w w . jav a2 s. c o m SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); SessionlengthDao dao = session.getMapper(SessionlengthDao.class); boolean has_error = false; try { if (in_volist != null) { Iterator<SessionlengthAll> it = in_volist.iterator(); while (it.hasNext()) { SessionlengthAll insert = it.next(); dao.insertSessionlengthWeek(insert); } } List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthServiceImpl.java
License:Apache License
/**
 * Deletes weekly session-length rows matching the given year/week key.
 *
 * @param year 4-digit year string
 * @param week week-of-year string
 * @return 1 on success
 * @throws Exception if the delete fails; the transaction is rolled back first
 */
public int deleteSessionlengthWeekByDate(String year, String week) throws Exception {
    // Batch executor with manual commit.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthDao dao = session.getMapper(SessionlengthDao.class);
    try {
        dao.deleteSessionlengthWeekByKey(year, week, "");
        // Execute the buffered batch statements before committing.
        session.flushStatements();
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point — the original closed in both catch and finally.
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthServiceImpl.java
License:Apache License
/**
 * Batch-inserts monthly session-length rows.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is null
 * @throws Exception if any insert fails; the batch is rolled back first
 */
public int insertBatchSessionlengthMonth(List<SessionlengthAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    // Batch executor with manual commit so all rows go in one transaction.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionlengthDao dao = session.getMapper(SessionlengthDao.class);
    try {
        for (SessionlengthAll insert : in_volist) {
            dao.insertSessionlengthMonth(insert);
        }
        // Execute the buffered batch statements before committing.
        session.flushStatements();
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point — the original closed in both catch and finally.
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionlengthServiceImpl.java
License:Apache License
public int deleteSessionlengthMonthByDate(String year, String month) throws Exception { SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false); SessionlengthDao dao = session.getMapper(SessionlengthDao.class); boolean has_error = false; try {// w w w . j a v a2 s.co m dao.deleteSessionlengthMonthByKey(year, month, ""); List<BatchResult> results = session.flushStatements(); results.clear(); } catch (Exception e) { has_error = true; session.rollback(); session.close(); throw e; } finally { if (has_error == false) session.commit(); session.close(); } return (has_error == false) ? 1 : 0; }
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionServiceImpl.java
License:Apache License
/**
 * Batch-inserts hourly session rows.
 *
 * @param in_volist rows to insert; {@code null} is treated as nothing to do
 * @return 1 on success, 0 when {@code in_volist} is null
 * @throws Exception if any insert fails; the batch is rolled back first
 */
public int insertBatchSessionHour(List<SessionAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    // Batch executor with manual commit so all rows go in one transaction.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionDao dao = session.getMapper(SessionDao.class);
    try {
        for (SessionAll insert : in_volist) {
            dao.insertSessionHour(insert);
        }
        // Execute the buffered batch statements before committing.
        session.flushStatements();
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point — the original closed in both catch and finally.
        session.close();
    }
}
From source file:ph.fingra.hadoop.dbms.parts.performance.service.SessionServiceImpl.java
License:Apache License
/**
 * Deletes hourly session rows matching the given date key.
 *
 * @param year  4-digit year string
 * @param month month string
 * @param day   day string
 * @param hour  hour string
 * @return 1 on success
 * @throws Exception if the delete fails; the transaction is rolled back first
 */
public int deleteSessionHourByDate(String year, String month, String day, String hour) throws Exception {
    // Batch executor with manual commit.
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    SessionDao dao = session.getMapper(SessionDao.class);
    try {
        dao.deleteSessionHourByKey(year, month, day, hour, "");
        // Execute the buffered batch statements before committing.
        session.flushStatements();
        session.commit();
        return 1;
    } catch (Exception e) {
        session.rollback();
        throw e;
    } finally {
        // Single close point — the original closed in both catch and finally.
        session.close();
    }
}