List of usage examples for org.apache.ibatis.session.SqlSession#flushStatements()
List<BatchResult> flushStatements();
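Before the per-project examples below, here is a minimal sketch of the recurring pattern: a session opened with ExecutorType.BATCH queues its write statements, flushStatements() sends the queued batch to the database and returns one BatchResult per distinct statement, and commit() makes the work permanent. The SqlSessionFactory, UserMapper interface, insertUser method, and User class in this sketch are hypothetical placeholders for illustration only; they are not part of the example sources listed afterwards.

import java.util.List;

import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

public class FlushStatementsSketch {

    // Hypothetical mapper and model types, assumed to be configured in MyBatis.
    public interface UserMapper {
        int insertUser(User user);
    }

    public static class User {
        public final String name;
        public User(String name) { this.name = name; }
    }

    public static void insertAll(SqlSessionFactory factory, List<User> users) {
        // A BATCH-mode session queues writes instead of executing them immediately.
        try (SqlSession session = factory.openSession(ExecutorType.BATCH, false)) {
            UserMapper mapper = session.getMapper(UserMapper.class);
            for (User user : users) {
                mapper.insertUser(user); // queued until the next flush or commit
            }
            // flushStatements() executes the queued batch and reports
            // the JDBC update counts for each batched statement.
            List<BatchResult> results = session.flushStatements();
            for (BatchResult result : results) {
                System.out.println(result.getSql() + ": "
                        + result.getUpdateCounts().length + " statements batched");
            }
            session.commit();
        } // closing an uncommitted session discards any unflushed work
    }
}

The examples that follow use the same pattern, but with explicit commit, rollback, and close calls instead of try-with-resources.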
From source file: ph.fingra.hadoop.dbms.parts.performance.service.FrequencyServiceImpl.java
License: Apache License

public int insertBatchFrequencyDay(List<FrequencyAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    FrequencyDao dao = session.getMapper(FrequencyDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<FrequencyAll> it = in_volist.iterator();
            while (it.hasNext()) {
                FrequencyAll insert = it.next();
                dao.insertFrequencyDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.FrequencyServiceImpl.java
License: Apache License

public int deleteFrequencyDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    FrequencyDao dao = session.getMapper(FrequencyDao.class);
    boolean has_error = false;
    try {
        dao.deleteFrequencyDayByKey(year, month, day, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.FrequencyServiceImpl.java
License: Apache License

public int insertBatchFrequencyWeek(List<FrequencyAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    FrequencyDao dao = session.getMapper(FrequencyDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<FrequencyAll> it = in_volist.iterator();
            while (it.hasNext()) {
                FrequencyAll insert = it.next();
                dao.insertFrequencyWeek(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.FrequencyServiceImpl.java
License: Apache License

public int deleteFrequencyWeekByDate(String year, String week) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    FrequencyDao dao = session.getMapper(FrequencyDao.class);
    boolean has_error = false;
    try {
        dao.deleteFrequencyWeekByKey(year, week, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.FrequencyServiceImpl.java
License: Apache License

public int insertBatchFrequencyMonth(List<FrequencyAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    FrequencyDao dao = session.getMapper(FrequencyDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<FrequencyAll> it = in_volist.iterator();
            while (it.hasNext()) {
                FrequencyAll insert = it.next();
                dao.insertFrequencyMonth(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.FrequencyServiceImpl.java
License: Apache License

public int deleteFrequencyMonthByDate(String year, String month) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    FrequencyDao dao = session.getMapper(FrequencyDao.class);
    boolean has_error = false;
    try {
        dao.deleteFrequencyMonthByKey(year, month, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.NewuserServiceImpl.java
License: Apache License

public int insertBatchNewuserHour(List<NewuserAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    NewuserDao dao = session.getMapper(NewuserDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<NewuserAll> it = in_volist.iterator();
            while (it.hasNext()) {
                NewuserAll insert = it.next();
                dao.insertNewuserHour(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.NewuserServiceImpl.java
License: Apache License

public int deleteNewuserHourByDate(String year, String month, String day, String hour) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    NewuserDao dao = session.getMapper(NewuserDao.class);
    boolean has_error = false;
    try {
        dao.deleteNewuserHourByKey(year, month, day, hour, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.NewuserServiceImpl.java
License: Apache License

public int insertBatchNewuserDay(List<NewuserAll> in_volist) throws Exception {
    if (in_volist == null) {
        return 0;
    }
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    NewuserDao dao = session.getMapper(NewuserDao.class);
    boolean has_error = false;
    try {
        if (in_volist != null) {
            Iterator<NewuserAll> it = in_volist.iterator();
            while (it.hasNext()) {
                NewuserAll insert = it.next();
                dao.insertNewuserDay(insert);
            }
        }
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}
From source file: ph.fingra.hadoop.dbms.parts.performance.service.NewuserServiceImpl.java
License: Apache License

public int deleteNewuserDayByDate(String year, String month, String day) throws Exception {
    SqlSession session = ConnectionFactory.getSession().openSession(ExecutorType.BATCH, false);
    NewuserDao dao = session.getMapper(NewuserDao.class);
    boolean has_error = false;
    try {
        dao.deleteNewuserDayByKey(year, month, day, "");
        List<BatchResult> results = session.flushStatements();
        results.clear();
    } catch (Exception e) {
        has_error = true;
        session.rollback();
        session.close();
        throw e;
    } finally {
        if (has_error == false)
            session.commit();
        session.close();
    }
    return (has_error == false) ? 1 : 0;
}