Example usage for org.apache.ibatis.session SqlSession close

List of usage examples for org.apache.ibatis.session SqlSession close

Introduction

On this page you can find example usage for org.apache.ibatis.session SqlSession close.

Prototype

@Override
void close();

Source Link

Document

Closes the session.

Usage

From source file:com.itfsw.mybatis.generator.plugins.tools.AbstractShellCallback.java

License:Apache License

/**
 * ?/*from  w  w w  .j  a v  a 2  s.  c  om*/
 * @param project
 */
@Override
public void refreshProject(String project) {
    SqlSession sqlSession = null;
    try {
        // 
        sqlSession = tool.compile();
        reloadProject(sqlSession, tool.getTargetClassLoader(), tool.getTargetPackage());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.assertTrue(false);
    } finally {
        sqlSession.close();
    }
}

From source file:com.jwmoon.framework.sql.MyBatisQueryResolver.java

License:Open Source License

/**
 * Resolves the final SQL text for a mapped statement using a short-lived session.
 *
 * @param queryId     MyBatis mapped statement id
 * @param paramObject parameter object bound into the statement
 * @return the resolved SQL string
 */
public String getQuery(String queryId, Object paramObject) {
    SqlSession session = sqlSessionFactory.openSession();
    try {
        return resolveQuery(session, queryId, paramObject);
    } finally {
        // Release the session even when resolveQuery throws.
        session.close();
    }
}

From source file:com.luxoft.mybatis.splitter.UpdateSplitterPluginTest.java

License:Apache License

/**
 * Exercises the update splitter end-to-end for the given executor type:
 * creates a table, inserts rows, verifies their contents, then drops the table.
 *
 * @param execType executor type under test (e.g. SIMPLE, BATCH)
 * @throws IOException  if the configuration resource cannot be read
 * @throws SQLException if a statement fails
 */
public void splitterTest(ExecutorType execType) throws IOException, SQLException {
    SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder()
            .build(Resources.getResourceAsStream("configuration.xml"));
    SqlSession sqlSession = sqlSessionFactory.openSession(execType);
    try {
        sqlSession.insert("makeTable");
        sqlSession.flushStatements();
        doInsert(sqlSession);
        Assert.assertEquals(Arrays.asList("first", "second", "third"), sqlSession.selectList("get"));
        sqlSession.insert("dropTable");
        sqlSession.flushStatements();
    } finally {
        // Close even when a statement or assertion fails, releasing the connection.
        sqlSession.close();
    }
}

From source file:com.luxoft.mybatis.splitter.UpdateSplitterPluginTest.java

License:Apache License

/**
 * Verifies the splitter's JDBC interaction against mocked Connection and
 * PreparedStatement objects: each inserted value must be bound and batched,
 * and closing the session must close the statement and connection.
 */
@Test
public void mockTest() throws IOException, SQLException {
    // Record the expected JDBC call sequence on the mocks.
    expect(connection.getAutoCommit()).andStubReturn(false);
    expect(connection.prepareStatement("insert into test values(?)")).andReturn(statement);
    statement.setString(1, "first");
    statement.addBatch();
    statement.setString(1, "second");
    statement.addBatch();
    statement.setString(1, "third");
    statement.addBatch();
    expect(statement.executeBatch()).andStubReturn(new int[] { 1 });
    statement.close();
    connection.setAutoCommit(true);
    connection.rollback();
    connection.close();

    replay();

    SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder()
            .build(Resources.getResourceAsStream("configuration.xml"));
    SqlSession sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH, connection);
    try {
        doInsert(sqlSession);
    } finally {
        // Close in a finally so the session is released even when doInsert fails;
        // closing also triggers the expected statement/connection close calls.
        sqlSession.close();
    }
}

From source file:com.mirth.connect.plugins.datapruner.DataPruner.java

License:Open Source License

/**
 * Deletes events older than {@code maxEventAge} days and dispatches a server
 * event summarizing how many rows were pruned.
 */
private void pruneEvents() {
    logger.debug("Pruning events");
    status.setPruningEvents(true);

    try {
        status.setTaskStartTime(Calendar.getInstance());

        // Anything received before (today - maxEventAge days) is eligible.
        Calendar threshold = Calendar.getInstance();
        threshold.set(Calendar.DAY_OF_MONTH, threshold.get(Calendar.DAY_OF_MONTH) - maxEventAge);

        SqlSession session = SqlConfig.getSqlSessionManager().openSession(true);
        try {
            Map<String, Object> params = new HashMap<String, Object>();
            params.put("dateThreshold", threshold);

            int prunedCount = session.delete("Message.pruneEvents", params);

            // Report the outcome through the event controller.
            Map<String, String> eventAttributes = new HashMap<String, String>();
            eventAttributes.put("Events Pruned", Integer.toString(prunedCount));
            eventAttributes.put("Time Elapsed", getTimeElapsed());
            eventController.dispatchEvent(new ServerEvent(serverId, DataPrunerService.PLUGINPOINT,
                    Level.INFORMATION, Outcome.SUCCESS, eventAttributes));
        } finally {
            session.close();
        }
    } finally {
        status.setEndTime(Calendar.getInstance());
        status.setPruningEvents(false);
    }
}

From source file:com.mirth.connect.plugins.datapruner.DataPruner.java

License:Open Source License

/**
 * Pages through prunable messages, splitting their ids into full-prune
 * candidates ({@code messageIds}, past the date threshold) and content-only
 * candidates ({@code contentMessageIds}).
 *
 * @param params               query parameters for "Message.getMessagesToPrune"
 * @param messageDateThreshold cutoff for full pruning; may be null
 * @param messageIds           receives ids received before the threshold
 * @param contentMessageIds    receives the remaining ids
 * @throws InterruptedException if the pruner thread is interrupted
 */
private void getIdsToPrune(Map<String, Object> params, Calendar messageDateThreshold, PruneIds messageIds,
        PruneIds contentMessageIds) throws InterruptedException {
    long nextMinMessageId = 0;

    List<Map<String, Object>> rows;
    do {
        ThreadUtils.checkInterruptedStatus();

        // One short-lived session per page keeps connections from being held open.
        SqlSession session = SqlConfig.getSqlSessionManager().openSession(true);
        try {
            params.put("minMessageId", nextMinMessageId);
            rows = session.selectList("Message.getMessagesToPrune", params);
        } finally {
            session.close();
        }

        for (Map<String, Object> row : rows) {
            long receivedMillis = ((Calendar) row.get("mm_received_date")).getTimeInMillis();
            long messageId = (Long) row.get("id");

            boolean pastThreshold = messageDateThreshold != null
                    && receivedMillis < messageDateThreshold.getTimeInMillis();
            if (pastThreshold) {
                messageIds.add(messageId);
            } else {
                contentMessageIds.add(messageId);
            }
            // Advance the paging key past the last row seen.
            nextMinMessageId = messageId + 1;
        }
        // A short page means we've drained all matching rows.
    } while (rows != null && rows.size() == ID_RETRIEVE_LIMIT);
}

From source file:com.mirth.connect.plugins.datapruner.DataPruner.java

License:Open Source License

/**
 * Archives a channel's prunable messages to disk and partitions their ids into
 * {@code messageIds} (past the date threshold, eligible for full pruning) and
 * {@code contentMessageIds} (content-only pruning) — the same partitioning as
 * getIdsToPrune, but each message is written through the archiver first.
 *
 * @param params               query parameters for "Message.getMessagesToPrune"
 * @param channelId            channel whose messages are archived
 * @param messageDateThreshold cutoff for full pruning; may be null
 * @param archiveFolder        destination root for archive output
 * @param messageIds           receives ids received before the threshold
 * @param contentMessageIds    receives the remaining ids
 * @throws Throwable any failure; partial archive folders are deleted before rethrow
 */
private void archiveAndGetIdsToPrune(Map<String, Object> params, String channelId,
        Calendar messageDateThreshold, String archiveFolder, PruneIds messageIds, PruneIds contentMessageIds)
        throws Throwable {
    // Write into a hidden temp folder first; only expose it on success.
    String tempChannelFolder = archiveFolder + "/." + channelId;
    String finalChannelFolder = archiveFolder + "/" + channelId;

    try {
        // Clone so per-run adjustments don't mutate the shared archiver options.
        MessageWriterOptions messageWriterOptions = SerializationUtils.clone(archiverOptions);
        messageWriterOptions.setBaseFolder(System.getProperty("user.dir"));

        // No archive format => loose files under the temp folder; otherwise a
        // single archive file named after the channel.
        if (messageWriterOptions.getArchiveFormat() == null) {
            messageWriterOptions.setRootFolder(tempChannelFolder);
        } else {
            messageWriterOptions.setRootFolder(archiveFolder);
            messageWriterOptions.setArchiveFileName(channelId);
        }

        logger.debug("Running archiver, channel: " + channelId + ", root folder: "
                + messageWriterOptions.getRootFolder() + ", archive format: "
                + messageWriterOptions.getArchiveFormat() + ", archive filename: "
                + messageWriterOptions.getArchiveFileName() + ", file pattern: "
                + messageWriterOptions.getFilePattern());
        numExported = 0;
        status.setArchiving(true);
        MessageWriter archiver = MessageWriterFactory.getInstance().getMessageWriter(messageWriterOptions,
                ConfigurationController.getInstance().getEncryptor());

        // Attachment lookup is only wired up when the options request attachments.
        AttachmentSource attachmentSource = null;
        if (messageWriterOptions.includeAttachments()) {
            attachmentSource = new AttachmentSource() {
                @Override
                public List<Attachment> getMessageAttachments(Message message) throws ClientException {
                    return MessageController.getInstance().getMessageAttachment(message.getChannelId(),
                            message.getMessageId());
                }
            };
        }
        long minMessageId = 0;
        try {
            List<Map<String, Object>> maps;
            // Page through prunable messages, keyed forward by minMessageId.
            do {
                ThreadUtils.checkInterruptedStatus();
                SqlSession session = SqlConfig.getSqlSessionManager().openSession(true);

                try {
                    params.put("minMessageId", minMessageId);
                    maps = session.selectList("Message.getMessagesToPrune", params);
                } finally {
                    session.close();
                }

                List<Long> archiveMessageIds = new ArrayList<Long>();
                Iterator<Map<String, Object>> iterator = maps.iterator();
                while (iterator.hasNext()) {
                    Map<String, Object> map = iterator.next();

                    long receivedDate = ((Calendar) map.get("mm_received_date")).getTimeInMillis();
                    long id = (Long) map.get("id");

                    // Same partitioning rule as getIdsToPrune.
                    if (messageDateThreshold != null && receivedDate < messageDateThreshold.getTimeInMillis()) {
                        messageIds.add(id);
                    } else {
                        contentMessageIds.add(id);
                    }

                    minMessageId = id + 1;
                    archiveMessageIds.add(id);

                    // Flush a block through the archiver when it is full or the
                    // current page is exhausted.
                    if (archiveMessageIds.size() == archiverBlockSize || !iterator.hasNext()) {
                        ThreadUtils.checkInterruptedStatus();
                        DonkeyDao dao = getDaoFactory().getDao();
                        try {
                            List<Message> messages = dao.getMessages(channelId, archiveMessageIds);

                            for (Message message : messages) {
                                if (attachmentSource != null) {
                                    List<Attachment> attachments = attachmentSource
                                            .getMessageAttachments(message);

                                    if (CollectionUtils.isNotEmpty(attachments)) {
                                        message.setAttachments(attachments);
                                    }
                                }

                                // write() returning true counts as an export.
                                if (archiver.write(message)) {
                                    numExported++;
                                }
                            }

                            archiveMessageIds.clear();
                        } finally {
                            dao.close();
                        }
                    }
                }
            } while (maps != null && maps.size() == ID_RETRIEVE_LIMIT);

            archiver.finishWrite();
        } finally {
            archiver.close();
        }

        // Loose-file mode: expose the finished archive by renaming the temp
        // folder to its final name.
        if (messageWriterOptions.getArchiveFormat() == null && new File(tempChannelFolder).isDirectory()) {
            try {
                FileUtils.moveDirectory(new File(tempChannelFolder), new File(finalChannelFolder));
            } catch (IOException e) {
                logger.error("Failed to move " + tempChannelFolder + " to " + finalChannelFolder, e);
            }
        }
    } catch (Throwable t) {
        // Remove partial output before propagating the failure.
        FileUtils.deleteQuietly(new File(tempChannelFolder));
        FileUtils.deleteQuietly(new File(finalChannelFolder));
        throw t;
    } finally {
        status.setArchiving(false);
    }
}

From source file:com.mirth.connect.plugins.datapruner.DataPruner.java

License:Open Source License

/**
 * Executes a delete statement while tracking pruning status, running the
 * optional "initDataPruner" statement first when the configuration defines it.
 *
 * @param query  id of the delete statement to execute
 * @param params parameters bound into the statement
 * @return number of rows deleted
 */
private int runDelete(String query, Map<String, Object> params) {
    SqlSession session = SqlConfig.getSqlSessionManager().openSession(true);

    try {
        // Some configurations require one-time initialization before pruning.
        if (DatabaseUtil.statementExists("initDataPruner", session)) {
            session.update("initDataPruner");
        }

        status.setPruning(true);

        return session.delete(query, params);
    } finally {
        session.close();
        status.setPruning(false);
    }
}

From source file:com.mirth.connect.plugins.datapruner.DataPrunerMessageList.java

License:Open Source License

/**
 * Loads a page of prunable messages for the channel, recording every id as a
 * content-prune candidate and, when past the date threshold, also as a
 * full-prune candidate.
 *
 * @param offset row offset of the page
 * @param limit  maximum number of rows to fetch
 * @return the page of messages with their connector messages attached
 * @throws Exception if the query or DAO access fails
 */
@Override
protected List<Message> getItems(int offset, int limit) throws Exception {
    List<Map<String, Object>> maps;
    SqlSession session = SqlConfig.getSqlSessionManager().openSession();

    try {
        // Populate paging params inside the try so the session is always closed.
        params.put("offset", offset);
        params.put("limit", limit);
        maps = session.selectList("Message.getMessagesToPrune", params);
    } finally {
        session.close();
    }

    List<Message> messages = new ArrayList<Message>();
    DonkeyDao dao = daoFactory.getDao();

    try {
        for (Map<String, Object> map : maps) {
            Long messageId = (Long) map.get("id");
            long connectorReceivedDateMillis = ((Calendar) map.get("mm_received_date")).getTimeInMillis();

            // Assign directly; the previous null initialization was redundant.
            Map<Integer, ConnectorMessage> connectorMessages = dao.getConnectorMessages(channelId, messageId);

            Message message = new Message();
            message.setMessageId(messageId);
            message.setChannelId(channelId);
            message.setReceivedDate((Calendar) map.get("received_date"));
            message.setProcessed((Boolean) map.get("processed"));
            message.setServerId((String) map.get("server_id"));
            message.setImportId((Long) map.get("import_id"));
            message.getConnectorMessages().putAll(connectorMessages);

            messages.add(message);

            // Every row returned by this query is content-prunable.
            contentMessageIds.add(messageId);

            // Full pruning applies only once past the configured threshold.
            if (messageDateThreshold != null
                    && connectorReceivedDateMillis < messageDateThreshold.getTimeInMillis()) {
                messageIds.add(messageId);
            }
        }

        return messages;
    } finally {
        dao.close();
    }
}

From source file:com.mirth.connect.server.controllers.DefaultDatabaseTaskController.java

License:Open Source License

/**
 * Builds the map of pending database maintenance tasks (task id -> task):
 * removal tasks for legacy tables that still exist, an index-creation task for
 * channels missing the D_MM*_INDEX3 index, and the currently running task.
 *
 * @return map of task id to DatabaseTask
 * @throws Exception if the session, connection, or DAO access fails
 */
@Override
public Map<String, DatabaseTask> getDatabaseTasks() throws Exception {
    Map<String, DatabaseTask> tasks = new HashMap<String, DatabaseTask>();
    SqlSession session = SqlConfig.getSqlSessionManager().openSession();

    try {
        Connection connection = session.getConnection();

        // Only add the task to remove OLD_CHANNEL if OLD_MESSAGE has already been dropped
        if (DatabaseUtil.tableExists(connection, "OLD_MESSAGE")) {
            DatabaseTask task = populateTask(new DatabaseTask(TASK_REMOVE_OLD_MESSAGE));
            logger.debug("Adding database task: " + task.getName());
            tasks.put(task.getId(), task);
        } else if (DatabaseUtil.tableExists(connection, "OLD_CHANNEL")) {
            DatabaseTask task = populateTask(new DatabaseTask(TASK_REMOVE_OLD_CHANNEL));
            logger.debug("Adding database task: " + task.getName());
            tasks.put(task.getId(), task);
        }

        if (DatabaseUtil.tableExists(connection, "OLD_ATTACHMENT")) {
            DatabaseTask task = populateTask(new DatabaseTask(TASK_REMOVE_OLD_ATTACHMENT));
            logger.debug("Adding database task: " + task.getName());
            tasks.put(task.getId(), task);
        }

        if (DatabaseUtil.tableExists(connection, "OLD_CODE_TEMPLATE")) {
            DatabaseTask task = populateTask(new DatabaseTask(TASK_REMOVE_OLD_CODE_TEMPLATE));
            logger.debug("Adding database task: " + task.getName());
            tasks.put(task.getId(), task);
        }

        DonkeyDao dao = Donkey.getInstance().getDaoFactory().getDao();
        try {
            Map<String, Long> localChannelIdMap = dao.getLocalChannelIds();
            Map<String, String> affectedChannels = new HashMap<String, String>();

            // Collect every channel whose message-metadata table is missing INDEX3.
            for (String channelId : localChannelIdMap.keySet()) {
                long localChannelId = localChannelIdMap.get(channelId);
                String tableName = "D_MM" + localChannelId;

                if (!DatabaseUtil.indexExists(connection, tableName, tableName + "_INDEX3")) {
                    affectedChannels.put(channelId, getChannelName(channelId));
                }
            }

            // One migration task covers all affected channels.
            if (MapUtils.isNotEmpty(affectedChannels)) {
                DatabaseTask task = populateTask(new DatabaseTask(TASK_ADD_D_MM_INDEX3));
                task.setAffectedChannels(affectedChannels);
                logger.debug("Adding migration task: " + task.getName());
                tasks.put(task.getId(), task);
            }
        } finally {
            // Defensive null check before closing the DAO.
            if (dao != null) {
                dao.close();
            }
        }
    } finally {
        session.close();
    }

    // Include the in-progress task, if any, so callers see it alongside pending ones.
    DatabaseTask currentTask = getCurrentTask();
    if (currentTask != null) {
        tasks.put(currentTask.getId(), currentTask);
    }

    return tasks;
}