Example usage for org.springframework.jdbc.core JdbcTemplate query

List of usage examples for org.springframework.jdbc.core JdbcTemplate query

Introduction

In this page you can find the example usage for org.springframework.jdbc.core JdbcTemplate query.

Prototype

@Override
    public <T> List<T> query(String sql, RowMapper<T> rowMapper, @Nullable Object... args)
            throws DataAccessException 

Source Link

Usage

From source file:com.hexin.core.dao.BaseDaoSupport.java

/**
 * Runs a paginated MySQL query. Derives a {@code count(*)} query from the
 * given SQL, then wraps the SQL with an optional ORDER BY and a LIMIT clause
 * and maps rows onto {@code dtoClass} via {@link BeanPropertyRowMapper}.
 *
 * @param jdbcTemplate  template used to execute both queries
 * @param sql           the base SELECT statement (without LIMIT)
 * @param dtoClass      row-mapping target type
 * @param pageCondition paging/sorting parameters; may be {@code null}, in
 *                      which case page 1 with a page size of 10 is used
 * @param args          bind parameters for {@code sql}
 * @return a populated {@link Page} with rows, totals and page index
 */
public <T> Page<T> findMySqlPage(JdbcTemplate jdbcTemplate, String sql, Class<T> dtoClass,
        PageCondition pageCondition, Object... args) {

    Page<T> page = new Page<T>();
    int currentPage = 1;
    int pageSize = 10;
    String camelSortBy = "";
    String underlineSortBy = "";
    String orderBy = "";

    // Count query: drop any trailing ORDER BY (ordering cannot change the row
    // count) and wrap the remainder in a sub-select.
    // NOTE(review): substringBeforeLast assumes the token "order " only occurs
    // in the final ORDER BY clause; a literal "order " earlier in the SQL
    // would be truncated incorrectly -- confirm against callers.
    StringBuilder countSqlBuf = new StringBuilder();
    countSqlBuf.append("select count(*) from (");
    countSqlBuf.append(StringUtils.substringBeforeLast(sql, "order "));
    countSqlBuf.append(") tmp_table");
    debugSql(countSqlBuf.toString(), args);

    long total = jdbcTemplate.queryForObject(countSqlBuf.toString(), Long.class, args);
    page.setTotal(total);

    if (pageCondition != null) {
        currentPage = pageCondition.getPage();
        pageSize = pageCondition.getRows();
        camelSortBy = pageCondition.getSort();
        orderBy = pageCondition.getOrder();

        // Convert the camelCase sort property to its underscored column name.
        underlineSortBy = IcpObjectUtil.camelToUnderline(camelSortBy);
    }

    // Guard against zero/negative paging values, which would otherwise cause
    // a division by zero below and a nonsensical LIMIT offset.
    if (pageSize <= 0) {
        pageSize = 10;
    }
    if (currentPage <= 0) {
        currentPage = 1;
    }

    StringBuilder pageSqlBuf = new StringBuilder();
    pageSqlBuf.append("select * from (");
    pageSqlBuf.append(sql);
    pageSqlBuf.append(") t ");

    if (StringUtils.isNotEmpty(underlineSortBy) && StringUtils.isNotEmpty(orderBy)) {
        // The direction is concatenated into the SQL, so restrict it to the
        // two legal keywords to block injection through the order parameter.
        // The sort column is also concatenated -- callers must ensure it maps
        // to a real column name (SQL-injection risk otherwise).
        if ("asc".equalsIgnoreCase(orderBy) || "desc".equalsIgnoreCase(orderBy)) {
            pageSqlBuf.append(" order by ");
            pageSqlBuf.append(underlineSortBy).append(" ").append(orderBy).append(" ");
        }
    }
    pageSqlBuf.append(" limit ");
    pageSqlBuf.append((currentPage - 1) * pageSize);
    pageSqlBuf.append(" ,");
    pageSqlBuf.append(pageSize);
    pageSqlBuf.append(" ");

    debugSql(pageSqlBuf.toString(), args);

    RowMapper<T> rowMapper = new BeanPropertyRowMapper<T>(dtoClass);
    List<T> resultList = jdbcTemplate.query(pageSqlBuf.toString(), rowMapper, args);

    // Ceiling division: totalPage = ceil(total / pageSize).
    long totalPage = (total + pageSize - 1) / pageSize;

    page.setRows(resultList);
    page.setCurrentPage(currentPage);
    page.setPageSize(pageSize);
    page.setTotalPage(totalPage);
    page.setPageIndex(PageIndex.getPageIndex(Constants.PAGE_RANGE, pageSize, totalPage));

    return page;
}

From source file:de.uhh.l2g.dao.VideoDao.java

/**
 * Gets the video result search list by search word.
 *
 * @param word the search word
 * @param limit the limit
 * @return the video result search list by search word
 */
@SuppressWarnings("unchecked")
/**
 * Searches open-access videos whose title, lecture-series name, description,
 * instructors string, tags, creator, or series number match the given word.
 * The match is a SQL LIKE with wildcards on both sides.
 *
 * @param word the search word
 * @param limit the limit (NOTE(review): currently unused -- every match is
 *              returned; confirm whether a LIMIT clause was intended)
 * @return videos with a non-zero id that match the search word, ordered by
 *         generation date (newest first), stored in the shared result field
 */
@SuppressWarnings("unchecked")
public List<Video> getVideoResultSearchListBySearchWord(String word, int limit) {
    String like = "%" + word + "%";
    returnList = new ArrayList<Video>();
    JdbcTemplate select = new JdbcTemplate(this.getDataSource());

    String query = "SELECT DISTINCT video.id, video.title,  metadata.creator, lectureseries.name, video.filename, video.generationDate, facility.typ, video.lectureseriesId, video.metadataId, video.hostId, video.producerId "
            + "FROM video, metadata, lectureseries, facility " + "WHERE ( "
            + "( video.openAccess=1 AND video.metadataId = metadata.id AND video.lectureseriesId = lectureseries.id AND video.facilityId=facility.id ) "
            + "AND (" + "( video.title LIKE ? ) OR " + "( lectureseries.name LIKE ? ) OR "
            + "( metadata.description LIKE ? ) OR " + "( lectureseries.instructorsString LIKE ? ) OR "
            + "( video.tags LIKE ? ) OR " + "( metadata.creator LIKE ? ) OR "
            + "( lectureseries.number LIKE ? ) " + ")" + ")"
            + "ORDER BY video.generationDate DESC, lectureseries.name ASC;";

    // The same LIKE pattern is bound to all seven placeholders.
    List<Video> matches = select.query(query, new Object[] { like, like, like, like, like, like, like },
            new VideoResultSearchRowMapper());

    // Keep only rows that mapped to a real video (id != 0).
    for (Video candidate : matches) {
        if (candidate.getId() != 0) {
            returnList.add(candidate);
        }
    }

    return returnList;
}

From source file:de.uhh.l2g.dao.VideoDao.java

/**
 * Gets the video result search list by splitted search word.
 *
 * @param word the search phrase (split on spaces)
 * @param limit the limit
 * @return the video result search list by splitted search word
 */
@SuppressWarnings("unchecked")
/**
 * Splits the search phrase on spaces and runs one LIKE query per token,
 * accumulating at most {@code limit} distinct open-access videos across all
 * tokens. Results are collected into the shared {@code returnList2} field,
 * skipping videos already present in either shared result list.
 *
 * @param word whitespace-separated search words
 * @param limit maximum number of videos to collect in total
 * @return the collected videos (at most {@code limit})
 */
@SuppressWarnings("unchecked")
public List<Video> getVideoResultSearchListBySplittedSearchWord(String word, int limit) {

    JdbcTemplate select = new JdbcTemplate(this.getDataSource());

    // split the search string and add all results to the return list
    String[] splitSearchWord = word.split(" ");

    // clean the shared result list first
    returnList2.clear();
    int count = 0;

    // search for each individual word
    for (String splittedword : splitSearchWord) {
        String like = "%" + splittedword + "%";
        String query = "SELECT video.id, video.title,  metadata.creator, lectureseries.name, video.filename, video.generationDate, facility.typ, video.lectureseriesId, video.metadataId, video.hostId, video.producerId "
                + "FROM video, metadata, lectureseries, facility " + "WHERE ( " + "( video.openAccess=1 AND "
                + "video.metadataId = metadata.id AND " + "video.lectureseriesId = lectureseries.id AND "
                + "video.facilityId=facility.id ) AND " + "(( lectureseries.name LIKE ? ) OR "
                + "( lectureseries.number LIKE ? ) OR " + "( lectureseries.instructorsString LIKE ? ) OR "
                + "( video.title LIKE ? ) OR " + "( video.tags LIKE ? ) OR " + "( metadata.creator LIKE ? ) OR "
                + "( metadata.description LIKE ? ) " + ")" + ");";

        List<Video> result = select.query(query, new Object[] { like, like, like, like, like, like, like },
                new VideoResultSearchRowMapper());

        // Stop adding once the limit is reached. Uses "count < limit": the
        // previous "count <= limit" let one extra video through (off-by-one).
        for (Video vid : result) {
            if (count < limit && vid.getId() != 0 && !videoPresentInReturnList2(vid)
                    && !videoPresentInReturnList(vid)) {
                returnList2.add(vid);
                count++;
            }
        }
    }

    return returnList2;
}

From source file:uta.ak.usttmp.dmcore.task.QuartzMiningJob.java

/**
 * Quartz job entry point: runs one mining iteration of the configured task.
 * An iteration (1) preprocesses raw texts for the current time window into
 * the {@code c_text} table, (2) mines topics into {@code c_topic}, and
 * (3) optionally computes topic-evolution relationships against the previous
 * iteration. The task row's exec count and status are updated at the end.
 *
 * @param jec Quartz context carrying {@code miningTaskId} and {@code jobName}
 */
@Override
public void execute(JobExecutionContext jec) {

    //====== statistic time ======
    Date statisticStartTime = new Date();
    Date preprocessTime = new Date();
    Date miningTime = new Date();
    Date trackingTime = new Date();

    long statistic_TotalNum = 0;

    String updateSql = "UPDATE `c_miningtask`  " + "SET `qrtz_job_name` = ?,  "
            + " `qrtz_job_exec_count` = ?,  " + " `status` = ?  " + "WHERE  " + "   `mme_eid` = ?;";

    DataSource ds = null;
    JdbcTemplate jt = null;

    int nextExecCount = 0;
    int totalExecCount = 0;

    boolean hasPC = false;
    boolean hasTC = false;

    MiningTask mt;

    try {

        miningTaskId = Long.parseLong((String) jec.getMergedJobDataMap().get("miningTaskId"));
        jobName = (String) jec.getMergedJobDataMap().get("jobName");

        applicationContext = new ClassPathXmlApplicationContext("applicationContext.xml");

        ds = (DataSource) applicationContext.getBean("dataSource");
        jt = new JdbcTemplate(ds);

        //Load Miningtask
        String querySql = "select * from c_miningtask where mme_eid=?";
        mt = (MiningTask) jt.queryForObject(querySql, new Object[] { miningTaskId }, new MiningTaskRowMapper());

        // Nothing to do if the task has already run its final iteration.
        if (mt.getQrtzJobExecCount() == mt.getQrtzJobTotalCount()) {
            return;
        }
        totalExecCount = mt.getQrtzJobTotalCount();

        // "NONE" (any case) or null disables the optional components.
        if (null != mt.getPreprocessComponent() && !"NONE".equals(mt.getPreprocessComponent().toUpperCase())) {
            hasPC = true;
        }
        if (null != mt.getTrackingComponent() && !"NONE".equals(mt.getTrackingComponent().toUpperCase())) {
            hasTC = true;
        }

        List<Text> textList;
        List<Topic> topicList;

        if (hasPC) {
            preprocessComponent = (PreprocessComponent) applicationContext.getBean(mt.getPreprocessComponent());
        }

        miningComponent = (MiningComponent) applicationContext.getBean(mt.getMiningComponent());

        if (hasTC) {
            trackingComponent = (TrackingComponent) applicationContext.getBean(mt.getTrackingComponent());
        }

        miningTaskService = (MiningTaskService) applicationContext.getBean("miningTaskService");

        topicMiningService = (TopicMiningService) applicationContext.getBean("topicMiningService");

        nextExecCount = mt.getQrtzJobExecCount() + 1;

        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        // Calculate the time window for this iteration: [start + interval *
        // execCount, start + interval * (execCount + 1)) in hours.
        Calendar cal = Calendar.getInstance();
        cal.setTime(mt.getStartTime());
        cal.add(Calendar.HOUR_OF_DAY, mt.getMiningInterval() * mt.getQrtzJobExecCount());
        Date startTime = cal.getTime();
        cal.setTime(mt.getStartTime());
        cal.add(Calendar.HOUR_OF_DAY, mt.getMiningInterval() * nextExecCount);
        Date endTime = cal.getTime();

        /**************************************************************
         * 1. Preprocess text data                                    *
         **************************************************************/
        String sTag = mt.getId() + "_" + mt.getTag();

        // Clear the text table if there is existing text for this window
        // (makes a re-run of the same iteration idempotent).
        String clearTextSQL = "DELETE " + "FROM " + "   c_text " + "WHERE " + "   ( "
                + "       text_createdate " + "      BETWEEN ? " + "      AND ? " + "   ) " + "AND tag = ?";
        jt.update(clearTextSQL, formatter.format(startTime), formatter.format(endTime), sTag);

        System.out.println("Query raw text records...");
        String querySQL = "SELECT " + "   * " + "FROM " + "   c_rawtext " + "WHERE " + "   ( "
                + "       text_createdate " + "      BETWEEN ? " + "      AND ? " + "   ) " + "AND tag = ?";

        List<RawText> rawTextList = jt.query(querySQL,
                new Object[] { formatter.format(startTime), formatter.format(endTime), mt.getTag() },
                new RawTextRowMapper());

        //if no raw text to be mined, then return
        if (null == rawTextList || rawTextList.isEmpty()) {
            int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                    : MiningTask.STATUS_RUNNING;
            //Update task status
            jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);
            return;
        }

        // rawTextList is known non-null/non-empty past the early return above.
        statistic_TotalNum = rawTextList.size();

        if (hasPC) {
            textList = preprocessComponent.preprocess(mt, rawTextList);
        } else {
            // No preprocess component: copy raw texts into Text records as-is.
            textList = new ArrayList<>();
            for (RawText rt : rawTextList) {
                Text tx = new Text();
                tx.setCreateTime(rt.getCreateTime());
                tx.setRawTextId(rt.getId());
                tx.setTag(sTag);
                tx.setText(rt.getText());
                tx.setTitle(rt.getTitle());

                textList.add(tx);
            }
        }

        //if no text to be mined, then return
        if (null == textList || textList.isEmpty()) {
            int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                    : MiningTask.STATUS_RUNNING;
            //Update task status
            jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);
            return;
        }

        List<Object[]> text_lines = new ArrayList<>();
        for (Text tx : textList) {
            Object[] ojarr = new Object[] { tx.getTitle(), tx.getText(), sTag,
                    String.valueOf(tx.getRawTextId()), formatter.format(tx.getCreateTime()) };
            text_lines.add(ojarr);
        }

        String insertSQL = "INSERT INTO c_text(mme_lastupdate, mme_updater, title, text, tag, rawtext_id, text_createdate) "
                + "VALUES (NOW(), \"USTTMP\", ?, ?, ?, ?, ?)";

        System.out.println("Start to insert text records...");
        jt.batchUpdate(insertSQL, text_lines);

        //statistic time
        preprocessTime = new Date();

        /**************************************************************
         * 2. Mining topics                                           *
         **************************************************************/

        // Clear any topics left over from a previous run of this iteration.
        String clearTopicSQL = "DELETE " + "FROM " + "   c_topic " + "WHERE "
                + "miningtask_id=? AND seq_no = ?";
        jt.update(clearTopicSQL, miningTaskId, nextExecCount);

        topicList = miningComponent.generateTopics(mt, textList);

        String insertTpSQL = "INSERT INTO `c_topic` ( " + "   `mme_lastupdate`, " + "   `mme_updater`, "
                + "   `name`, " + "   `content`, " + "   `remark`, " + "   `miningtask_id`, " + "   `seq_no` "
                + ") " + "VALUES " + "   (NOW(), 'USTTMP' ,?,?,?,?,?)";

        List<Object[]> tpArgsList = new ArrayList<>();

        for (Topic tm : topicList) {
            Object[] objarr = new Object[] { tm.getName(), tm.toString(),
                    (null != tm.getRemark()) ? tm.getRemark() : "", miningTaskId, nextExecCount };
            tpArgsList.add(objarr);
        }

        System.out.println("Inserting records into the c_topic table...");
        jt.batchUpdate(insertTpSQL, tpArgsList);

        //statistic time
        miningTime = new Date();

        /**************************************************************
         * 3. Evolution tracking                                      *
         **************************************************************/

        // Tracking only makes sense from the second iteration onward, since
        // it relates this iteration's topics to the previous iteration's.
        if (hasTC) {
            if (nextExecCount > 1) {
                if (null != topicList && !topicList.isEmpty()) {

                    int preTopicSeq = nextExecCount - 1;
                    int nextTopicSeq = nextExecCount;

                    //Clear existed topic evolution rela
                    String clearEvSQL = "DELETE " + "FROM " + "   c_topicevolutionrela " + "WHERE "
                            + "   miningtask_id =? " + "AND pre_topic_seq =? " + "AND next_topic_seq =?";
                    jt.update(clearEvSQL, miningTaskId, preTopicSeq, nextTopicSeq);

                    List<Topic> preTopics = topicMiningService.getTopics(miningTaskId, preTopicSeq);
                    List<Topic> nextTopics = topicMiningService.getTopics(miningTaskId, nextTopicSeq);

                    if (null == preTopics || preTopics.isEmpty()) {
                        UsttmpProcessException upe = new UsttmpProcessException(
                                UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION);
                        throw upe;
                    }
                    if (null == nextTopics || nextTopics.isEmpty()) {
                        UsttmpProcessException upe = new UsttmpProcessException(
                                UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION);
                        throw upe;
                    }

                    List<EvolutionRelationship> evRelaList = trackingComponent
                            .getTopicEvolutionRelationships(mt, preTopics, nextTopics);

                    String insertEvSql = "INSERT INTO `c_topicevolutionrela` (  " + "   `pre_topic_id`,  "
                            + "   `next_topic_id`,  " + "   `rank_against_pre_topic_in_next_group`,  "
                            + "   `rank_against_next_topic_in_pre_group`,  " + "   `similarity`  ,"
                            + "   `miningtask_id`  ," + "   `pre_topic_seq`  ," + "   `next_topic_seq`  "
                            + ")  " + "VALUES  " + "   (?, ?, ?, ?, ?, ?, ?, ?)";

                    List<Object[]> argsList = new ArrayList<>();

                    for (EvolutionRelationship er : evRelaList) {
                        Object[] objarr = new Object[] { er.getPreTopic().getId(), er.getNextTopic().getId(),
                                er.getRankAgainstPreTopicInNextGroup(), er.getRankAgainstNextTopicInPreGroup(),
                                er.getSimilarity(), miningTaskId, preTopicSeq, nextTopicSeq };
                        argsList.add(objarr);
                    }
                    jt.batchUpdate(insertEvSql, argsList);
                }
            }
        }

        int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                : MiningTask.STATUS_RUNNING;
        //Update task status
        jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);

        //statistic time
        trackingTime = new Date();

    } catch (UsttmpProcessException e) {

        // A failed evolution-rela calculation still advances the task status;
        // other process exceptions are only logged.
        if (UsttmpProcessException.TYPE_CALC_EVO_RELA_EXCEPTION.equals(e.getMessage())) {
            // jt can still be null if the failure happened before the
            // application context / template were initialized.
            if (jt != null) {
                int nowStatus = (nextExecCount == totalExecCount) ? MiningTask.STATUS_COMPLETED
                        : MiningTask.STATUS_RUNNING;
                jt.update(updateSql, jobName, nextExecCount, nowStatus, miningTaskId);
            }
        }

        StringWriter errors = new StringWriter();
        e.printStackTrace(new PrintWriter(errors));
        logger.error(errors.toString());
        // Log to the exception table; the service may not be initialized if
        // the failure happened before the application context was loaded.
        if (miningTaskService != null) {
            miningTaskService.logMiningTask(MiningTaskService.LOG_TYPE_EXCEPTION, miningTaskId,
                    errors.toString());
        }

        //statistic time
        trackingTime = new Date();
    } catch (Exception e) {

        StringWriter errors = new StringWriter();
        e.printStackTrace(new PrintWriter(errors));
        logger.error(errors.toString());
        // Same initialization caveat as above: guard the service reference.
        if (miningTaskService != null) {
            miningTaskService.logMiningTask(MiningTaskService.LOG_TYPE_EXCEPTION, miningTaskId,
                    errors.toString());
        }

        //statistic time
        trackingTime = new Date();

    } finally {

        // Phase durations in whole seconds, derived from the checkpoints set
        // as each phase completes (a failed phase reports 0 or partial time).
        long statistic_PreprocessTime = (preprocessTime.getTime() - statisticStartTime.getTime()) / 1000;
        long statistic_MiningTime = (miningTime.getTime() - preprocessTime.getTime()) / 1000;
        long statistic_TrackingTime = (trackingTime.getTime() - miningTime.getTime()) / 1000;
        long statistic_TotalTime = (trackingTime.getTime() - statisticStartTime.getTime()) / 1000;

        logger.info("============================ " + "This is start log. " + "============================");

        logger.info("Total number of texts being processed is " + statistic_TotalNum + ".");

        logger.info("Preprocess time is " + statistic_PreprocessTime + " seconds.");

        logger.info("Mining time is " + statistic_MiningTime + " seconds.");

        logger.info("Tracking time is " + statistic_TrackingTime + " seconds.");

        logger.info("Total time is " + statistic_TotalTime + " seconds.");

        logger.info("============================ " + "This is end log. " + "============================");

    }
}

From source file:de.uhh.l2g.dao.VideoDao.java

/**
 * Gets the open access videos.
 *
 * @param start the start
 * @param ende the ende
 * @param facilityId the facility id
 * @param tree the tree
 * @return the open access videos
 */
@SuppressWarnings("unchecked")
public List<Video> getOpenAccessVideos(Integer start, Integer ende, Integer facilityId, String tree) {
    //use jdbc template
    JdbcTemplate jdbst = new JdbcTemplate(this.getDataSource());
    //Prepare part query for lectureseries
    //get all lectureseries open access videos
    List<Lectureseries> lectureseriesList = getAllLectureseriesWhithOpenaccessVideos(tree);
    Iterator<Lectureseries> iC = lectureseriesList.iterator();

    //for every lectureseriesId get latest video - tree 1
    String partVideoIdQuery = "";
    partVideoIdQuery += "(";
    while (iC.hasNext()) {
        Lectureseries c = iC.next();
        if (iC.hasNext())
            partVideoIdQuery += "v.id=" + getLatestOpenAccessVideoIdFromLectureseriesId(c.getId()) + " OR ";
        else
            partVideoIdQuery += "v.id=" + getLatestOpenAccessVideoIdFromLectureseriesId(c.getId());
    }
    partVideoIdQuery += ") AND ";

    if (partVideoIdQuery.equals("() AND "))
        partVideoIdQuery = "";

    //prepare facilities list 
    //get all sub-facilitye from input-id
    List<Facility> einList = null;
    String partEinQuery = "";
    //prepare query
    String sqlquery = "";
    //if facilityId choosen
    if (facilityId != 0) {
        einList = facilityDao.getByParentId(facilityId);
        //add parentId-facility himself to list
        Facility ein = facilityDao.getById(facilityId).iterator().next();
        einList.add(ein);
        //now get all videos for each facility from list
        //prepare string for sql-query
        Iterator<Facility> iE = einList.iterator();
        partEinQuery = "(";
        while (iE.hasNext()) {
            Facility e = iE.next();
            partEinQuery += "vf.facilityId=" + e.getId() + " OR ";
        }
        // and update part query
        partEinQuery += "vf.facilityId=" + facilityId + ")";

        //check tree
        if (tree.equals("all"))
            sqlquery = "SELECT v.id, v.title, m.creator, l.name, v.openAccess, v.downloadLink, v.hits, v.uploadDate, v.generationDate, v.filename, v.surl, f.typ, v.hostId, v.producerId, v.lectureseriesId FROM video v, lectureseries l, metadata m, facility f, video_facility vf WHERE ( "
                    + partVideoIdQuery + "  v.id=vf.videoId AND " + partEinQuery
                    + " AND vf.facilityId=f.id AND v.lectureseriesId=l.id AND v.metadataId=m.id ) GROUP BY v.lectureseriesId ORDER BY v.generationDate DESC ";
        else
            sqlquery = "SELECT v.id, v.title, m.creator, l.name, v.openAccess, v.downloadLink, v.hits, v.uploadDate, v.generationDate, v.filename, v.surl, f.typ, v.hostId, v.producerId, v.lectureseriesId FROM video v, lectureseries l, metadata m, facility f, video_facility vf WHERE ( "
                    + partVideoIdQuery + "  v.id=vf.videoId AND " + partEinQuery
                    + " AND vf.facilityId=f.id AND f.typ='" + tree
                    + "' AND v.lectureseriesId=l.id AND v.metadataId=m.id) GROUP BY v.lectureseriesId ORDER BY v.generationDate DESC ";
    } else {
        //check tree
        if (tree.equals("all"))
            sqlquery = "SELECT v.id, v.title, m.creator, l.name, v.openAccess, v.downloadLink, v.hits, v.uploadDate, v.generationDate, v.filename, v.surl, f.typ, v.hostId, v.producerId, v.lectureseriesId FROM video v, lectureseries l, metadata m, facility f, video_facility vf WHERE ( "
                    + partVideoIdQuery
                    + "  v.id=vf.videoId AND vf.facilityId=f.id AND v.lectureseriesId=l.id AND v.metadataId=m.id ) GROUP BY v.lectureseriesId ORDER BY v.generationDate DESC ";
        else
            sqlquery = "SELECT v.id, v.title, m.creator, l.name, v.openAccess, v.downloadLink, v.hits, v.uploadDate, v.generationDate, v.filename, v.surl, f.typ, v.hostId, v.producerId, v.lectureseriesId FROM video v, lectureseries l, metadata m, facility f, video_facility vf WHERE ( "
                    + partVideoIdQuery + "  v.id=vf.videoId AND vf.facilityId=f.id AND f.typ='" + tree
                    + "' AND v.lectureseriesId=l.id AND v.metadataId=m.id ) GROUP BY v.lectureseriesId ORDER BY v.generationDate DESC ";
    }
    //and prepare with output - limit
    sqlquery += "LIMIT ? , ?;";

    return jdbst.query(sqlquery, new Object[] { start, ende },
            new VideoResultProducerListRowMapper(hostDao, producerDao, segmentDao));
}

From source file:no.kantega.publishing.common.ao.ContentAOJdbcImpl.java

@Override
public Content getContent(ContentIdentifier cid, boolean isAdminMode) {
    contentIdHelper.assureContentIdAndAssociationIdSet(cid);
    int requestedVersion = cid.getVersion();
    int contentVersionId = -1;

    int contentId = cid.getContentId();
    JdbcTemplate jdbcTemplate = getJdbcTemplate();
    if (isAdminMode) {
        if (requestedVersion == -1) {
            // When in administration mode users should see last version
            List<Integer> contentVersionIds = jdbcTemplate.queryForList(
                    "select ContentVersionId from contentversion where ContentId = ? order by Version desc",
                    Integer.class, contentId);
            if (contentVersionIds.isEmpty()) {
                return null;
            } else {
                contentVersionId = contentVersionIds.get(0);
            }/*from  w  w w.ja v a 2 s . co  m*/
        } else {

            try {
                contentVersionId = jdbcTemplate.queryForObject(
                        "select ContentVersionId from contentversion where ContentId = ? and Version = ? order by Version desc",
                        Integer.class, contentId, requestedVersion);
            } catch (EmptyResultDataAccessException e) {
                return null;
            }
        }
    } else if (cid.getStatus() == ContentStatus.HEARING) {
        // Find version for hearing, if no hearing is found, active version is returned
        int activeversion = jdbcTemplate.queryForObject(
                "select ContentVersionId from contentversion where ContentId = ? and contentversion.IsActive = 1 order by Version desc",
                Integer.class, contentId);
        contentVersionId = jdbcTemplate.queryForObject(
                "select ContentVersionId from contentversion where ContentId = ? AND Status = ? AND ContentVersionId > ? order by Version desc",
                Integer.class, contentId, ContentStatus.HEARING.getTypeAsInt(), activeversion);
    } else {
        // Others should see active version
        contentVersionId = -1;
    }

    StringBuilder query = new StringBuilder(
            "select * from content, contentversion where content.ContentId = contentversion.ContentId");
    List<Integer> params = new ArrayList<>(2);
    if (contentVersionId != -1) {
        // Hent angitt versjon
        query.append(" and contentversion.ContentVersionId = ?");
        params.add(contentVersionId);
    } else {
        // Hent aktiv versjon
        query.append(" and contentversion.IsActive = 1");
    }
    query.append(" and content.ContentId = ? order by Version");
    params.add(contentId);

    Content content = null;
    try {
        content = jdbcTemplate.queryForObject(query.toString(), new ContentRowMapper(false), params.toArray());
    } catch (EmptyResultDataAccessException e) {
        return null;
    }

    List<Association> associations = jdbcTemplate.query(
            "SELECT * FROM associations WHERE ContentId = ? AND (IsDeleted IS NULL OR IsDeleted = 0)",
            new AssociationRowMapper(), contentId);

    // Get associations for this page
    boolean foundCurrentAssociation = false;
    for (Association a : associations) {
        if (!foundCurrentAssociation) {
            // Dersom knytningsid ikke er angitt bruker vi default for angitt site
            int associationId = cid.getAssociationId();
            if ((associationId == a.getId()) || (associationId == -1
                    && a.getAssociationtype() == AssociationType.DEFAULT_POSTING_FOR_SITE
                    && a.getSiteId() == cid.getSiteId())) {
                foundCurrentAssociation = true;
                a.setCurrent(true);
            }
        }
        content.addAssociation(a);
    }

    if (!foundCurrentAssociation) {
        // Knytningsid er ikke angitt, og heller ikke site, bruk den frste
        for (Association a : associations) {
            if (a.getAssociationtype() == AssociationType.DEFAULT_POSTING_FOR_SITE) {
                foundCurrentAssociation = true;
                a.setCurrent(true);
                break;
            }
        }

        if (!foundCurrentAssociation && associations.size() > 0) {
            Association a = associations.get(0);
            a.setCurrent(true);
            log.debug("Fant ingen defaultknytning: {}", contentId);
        }
    }

    if (content.getAssociation() == null) {
        // All associations to page are deleted, dont return page
        return null;
    }

    // Get content attributes
    jdbcTemplate.query("select * from contentattributes where ContentVersionId = ?",
            new ContentAttributeRowMapper(content), content.getVersionId());
    content.indexAttributes();
    List<Topic> topics = topicDao.getTopicsByContentId(contentId);
    content.setTopics(topics);

    return content;
}

From source file:org.opendatakit.common.persistence.engine.pgres.DatastoreImpl.java

/**
 * Verifies that an existing database table matches the data model of
 * {@code relation}, coercing column metadata where the database's types are
 * looser than the model's (STRING stored for BOOLEAN/URI fields), and copies
 * the database's size/precision limits back onto the model's fields.
 *
 * @param jc                template for querying the information schema
 * @param relation          the expected relation definition
 * @param originalStatement the CREATE statement used, if available; included
 *                          in error messages for diagnostics
 * @return {@code true} if the table exists (and validated), {@code false} if
 *         no columns were found for it
 * @throws IllegalStateException if a column is missing, too short, or of the
 *                               wrong type or nullability
 */
private final boolean updateRelation(JdbcTemplate jc, CommonFieldsBase relation, String originalStatement) {

    String qs = TableDefinition.TABLE_DEF_QUERY;
    List<?> columns;
    columns = jc.query(qs, new Object[] { relation.getSchemaName(), relation.getTableName() }, tableDef);
    dam.recordQueryUsage(TableDefinition.INFORMATION_SCHEMA_COLUMNS, columns.size());

    if (columns.size() > 0) {
        // Index the discovered column definitions by column name.
        Map<String, TableDefinition> map = new HashMap<String, TableDefinition>();
        for (Object o : columns) {
            TableDefinition t = (TableDefinition) o;
            map.put(t.getColumnName(), t);
        }

        // we may have gotten some results into columns -- go through the fields
        // and
        // assemble the results... we don't care about additional columns in the
        // map...
        for (DataField f : relation.getFieldList()) {
            TableDefinition d = map.get(f.getName());
            if (d == null) {
                // Missing column: build a diagnostic listing either the
                // expected field names or the original CREATE statement.
                StringBuilder b = new StringBuilder();
                if (originalStatement == null) {
                    b.append(" Retrieving expected definition (");
                    boolean first = true;
                    for (DataField field : relation.getFieldList()) {
                        if (!first) {
                            b.append(K_CS);
                        }
                        first = false;
                        b.append(field.getName());
                    }
                    b.append(")");
                } else {
                    b.append(" Created with: ");
                    b.append(originalStatement);
                }
                throw new IllegalStateException("did not find expected column " + f.getName() + " in table "
                        + relation.getSchemaName() + "." + relation.getTableName() + b.toString());
            }
            // Booleans may be stored as strings in the database; coerce the
            // discovered type so the equality check below passes.
            if (f.getDataType() == DataField.DataType.BOOLEAN && d.getDataType() == DataField.DataType.STRING) {
                d.setDataType(DataField.DataType.BOOLEAN);
                // don't care about size...
            }

            // NOTE(review): d.getMaxCharLen() is assumed non-null for STRING
            // columns here -- confirm TableDefinition guarantees this.
            if (d.getDataType() == DataField.DataType.STRING && f.getMaxCharLen() != null
                    && f.getMaxCharLen().compareTo(d.getMaxCharLen()) > 0) {
                throw new IllegalStateException("column " + f.getName() + " in table "
                        + relation.getSchemaName() + "." + relation.getTableName()
                        + " stores string-valued keys but is shorter than required by Aggregate "
                        + d.getMaxCharLen().toString() + " < " + f.getMaxCharLen().toString());
            }

            // URIs are stored as strings; require a string column, then
            // coerce the discovered type to URI (order matters: this must
            // precede the general type-equality check below).
            if (f.getDataType() == DataField.DataType.URI) {
                if (d.getDataType() != DataField.DataType.STRING) {
                    throw new IllegalStateException(
                            "column " + f.getName() + " in table " + relation.getSchemaName() + "."
                                    + relation.getTableName() + " stores URIs but is not a string field");
                }
                d.setDataType(DataField.DataType.URI);
            }

            if (d.getDataType() != f.getDataType()) {
                throw new IllegalStateException("column " + f.getName() + " in table "
                        + relation.getSchemaName() + "." + relation.getTableName()
                        + " is not of the expected type " + f.getDataType().toString());
            }

            // it is OK for the data model to be more strict than the data store.
            if (!d.isNullable() && f.getNullable()) {
                throw new IllegalStateException("column " + f.getName() + " in table "
                        + relation.getSchemaName() + "." + relation.getTableName()
                        + " is defined as NOT NULL but the data model requires NULL");
            }
            // Copy the database's actual limits back onto the model field.
            f.setMaxCharLen(d.getMaxCharLen());
            f.setNumericPrecision(d.getNumericPrecision());
            f.setNumericScale(d.getNumericScale());
        }
        return true;
    } else {
        // Table does not exist (no columns found in the information schema).
        return false;
    }
}

From source file:org.opendatakit.common.persistence.engine.sqlserver.DatastoreImpl.java

/**
 * Verifies that the backing SQL Server table for {@code relation} exists and that
 * its columns are compatible with the in-memory data model, then copies the
 * store-reported size/precision metadata back onto the model's fields.
 *
 * @param jc
 *          template used to query INFORMATION_SCHEMA for the column definitions
 * @param relation
 *          the data model whose fields are validated against the store
 * @param originalStatement
 *          the CREATE TABLE statement used to create the relation, or null if the
 *          relation was expected to pre-exist (used only to enrich error messages)
 * @return true if the table exists and every model field matched a compatible
 *         column; false if the table has no columns (i.e. does not exist)
 * @throws IllegalStateException
 *           if a model field is missing from the table or has an incompatible
 *           type, length, or nullability
 */
private final boolean updateRelation(JdbcTemplate jc, CommonFieldsBase relation, String originalStatement) {

    String qs = TableDefinition.TABLE_DEF_QUERY;
    List<?> columns;
    columns = jc.query(qs, new Object[] { relation.getSchemaName(), relation.getTableName() }, tableDef);
    dam.recordQueryUsage(TableDefinition.INFORMATION_SCHEMA_COLUMNS, columns.size());

    if (columns.size() > 0) {
        // index the store's column definitions by column name for the field scan below
        Map<String, TableDefinition> map = new HashMap<String, TableDefinition>();
        for (Object o : columns) {
            TableDefinition t = (TableDefinition) o;
            map.put(t.getColumnName(), t);
        }

        // we may have gotten some results into columns -- go through the fields
        // and
        // assemble the results... we don't care about additional columns in the
        // map...
        for (DataField f : relation.getFieldList()) {
            TableDefinition d = map.get(f.getName());
            if (d == null) {
                // expected column is absent -- build a diagnostic listing either the
                // expected field names or the CREATE statement, whichever we have
                StringBuilder b = new StringBuilder();
                if (originalStatement == null) {
                    b.append(" Retrieving expected definition (");
                    boolean first = true;
                    for (DataField field : relation.getFieldList()) {
                        if (!first) {
                            b.append(K_CS);
                        }
                        first = false;
                        b.append(field.getName());
                    }
                    b.append(")");
                } else {
                    b.append(" Created with: ");
                    b.append(originalStatement);
                }
                throw new IllegalStateException("did not find expected column " + f.getName() + " in table "
                        + relation.getSchemaName() + "." + relation.getTableName() + b.toString());
            }
            // NOTE: the coercions below mutate d so that the type-equality check
            // further down compares against the model's expectation.
            if (f.getDataType() == DataField.DataType.BOOLEAN && d.getDataType() == DataField.DataType.STRING) {
                // booleans are persisted as strings in this store
                d.setDataType(DataField.DataType.BOOLEAN);
                // don't care about size...
            }

            if (d.getDataType() == DataField.DataType.STRING && f.getMaxCharLen() != null
                    && f.getMaxCharLen().compareTo(d.getMaxCharLen()) > 0) {
                throw new IllegalStateException("column " + f.getName() + " in table "
                        + relation.getSchemaName() + "." + relation.getTableName()
                        + " stores string-valued keys but is shorter than required by Aggregate "
                        + d.getMaxCharLen().toString() + " < " + f.getMaxCharLen().toString());
            }

            if (f.getDataType() == DataField.DataType.URI) {
                // URIs are persisted as strings; coerce the store's view to URI
                if (d.getDataType() != DataField.DataType.STRING) {
                    throw new IllegalStateException(
                            "column " + f.getName() + " in table " + relation.getSchemaName() + "."
                                    + relation.getTableName() + " stores URIs but is not a string field");
                }
                d.setDataType(DataField.DataType.URI);
            }

            if ((d.getDataType() == DataField.DataType.LONG_STRING)
                    && (f.getDataType() == DataField.DataType.STRING)) {
                // we have an overly-large string that needed to be
                // stored as a nvarchar(max) string. This is OK
            } else if (d.getDataType() != f.getDataType()) {
                throw new IllegalStateException("column " + f.getName() + " in table "
                        + relation.getSchemaName() + "." + relation.getTableName()
                        + " is not of the expected type " + f.getDataType().toString());
            }

            // it is OK for the data model to be more strict than the data store.
            if (!d.isNullable() && f.getNullable()) {
                throw new IllegalStateException("column " + f.getName() + " in table "
                        + relation.getSchemaName() + "." + relation.getTableName()
                        + " is defined as NOT NULL but the data model requires NULL");
            }
            // compatible -- propagate the store's actual metrics back onto the model
            f.setMaxCharLen(d.getMaxCharLen());
            f.setNumericPrecision(d.getNumericPrecision());
            f.setNumericScale(d.getNumericScale());
            f.asDoublePrecision(d.isDoublePrecision());
        }
        return true;
    } else {
        return false;
    }
}

From source file:org.opennms.netmgt.notifd.TicketNotificationStrategy.java

/**
 * <p>Helper function that gets the alarm state (alarm id, trouble-ticket id and
 * trouble-ticket state) associated with the given event id.</p>
 *
 * @param eventID the event to look up
 * @return the callback handler's accumulated {@code AlarmState}; the alarm id
 *         is 0 if the event has no associated alarm (the LEFT JOIN yields NULLs)
 */
protected AlarmState getAlarmStateFromEvent(int eventID) {
    AlarmStateRowCallbackHandler callbackHandler = new AlarmStateRowCallbackHandler();

    JdbcTemplate template = new JdbcTemplate(DataSourceFactory.getInstance());
    // Use the varargs overload; query(String, Object[], RowCallbackHandler)
    // is deprecated as of Spring 5.3.
    template.query(
            "SELECT a.alarmid, a.tticketid, a.tticketstate FROM events AS e "
                    + "LEFT JOIN alarms AS a ON a.alarmid = e.alarmid " + "WHERE e.eventid = ?",
            callbackHandler, eventID);

    return callbackHandler.getAlarmState();
}

From source file:org.openscore.engine.queue.repositories.ExecutionQueueRepositoryImpl.java

/**
 * Executes a parameterized SELECT, mapping each row with the supplied mapper,
 * with SQL/parameter logging and timing around the call.
 *
 * @param jdbcTemplate template to execute against
 * @param sql          the SELECT statement, with {@code ?} placeholders
 * @param rowMapper    maps each result row to a {@code T}
 * @param params       bind values for the placeholders
 * @return the mapped result rows (possibly empty, never null)
 * @throws RuntimeException rethrown after logging if the query fails
 */
private <T> List<T> doSelect0(JdbcTemplate jdbcTemplate, String sql, RowMapper<T> rowMapper, Object... params) {
    logSQL(sql, params);
    try {
        long t = System.currentTimeMillis();
        // Use the varargs overload; query(String, Object[], RowMapper) is
        // deprecated as of Spring 5.3.
        List<T> result = jdbcTemplate.query(sql, rowMapper, params);
        if (logger.isDebugEnabled())
            logger.debug(
                    "Fetched result: " + result.size() + '/' + (System.currentTimeMillis() - t) + " rows/ms");
        return result;
    } catch (RuntimeException ex) {
        logger.error("Failed to execute query: " + sql, ex);
        throw ex;
    }
}