Example usage for org.hibernate.hql.spi QueryTranslator getSQLString

Introduction

On this page you can find example usage for org.hibernate.hql.spi QueryTranslator getSQLString.

Prototype

String getSQLString();

Document

Returns the SQL string generated by the translator.
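
The usage examples below all follow the same pattern: compile an HQL string with a QueryTranslator obtained from an ASTQueryTranslatorFactory, then read the generated SQL back with getSQLString(). Here is a minimal sketch of that pattern; it assumes the Hibernate 4.x-style four-argument createQueryTranslator (Hibernate 5 adds an EntityGraphQueryHint parameter) and that ASTQueryTranslatorFactory lives in org.hibernate.hql.internal.ast.

import java.util.Collections;

import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.hql.internal.ast.ASTQueryTranslatorFactory;
import org.hibernate.hql.spi.QueryTranslator;
import org.hibernate.hql.spi.QueryTranslatorFactory;

public final class HqlToSqlSketch {

    /** Compiles the given HQL and returns the SQL the translator produced. */
    public static String toSql(SessionFactoryImplementor factory, String hql) {
        QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
        // the query identifier (first argument) may simply repeat the HQL string
        QueryTranslator translator = translatorFactory.createQueryTranslator(hql, hql,
                Collections.EMPTY_MAP, factory);
        // no token replacements; shallow = false requests a full (non-scalar) compile
        translator.compile(Collections.EMPTY_MAP, false);
        return translator.getSQLString();
    }
}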

Usage

From source file:au.org.theark.core.dao.StudyDao.java

License:Open Source License

/**
 * This will get all the pheno data for the given subjects FOR THIS ONE CustomFieldGroup aka questionnaire (aka data set)
 * @param allTheData
 * @param search
 * @param idsToInclude
 * @return the updated list of uids that are still left after the filtering. 
 */
private List<Long> applyPhenoDataSetFilters(DataExtractionVO allTheData, Search search,
        List<Long> idsToInclude) {

    Set<QueryFilter> filters = search.getQueryFilters();

    Collection<PhenoDataSetGroup> pdsgWithFilters = getPhenoDataSetGroupsForPhenoFilters(search, filters);
    List<Long> phenoCollectionIdsSoFar = new ArrayList<Long>();

    for (PhenoDataSetGroup phenoGroup : pdsgWithFilters) {
        log.info("Pheno group: " + phenoGroup.getName());
        if (idsToInclude != null && !idsToInclude.isEmpty()) {
            String queryToGetPhenoIdsForGivenSearchAndCFGFilters = getQueryForPhenoIdsForSearchAndCFGFilters(
                    search, phenoGroup);

            if (!queryToGetPhenoIdsForGivenSearchAndCFGFilters.isEmpty()) {
                Query query = getSession().createQuery(queryToGetPhenoIdsForGivenSearchAndCFGFilters);
                query.setParameterList("idList", idsToInclude); // TODO ASAP: this should be the pheno list, not the subject UID list, now

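                // compile the HQL with the AST translator purely so the generated SQL can be logged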
                QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
                SessionFactoryImplementor factory = (SessionFactoryImplementor) getSession()
                        .getSessionFactory();
                QueryTranslator translator = translatorFactory.createQueryTranslator(query.getQueryString(),
                        query.getQueryString(), Collections.EMPTY_MAP, factory);
                translator.compile(Collections.EMPTY_MAP, false);
                log.info(translator.getSQLString());

                List<Long> phenosForThisCFG = query.list();
                phenoCollectionIdsSoFar.addAll(phenosForThisCFG);
                log.info("rows returned = " + phenoCollectionIdsSoFar.size());
            } else {
                log.info("there were no pheno custom data filters, therefore don't run filter query");
            }
        } else {
            log.info("there are no id's to filter.  therefore won't run filtering query");
        }
    }
    //now that we have all the phenoCollection IDs...get the updated list of subjects
    if (phenoCollectionIdsSoFar.isEmpty()) {
        if (!pdsgWithFilters.isEmpty()) {
            //there were no phenocollection ids returned because they were validly filtered out, so clear idsToInclude
            idsToInclude = new ArrayList<Long>();
        } else {
            //there were no filters, so just leave the list of subjects as it was
        }
    } else {
        idsToInclude = getSubjectIdsForPhenoDataIds(phenoCollectionIdsSoFar);
    }

    //now that we have the pheno collection id, we just find the data for the selected customfields

    if (!idsToInclude.isEmpty()) {
        Collection<PhenoDataSetFieldDisplay> customFieldToGet = getSelectedPhenoDataSetFieldDisplaysForSearch(
                search);//getSelectedPhenoCustomFieldDisplaysForSearch(search);
        // We have the list of phenos, and therefore the list of pheno custom data - now bring back all the custom data rows IF they have any data they need 
        if ((!phenoCollectionIdsSoFar.isEmpty()
                || (phenoCollectionIdsSoFar.isEmpty() && pdsgWithFilters.isEmpty()))
                && !customFieldToGet.isEmpty()) {
            String queryString = "select data from PhenoDataSetData data  "
                    + " left join fetch data.phenoDataSetCollection phenoDataSetCollection"
                    + " left join fetch data.phenoDataSetFieldDisplay phenoDataSetFieldDisplay "
                    + " left join fetch phenoDataSetFieldDisplay.phenoDataSetField phenoField "
                    + (((phenoCollectionIdsSoFar.isEmpty() && pdsgWithFilters.isEmpty())
                            ? (" where data.phenoDataSetCollection.linkSubjectStudy.id in (:idsToInclude) ")
                            : (" where data.phenoDataSetCollection.id in (:phenoIdsToInclude)")))
                    + " and data.phenoDataSetFieldDisplay in (:customFieldsList)"
                    + " order by data.phenoDataSetCollection.id";
            Query query2 = getSession().createQuery(queryString);
            if (phenoCollectionIdsSoFar.isEmpty() && pdsgWithFilters.isEmpty()) {
                query2.setParameterList("idsToInclude", idsToInclude);
            } else {
                query2.setParameterList("phenoIdsToInclude", phenoCollectionIdsSoFar);
            }
            query2.setParameterList("customFieldsList", customFieldToGet);

            QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
            SessionFactoryImplementor factory = (SessionFactoryImplementor) getSession().getSessionFactory();
            QueryTranslator translator = translatorFactory.createQueryTranslator(query2.getQueryString(),
                    query2.getQueryString(), Collections.EMPTY_MAP, factory);
            translator.compile(Collections.EMPTY_MAP, false);
            log.info(translator.getSQLString());
            List<PhenoDataSetData> phenoData = query2.list();

            HashMap<String, ExtractionVO> hashOfPhenosWithTheirPhenoCustomData = allTheData
                    .getPhenoCustomData();

            ExtractionVO valuesForThisPheno = new ExtractionVO();
            HashMap<String, String> map = null;
            Long previousPhenoId = null;
            //results are ordered by pheno collection id, so we can compare each row with the previous id and either add to the current ExtractionVO or start a new one
            for (PhenoDataSetData data : phenoData) {

                if (previousPhenoId == null) {
                    map = new HashMap<String, String>();
                    previousPhenoId = data.getPhenoDataSetCollection().getId();
                    valuesForThisPheno.setSubjectUid(
                            data.getPhenoDataSetCollection().getLinkSubjectStudy().getSubjectUID());
                    valuesForThisPheno.setRecordDate(data.getPhenoDataSetCollection().getRecordDate());
                    valuesForThisPheno
                            .setCollectionName(data.getPhenoDataSetCollection().getQuestionnaire().getName());
                } else if (data.getPhenoDataSetCollection().getId().equals(previousPhenoId)) {
                    //then just put the data in
                } else { //if it's a new collection, finalize the previous map and start a new ExtractionVO
                    valuesForThisPheno.setKeyValues(map);
                    hashOfPhenosWithTheirPhenoCustomData.put(("" + previousPhenoId), valuesForThisPheno);
                    previousPhenoId = data.getPhenoDataSetCollection().getId();
                    map = new HashMap<String, String>();//reset
                    valuesForThisPheno = new ExtractionVO();
                    valuesForThisPheno.setSubjectUid(
                            data.getPhenoDataSetCollection().getLinkSubjectStudy().getSubjectUID());
                    valuesForThisPheno.setRecordDate(data.getPhenoDataSetCollection().getRecordDate());
                    valuesForThisPheno
                            .setCollectionName(data.getPhenoDataSetCollection().getQuestionnaire().getName());
                }

                //if any error value, then just use that - though, yet again I really question the acceptance of error data
                if (data.getErrorDataValue() != null && !data.getErrorDataValue().isEmpty()) {
                    map.put(data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getName(),
                            data.getErrorDataValue());
                } else {
                    // Determine field type and assign key value accordingly
                    if (data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getFieldType().getName()
                            .equalsIgnoreCase(Constants.FIELD_TYPE_DATE)) {
                        map.put(data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getName(),
                                data.getDateDataValue().toString());
                    }
                    if (data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getFieldType().getName()
                            .equalsIgnoreCase(Constants.FIELD_TYPE_NUMBER)) {
                        map.put(data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getName(),
                                data.getNumberDataValue().toString());
                    }
                    if (data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getFieldType().getName()
                            .equalsIgnoreCase(Constants.FIELD_TYPE_CHARACTER)) {
                        map.put(data.getPhenoDataSetFieldDisplay().getPhenoDataSetField().getName(),
                                data.getTextDataValue());
                    }
                }
            }

            //finalize the last entered key value sets/extraction VOs
            if (map != null && previousPhenoId != null) {
                valuesForThisPheno.setKeyValues(map);
                hashOfPhenosWithTheirPhenoCustomData.put("" + previousPhenoId, valuesForThisPheno);
            }

            //can now go ahead and add these to the data VO, even though subsequent filters may still remove parts of this list
            allTheData.setPhenoCustomData(hashOfPhenosWithTheirPhenoCustomData);
        }

    }
    return idsToInclude;

}

From source file:au.org.theark.core.dao.StudyDao.java

License:Open Source License

/**
 * 
 * @param allTheData
 * @param personFields
 * @param lssFields
 * @param addressFields
 * @param phoneFields
 * @param otherIDFields
 * @param subjectCFDs
 * @param search
 * @param idsAfterFiltering
 */
private void addDataFromMegaDemographicQuery(DataExtractionVO allTheData,
        Collection<DemographicField> personFields, Collection<DemographicField> lssFields,
        Collection<DemographicField> addressFields, Collection<DemographicField> phoneFields,
        Collection<DemographicField> otherIDFields, Collection<DemographicField> linkSubjectTwinsFields,
        Collection<CustomFieldDisplay> subjectCFDs, Search search, List<Long> idsAfterFiltering) {
    log.info("in addDataFromMegaDemographicQuery"); //if there are no ids, there is no need to run this
    if ((!lssFields.isEmpty() || !personFields.isEmpty() || !addressFields.isEmpty() || !phoneFields.isEmpty()
            || !linkSubjectTwinsFields.isEmpty() || !subjectCFDs.isEmpty()) && !idsAfterFiltering.isEmpty()) { // hasEmailFields(dfs)
        //note.  filtering is happening previously...we then do the fetch when we have narrowed down the list of subjects to save a lot of processing
        String queryString = "select distinct lss " // , address, lss, email " +
                + " from LinkSubjectStudy lss "
                + ((!personFields.isEmpty()) ? " left join fetch lss.person person " : "")
                + ((!addressFields.isEmpty()) ? " left join lss.person.addresses a " : "")
                + ((!phoneFields.isEmpty()) ? " left join lss.person.phones p " : "")
                + ((!linkSubjectTwinsFields.isEmpty())
                        ? " left join lss.linkSubjectTwinsAsFirstSubject lstAsFirst  "
                        : "")
                + ((!linkSubjectTwinsFields.isEmpty())
                        ? " left join lss.linkSubjectTwinsAsSecondSubject lstAsSecond  "
                        : "")
                + " where lss.study.id = " + search.getStudy().getId() + " and lss.id in (:idsToInclude) "
                + " order by lss.subjectUID";

        Query query = getSession().createQuery(queryString);
        query.setParameterList("idsToInclude", idsAfterFiltering);
        List<LinkSubjectStudy> subjects = query.list();

        QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
        SessionFactoryImplementor factory = (SessionFactoryImplementor) getSession().getSessionFactory();
        QueryTranslator translator = translatorFactory.createQueryTranslator(query.getQueryString(),
                query.getQueryString(), Collections.EMPTY_MAP, factory);
        translator.compile(Collections.EMPTY_MAP, false);
        log.info(translator.getSQLString());

        HashMap<String, ExtractionVO> hashOfSubjectsWithTheirDemographicData = allTheData.getDemographicData();

        /* this is putting the data we extracted into a generic kind of VO doc that will be converted to an appropriate format later (such as csv/xls/pdf/xml/etc) */
        for (LinkSubjectStudy lss : subjects) {
            ExtractionVO sev = new ExtractionVO();
            sev.setKeyValues(constructKeyValueHashmap(lss, personFields, lssFields, addressFields, phoneFields,
                    otherIDFields, linkSubjectTwinsFields));
            hashOfSubjectsWithTheirDemographicData.put(lss.getSubjectUID(), sev);
        }

    }
}

From source file:com.evolveum.midpoint.repo.sql.util.HibernateToSqlTranslator.java

License:Apache License

/**
 * Do not use in production code! For testing purposes only, e.g. during a query engine upgrade.
 * Provides translation from a Hibernate HQL query to a plain SQL string.
 *
 * @param sessionFactory
 * @param hqlQueryText
 * @return SQL string, null if hqlQueryText parameter is empty.
 */
public static String toSql(SessionFactory sessionFactory, String hqlQueryText) {
    Validate.notNull(sessionFactory, "Session factory must not be null.");

    if (StringUtils.isEmpty(hqlQueryText)) {
        return null;
    }

    final QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
    final SessionFactoryImplementor factory = (SessionFactoryImplementor) sessionFactory;
    final QueryTranslator translator = translatorFactory.createQueryTranslator(hqlQueryText, hqlQueryText,
            Collections.EMPTY_MAP, factory);
    translator.compile(Collections.EMPTY_MAP, false);
    return translator.getSQLString();
}
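
A call site might look like the hypothetical snippet below, where sessionFactory is an open SessionFactory and the Person entity and HQL string are purely illustrative:

String sql = HibernateToSqlTranslator.toSql(sessionFactory,
        "select p from Person p where p.name like :name");
System.out.println(sql); // the generated SQL, with named parameters rendered as ? placeholders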