Example usage for org.apache.solr.client.solrj SolrQuery setFields

List of usage examples for org.apache.solr.client.solrj SolrQuery setFields

Introduction

On this page you can find example usage for org.apache.solr.client.solrj SolrQuery setFields.

Prototype

public SolrQuery setFields(String... fields) 
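
As a quick orientation before the full source-file examples below, here is a minimal, self-contained sketch of the call in isolation. The query string, field names, and row count are hypothetical placeholders rather than values taken from any of the listed sources.

import org.apache.solr.client.solrj.SolrQuery;

public class SetFieldsSketch {

    public static void main(String[] args) {
        SolrQuery query = new SolrQuery("*:*");

        // setFields is varargs, so field names can be passed individually ...
        query.setFields("genome_id", "genome_name");

        // ... or as a single comma-separated string, as several of the examples below do.
        // Because setFields returns the query itself, it also chains with the other setters.
        query.setFields("genome_id,genome_name").setRows(100).setStart(0);

        // The field list ends up in the "fl" request parameter;
        // printing the query shows the assembled, URL-encoded parameters.
        System.out.println(query);
    }
}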

Source Link

Usage

From source file: edu.vt.vbi.patric.portlets.PhylogeneticTree.java

License: Apache License

@Override
public void init() throws PortletException {
    super.init();

    ObjectMapper objectMapper = new ObjectMapper();
    jsonReader = objectMapper.reader(Map.class);

    // update genome-id mapping cache
    try {
        DataApiHandler dataApi = new DataApiHandler();

        SolrQuery query = new SolrQuery("*:*");
        query.setFields("genome_name,genome_id").setRows(dataApi.MAX_ROWS);

        LOGGER.trace("[{}] {}", SolrCore.GENOME, query);

        String apiResponse = dataApi.solrQuery(SolrCore.GENOME, query);

        Map resp = jsonReader.readValue(apiResponse);
        Map respBody = (Map) resp.get("response");
        List<Genome> genomes = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class);

        StringBuilder sb = new StringBuilder();
        sb.append("var genomeMap = new Array();\n");

        for (Genome genome : genomes) {
            sb.append("genomeMap[\"").append(genome.getGenomeName().replaceAll("[\\s\\(\\)\\:\\[\\],]+", "_"))
                    .append("\"] = \"").append(genome.getId()).append("\";\n");
        }

        // second query, because the Data API caps each request at 25k rows
        query.setStart(25001);

        LOGGER.trace("[{}] {}", SolrCore.GENOME, query);

        apiResponse = dataApi.solrQuery(SolrCore.GENOME, query);
        resp = jsonReader.readValue(apiResponse);
        respBody = (Map) resp.get("response");
        genomes = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class);

        for (Genome genome : genomes) {
            sb.append("genomeMap[\"").append(genome.getGenomeName().replaceAll("[\\s\\(\\)\\:\\[\\],]+", "_"))
                    .append("\"] = \"").append(genome.getId()).append("\";\n");
        }

        PrintWriter out = new PrintWriter(new FileWriter(getPortletContext().getRealPath("/js/genomeMaps.js")));
        out.println(sb.toString());
        out.close();
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    }
}

From source file: edu.vt.vbi.patric.portlets.PhylogeneticTree.java

License: Apache License

protected void doView(RenderRequest request, RenderResponse response) throws PortletException, IOException {
    response.setContentType("text/html");

    SiteHelper.setHtmlMetaElements(request, response, "Phylogeny");

    List<Integer> phylogenyOrderIds = Arrays.asList(2037, 1385, 80840, 213849, 51291, 186802, 91347, 186826,
            118969, 356, 766, 136, 72273, 135623);

    String contextType = request.getParameter("context_type");
    String contextId = request.getParameter("context_id");

    if (contextType != null && contextId != null) {

        DataApiHandler dataApi = new DataApiHandler(request);

        List<Map<String, Object>> orderList = new ArrayList<>();
        int taxonId = 0;

        try {
            if (contextType.equals("genome")) {

                Genome genome = dataApi.getGenome(contextId);
                taxonId = genome.getTaxonId();
            } else {
                taxonId = Integer.parseInt(contextId);
            }

            // Step 1: does the lineage contain an Order rank?
            Taxonomy taxonomy = dataApi.getTaxonomy(taxonId);

            List<String> lineageRanks = taxonomy.getLineageRanks();

            if (lineageRanks.contains("order")) {
                List<Integer> lineageIds = taxonomy.getLineageIds();
                List<String> lineageNames = taxonomy.getLineageNames();

                int index = lineageRanks.indexOf("order");
                int orderTaxonId = lineageIds.get(index);

                if (phylogenyOrderIds.contains(orderTaxonId)) {
                    Map order = new HashMap();
                    order.put("name", lineageNames.get(index));
                    order.put("taxonId", lineageIds.get(index));

                    orderList.add(order);
                }
            }

            if (orderList.isEmpty()) {
                // No Order rank was found in the lineage, so
                // Step 2: look for an Order rank among the descendants
                SolrQuery query = new SolrQuery(
                        "lineage_ids:" + taxonId + " AND taxon_rank:order AND taxon_id:("
                                + StringUtils.join(phylogenyOrderIds, " OR ") + ")");
                query.setFields("taxon_id,taxon_name,taxon_rank");
                query.setRows(100);

                LOGGER.trace("[{}] {}", SolrCore.TAXONOMY.getSolrCoreName(), query.toString());
                String apiResponse = dataApi.solrQuery(SolrCore.TAXONOMY, query);

                Map resp = jsonReader.readValue(apiResponse);
                Map respBody = (Map) resp.get("response");

                List<Map> sdl = (List<Map>) respBody.get("docs");
                for (Map doc : sdl) {
                    if (doc.get("taxon_rank").equals("order")) {
                        Map node = new HashMap<>();
                        node.put("taxonId", doc.get("taxon_id"));
                        node.put("name", doc.get("taxon_name").toString());

                        orderList.add(node);
                    }
                }
            }
        } catch (MalformedURLException e) {
            LOGGER.error(e.getMessage(), e);
        }

        request.setAttribute("orderList", orderList);
        request.setAttribute("taxonId", taxonId);

        PortletRequestDispatcher prd = getPortletContext().getRequestDispatcher("/index.jsp");
        prd.include(request, response);
    }
}

From source file: edu.vt.vbi.patric.portlets.SpecialtyGeneEvidencePortlet.java

License: Apache License

@Override
protected void doView(RenderRequest request, RenderResponse response) throws PortletException, IOException {

    response.setContentType("text/html");

    SiteHelper.setHtmlMetaElements(request, response, "Specialty Gene Evidence");
    response.setTitle("Specialty Gene Evidence");

    String source = request.getParameter("sp_source");
    String sourceId = request.getParameter("sp_source_id"); //lmo0433, Rv3875

    if (source != null && !source.equals("") && sourceId != null && !sourceId.equals("")) {

        List<String> properties = Arrays.asList("property", "source", "source_id", "gene_name", "organism",
                "product", "gi", "gene_id");
        List<String> headers = Arrays.asList("Property", "Source", "Source ID", "Gene", "Organism", "Product",
                "GI Number", "Gene ID");

        // get properties of gene
        Map<String, Object> gene = null;
        SolrQuery query = new SolrQuery("source:" + source + " AND source_id:" + sourceId);

        DataApiHandler dataApi = new DataApiHandler(request);
        String apiResponse = dataApi.solrQuery(SolrCore.SPECIALTY_GENE, query);

        Map resp = jsonReader.readValue(apiResponse);
        Map respBody = (Map) resp.get("response");

        List<Map> sdl = (List<Map>) respBody.get("docs");

        if (!sdl.isEmpty()) {
            gene = sdl.get(0);
        }

        // get PATRIC feature
        GenomeFeature feature = null;
        query = new SolrQuery("source:" + source + " AND source_id:" + sourceId + " AND evidence:Literature");
        query.setFields("feature_id");

        apiResponse = dataApi.solrQuery(SolrCore.SPECIALTY_GENE_MAPPING, query);

        resp = jsonReader.readValue(apiResponse);
        respBody = (Map) resp.get("response");

        sdl = (List<Map>) respBody.get("docs");

        if (!sdl.isEmpty()) {
            Map doc = sdl.get(0);
            feature = dataApi.getFeature(doc.get("feature_id").toString());
        } else {
            query = new SolrQuery("alt_locus_tag:" + sourceId);

            apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query);

            resp = jsonReader.readValue(apiResponse);
            respBody = (Map) resp.get("response");

            List<GenomeFeature> features = dataApi.bindDocuments((List<Map>) respBody.get("docs"),
                    GenomeFeature.class);

            if (!features.isEmpty()) {
                feature = features.get(0);
            }
        }

        // get Homolog count
        int cntHomolog = 0;
        query = new SolrQuery("source:" + source + " AND source_id:" + sourceId);

        apiResponse = dataApi.solrQuery(SolrCore.SPECIALTY_GENE_MAPPING, query);

        resp = jsonReader.readValue(apiResponse);
        respBody = (Map) resp.get("response");

        cntHomolog = (Integer) respBody.get("numFound");

        // get list of evidence
        List<Map<String, Object>> specialtyGeneEvidence = new ArrayList<>();
        query = new SolrQuery("source:" + source + " AND source_id:" + sourceId);
        query.addSort("specific_organism", SolrQuery.ORDER.asc);
        query.addSort("specific_host", SolrQuery.ORDER.asc);
        query.addSort("classification", SolrQuery.ORDER.asc);

        apiResponse = dataApi.solrQuery(SolrCore.SPECIALTY_GENE_EVIDENCE, query);

        resp = jsonReader.readValue(apiResponse);
        respBody = (Map) resp.get("response");

        List<Map> evidence = (List<Map>) respBody.get("docs");
        for (Map doc : evidence) {
            specialtyGeneEvidence.add(doc);
        }

        request.setAttribute("source", source);
        request.setAttribute("sourceId", sourceId);
        request.setAttribute("gene", gene);
        request.setAttribute("properties", properties);
        request.setAttribute("headers", headers);
        request.setAttribute("feature", feature);
        request.setAttribute("cntHomolog", cntHomolog);
        request.setAttribute("specialtyGeneEvidence", specialtyGeneEvidence);

        PortletRequestDispatcher prd = getPortletContext()
                .getRequestDispatcher("/WEB-INF/jsp/overview/specialty_gene_evidence.jsp");
        prd.include(request, response);
    }
}

From source file: edu.vt.vbi.patric.portlets.SpecialtyGenePropertiesPortlet.java

License: Apache License

protected void doView(RenderRequest request, RenderResponse response) throws PortletException, IOException {

    response.setContentType("text/html");
    String cType = request.getParameter("context_type");
    String cId = request.getParameter("context_id");

    if (cType != null && cId != null && cType.equals("feature")) {

        DataApiHandler dataApi = new DataApiHandler(request);

        SolrQuery query = new SolrQuery("feature_id:" + cId);
        query.setFields(
                "evidence,property,source,source_id,organism,pmid,subject_coverage,query_coverage,identity,e_value");
        query.addSort("evidence", SolrQuery.ORDER.desc);
        query.addSort("property", SolrQuery.ORDER.asc);
        query.addSort("source", SolrQuery.ORDER.asc);

        String apiResponse = dataApi.solrQuery(SolrCore.SPECIALTY_GENE_MAPPING, query);
        Map resp = jsonReader.readValue(apiResponse);
        Map respBody = (Map) resp.get("response");

        List<SpecialtyGene> listSpecialtyGenes = dataApi.bindDocuments((List<Map>) respBody.get("docs"),
                SpecialtyGene.class);

        if (!listSpecialtyGenes.isEmpty()) {
            request.setAttribute("listSpecialtyGenes", listSpecialtyGenes);

            PortletRequestDispatcher prd = getPortletContext()
                    .getRequestDispatcher("/WEB-INF/jsp/overview/specialty_gene_properties.jsp");
            prd.include(request, response);
        } else {
            PrintWriter writer = response.getWriter();
            writer.write("<!-- no specialty gene property found -->");
            writer.close();
        }
    } else {
        PrintWriter writer = response.getWriter();
        writer.write("<p>Invalid Parameter - missing context information</p>");
        writer.close();
    }
}

From source file: edu.vt.vbi.patric.portlets.TranscriptomicsGene.java

License: Apache License

public void serveResource(ResourceRequest request, ResourceResponse response)
        throws PortletException, IOException {
    String callType = request.getParameter("callType");

    if (callType != null) {
        switch (callType) {
        case "saveParams": {

            String keyword = request.getParameter("keyword");
            DataApiHandler dataApi = new DataApiHandler(request);

            Map<String, String> key = new HashMap<>();
            key.put("keyword", "locus_tag:(" + keyword + ") OR refseq_locus_tag:(" + keyword + ") ");
            key.put("fields", "pid");

            SolrQuery query = dataApi.buildSolrQuery(key, null, null, 0, -1, false);

            String apiResponse = dataApi.solrQuery(SolrCore.TRANSCRIPTOMICS_GENE, query);

            Map resp = jsonReader.readValue(apiResponse);
            Map respBody = (Map) resp.get("response");

            List<Map> sdl = (List<Map>) respBody.get("docs");

            Set<String> sampleIds = new HashSet<>();

            for (Map doc : sdl) {
                sampleIds.add(doc.get("pid").toString());
            }

            String sId = StringUtils.join(sampleIds, ",");

            key = new HashMap();
            if (!keyword.equals("")) {
                key.put("keyword", keyword);
            }

            response.setContentType("text/html");
            PrintWriter writer = response.getWriter();

            if (!sId.equals("")) {
                key.put("sampleId", sId);
                long pk = (new Random()).nextLong();

                SessionHandler.getInstance().set(SessionHandler.PREFIX + pk,
                        jsonWriter.writeValueAsString(key));

                writer.write("" + pk);
            } else {
                writer.write("");
            }
            writer.close();

            break;
        }
        case "getTables": {

            String expId = request.getParameter("expId");
            String sampleId = request.getParameter("sampleId");
            String wsExperimentId = request.getParameter("wsExperimentId");
            String wsSampleId = request.getParameter("wsSampleId");
            String keyword = request.getParameter("keyword");
            DataApiHandler dataApi = new DataApiHandler(request);

            JSONArray sample = new JSONArray();

            if ((sampleId != null && !sampleId.equals("")) || (expId != null && !expId.equals(""))) {

                String query_keyword = "";

                if (expId != null && !expId.equals("")) {
                    query_keyword += "eid:(" + expId.replaceAll(",", " OR ") + ")";
                }

                if (sampleId != null && !sampleId.equals("")) {
                    if (query_keyword.length() > 0) {
                        query_keyword += " AND ";
                    }
                    query_keyword += "pid:(" + sampleId.replaceAll(",", " OR ") + ")";
                }

                Map<String, String> key = new HashMap<>();
                key.put("keyword", query_keyword);
                key.put("fields", "pid,expname,expmean,timepoint,mutant,strain,condition");

                SolrQuery query = dataApi.buildSolrQuery(key, null, null, 0, -1, false);

                String apiResponse = dataApi.solrQuery(SolrCore.TRANSCRIPTOMICS_COMPARISON, query);

                Map resp = jsonReader.readValue(apiResponse);
                Map respBody = (Map) resp.get("response");

                List<Map> sdl = (List<Map>) respBody.get("docs");

                for (final Map doc : sdl) {
                    final JSONObject item = new JSONObject(doc);
                    sample.add(item);
                }
            }

            // Read from JSON if collection parameter is there
            ExpressionDataCollection parser = null;
            if (wsExperimentId != null && !wsExperimentId.equals("")) {
                String token = getAuthorizationToken(request);

                parser = new ExpressionDataCollection(wsExperimentId, token);
                parser.read(ExpressionDataCollection.CONTENT_SAMPLE);
                if (wsSampleId != null && !wsSampleId.equals("")) {
                    parser.filter(wsSampleId, ExpressionDataCollection.CONTENT_SAMPLE);
                }
                // Append samples from collection to samples from DB
                sample = parser.append(sample, ExpressionDataCollection.CONTENT_SAMPLE);
            }

            String sampleList = "";
            sampleList += ((JSONObject) sample.get(0)).get("pid");

            for (int i = 1; i < sample.size(); i++) {
                sampleList += "," + ((JSONObject) sample.get(i)).get("pid");
            }

            JSONObject jsonResult = new JSONObject();
            jsonResult.put(ExpressionDataCollection.CONTENT_SAMPLE + "Total", sample.size());
            jsonResult.put(ExpressionDataCollection.CONTENT_SAMPLE, sample);
            JSONArray expression = new JSONArray();

            if ((sampleId != null && !sampleId.equals("")) || (expId != null && !expId.equals(""))) {

                String query_keyword = "";

                if (keyword != null && !keyword.equals("")) {
                    query_keyword += "(alt_locus_tag:(" + keyword + ") OR refseq_locus_tag:(" + keyword + ")) ";
                }

                if (expId != null && !expId.equals("")) {
                    if (query_keyword.length() > 0) {
                        query_keyword += " AND ";
                    }
                    query_keyword += "eid:(" + expId.replaceAll(",", " OR ") + ")";
                }

                if (sampleId != null && !sampleId.equals("")) {
                    if (query_keyword.length() > 0) {
                        query_keyword += " AND ";
                    }
                    query_keyword += "pid:(" + sampleId.replaceAll(",", " OR ") + ")";
                }

                Map<String, String> key = new HashMap<>();
                key.put("keyword", query_keyword);
                key.put("fields", "pid,refseq_locus_tag,feature_id,log_ratio,z_score");

                SolrQuery query = dataApi.buildSolrQuery(key, null, null, 0, -1, false);

                LOGGER.trace("getTables: [{}] {}", SolrCore.TRANSCRIPTOMICS_GENE.getSolrCoreName(), query);

                String apiResponse = dataApi.solrQuery(SolrCore.TRANSCRIPTOMICS_GENE, query);

                Map resp = jsonReader.readValue(apiResponse);
                Map respBody = (Map) resp.get("response");

                List<Map> sdl = (List<Map>) respBody.get("docs");

                for (final Map doc : sdl) {
                    final JSONObject item = new JSONObject(doc);
                    expression.add(item);
                }

                // TODO: re-implement when the data API removes the 25k-record limit
                int start = 0;
                int fetchedSize = sdl.size();
                while (fetchedSize == 25000) {
                    start += 25000;
                    query.setStart(start);

                    LOGGER.trace("getTables: [{}] {}", SolrCore.TRANSCRIPTOMICS_GENE.getSolrCoreName(), query);

                    final String apiResponseSub = dataApi.solrQuery(SolrCore.TRANSCRIPTOMICS_GENE, query);
                    final Map respSub = jsonReader.readValue(apiResponseSub);
                    final Map respBodySub = (Map) respSub.get("response");

                    sdl = (List<Map>) respBodySub.get("docs");
                    fetchedSize = sdl.size();

                    for (final Map doc : sdl) {
                        final JSONObject item = new JSONObject(doc);
                        expression.add(item);
                    }
                }
            }

            if (wsExperimentId != null && !wsExperimentId.equals("")) {

                parser.read(ExpressionDataCollection.CONTENT_EXPRESSION);
                if (wsSampleId != null && !wsSampleId.equals(""))
                    parser.filter(wsSampleId, ExpressionDataCollection.CONTENT_EXPRESSION);

                // Append expression from collection to expression from DB
                expression = parser.append(expression, ExpressionDataCollection.CONTENT_EXPRESSION);
            }

            JSONArray stats = getExperimentStats(dataApi, expression, sampleList, sample);
            jsonResult.put(ExpressionDataCollection.CONTENT_EXPRESSION + "Total", stats.size());
            jsonResult.put(ExpressionDataCollection.CONTENT_EXPRESSION, stats);

            response.setContentType("application/json");
            PrintWriter writer = response.getWriter();
            jsonResult.writeJSONString(writer);
            writer.close();

            break;
        }
        case "doClustering": {

            String data = request.getParameter("data");
            String g = request.getParameter("g");
            String e = request.getParameter("e");
            String m = request.getParameter("m");
            String ge = request.getParameter("ge");
            String pk = request.getParameter("pk");
            String action = request.getParameter("action");

            String folder = "/tmp/";
            String filename = folder + "tmp_" + pk + ".txt";
            String output_filename = folder + "cluster_tmp_" + pk;
            try {

                PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(filename, true)));
                out.write(data);
                out.close();

            } catch (Exception es) {
                LOGGER.error(es.getMessage(), es);
            }

            response.setContentType("text/html");
            PrintWriter writer = response.getWriter();
            if (action.equals("Run"))
                writer.write(doCLustering(filename, output_filename, g, e, m, ge).toString());

            writer.close();

            break;
        }
        case "saveState": {

            String keyType = request.getParameter("keyType");
            String pageAt = request.getParameter("pageAt");
            String sampleFilter = request.getParameter("sampleFilter");
            String regex = request.getParameter("regex");
            String regexGN = request.getParameter("regexGN");
            String upFold = request.getParameter("upFold");
            String downFold = request.getParameter("downFold");
            String upZscore = request.getParameter("upZscore");
            String downZscore = request.getParameter("downZscore");
            String significantGenes = request.getParameter("significantGenes");
            String ClusterColumnOrder = request.getParameter("ClusterColumnOrder");
            String ClusterRowOrder = request.getParameter("ClusterRowOrder");
            String heatmapState = request.getParameter("heatmapState");
            String heatmapAxis = request.getParameter("heatmapAxis");
            String colorScheme = request.getParameter("colorScheme");
            String filterOffset = request.getParameter("filterOffset");

            Map<String, String> key = new HashMap<>();
            key.put("sampleFilter", (sampleFilter == null) ? "" : sampleFilter);
            key.put("pageAt", (pageAt == null) ? "" : pageAt);
            key.put("regex", (regex == null) ? "" : regex);
            key.put("regexGN", (regexGN == null) ? "" : regexGN);
            key.put("upFold", (upFold == null) ? "" : upFold);
            key.put("downFold", (downFold == null) ? "" : downFold);
            key.put("upZscore", (upZscore == null) ? "" : upZscore);
            key.put("downZscore", (downZscore == null) ? "" : downZscore);
            key.put("significantGenes", (significantGenes == null) ? "" : significantGenes);
            key.put("ClusterRowOrder", (ClusterRowOrder == null) ? "" : ClusterRowOrder);
            key.put("ClusterColumnOrder", (ClusterColumnOrder == null) ? "" : ClusterColumnOrder);
            key.put("heatmapState", (heatmapState == null) ? "" : heatmapState);
            key.put("heatmapAxis", (heatmapAxis == null) ? "" : heatmapAxis);
            key.put("colorScheme", (colorScheme == null) ? "" : colorScheme);
            key.put("filterOffset", (filterOffset == null) ? "" : filterOffset);

            long pk = (new Random()).nextLong();
            SessionHandler.getInstance().set(SessionHandler.PREFIX + pk, jsonWriter.writeValueAsString(key));

            response.setContentType("text/html");
            PrintWriter writer = response.getWriter();
            writer.write("" + pk);
            writer.close();

            break;
        }
        case "getState": {

            String keyType = request.getParameter("keyType");
            String pk = request.getParameter("random");

            if ((pk != null) && (keyType != null)) {
                JSONArray results = new JSONArray();
                JSONObject a = new JSONObject();
                Map<String, String> key = jsonReader
                        .readValue(SessionHandler.getInstance().get(SessionHandler.PREFIX + pk));
                if (key != null) {
                    a.put("sampleFilter", key.get("sampleFilter"));
                    a.put("pageAt", key.get("pageAt"));
                    a.put("regex", key.get("regex"));
                    a.put("regexGN", key.get("regexGN"));
                    a.put("upFold", key.get("upFold"));
                    a.put("downFold", key.get("downFold"));
                    a.put("upZscore", key.get("upZscore"));
                    a.put("downZscore", key.get("downZscore"));
                    a.put("significantGenes", key.get("significantGenes"));
                    a.put("ClusterRowOrder", key.get("ClusterRowOrder"));
                    a.put("ClusterColumnOrder", key.get("ClusterColumnOrder"));
                    a.put("heatmapState", key.get("heatmapState"));
                    a.put("heatmapAxis", key.get("heatmapAxis"));
                    a.put("colorScheme", key.get("colorScheme"));
                    a.put("filterOffset", key.get("filterOffset"));
                }
                results.add(a);
                response.setContentType("application/json");
                PrintWriter writer = response.getWriter();
                results.writeJSONString(writer);
                writer.close();
            }
            break;
        }
        case "downloadFeatures": {
            String featureIds = request.getParameter("featureIds");
            String fileFormat = request.getParameter("fileFormat");
            String fileName = "Table_Gene";

            List<String> tableHeader = DownloadHelper.getHeaderForFeatures();
            List<String> tableField = DownloadHelper.getFieldsForFeatures();
            JSONArray tableSource = new JSONArray();

            DataApiHandler dataApi = new DataApiHandler(request);

            SolrQuery query = new SolrQuery("feature_id:(" + featureIds.replaceAll(",", " OR ") + ")");
            query.setFields(StringUtils.join(DownloadHelper.getFieldsForFeatures(), ","));
            query.setRows(dataApi.MAX_ROWS);

            LOGGER.trace("downloadFeatures: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query);

            final String apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query);

            final Map resp = jsonReader.readValue(apiResponse);
            final Map respBody = (Map) resp.get("response");

            final List<GenomeFeature> features = (List) dataApi.bindDocuments((List) respBody.get("docs"),
                    GenomeFeature.class);

            for (final GenomeFeature feature : features) {
                tableSource.add(feature.toJSONObject());
            }

            final ExcelHelper excel = new ExcelHelper("xssf", tableHeader, tableField, tableSource);
            excel.buildSpreadsheet();

            if (fileFormat.equalsIgnoreCase("xlsx")) {
                response.setContentType("application/octetstream");
                response.addProperty("Content-Disposition",
                        "attachment; filename=\"" + fileName + "." + fileFormat + "\"");

                excel.writeSpreadsheettoBrowser(response.getPortletOutputStream());
            } else {
                response.setContentType("application/octetstream");
                response.addProperty("Content-Disposition",
                        "attachment; filename=\"" + fileName + "." + fileFormat + "\"");

                response.getWriter().write(excel.writeToTextFile());
            }
        }
        }
    }
}

From source file: edu.vt.vbi.patric.portlets.TranscriptomicsGene.java

License: Apache License

private JSONArray getExperimentStats(DataApiHandler dataApi, JSONArray data, String samples,
        JSONArray sample_data) throws IOException {

    JSONArray results = new JSONArray();

    Map<String, ExpressionDataGene> genes = new HashMap<>();
    Map<String, String> sample = new HashMap<>();

    for (Object aSample_data : sample_data) {
        JSONObject a = (JSONObject) aSample_data;
        sample.put(a.get("pid").toString(), a.get("expname").toString());
    }

    for (final Object aData : data) {

        final JSONObject a = (JSONObject) aData;
        String id;

        if (a.containsKey("feature_id")) {
            id = (String) a.get("feature_id");
        } else if (a.containsKey("na_feature_id")) {
            id = (String) a.get("na_feature_id");
        } else {
            LOGGER.debug("data error: {}", a);
            id = "";
        }
        ExpressionDataGene b;

        if (genes.containsKey(id)) {
            b = genes.get(id);
        } else {
            b = new ExpressionDataGene(a);
        }

        b.addSamplestoGene(a, sample); // Sample HashMap is used to create absence/presence string
        genes.put(id, b);
    }

    List<String> featureIdList = new ArrayList<>();
    List<String> p2FeatureIdList = new ArrayList<>();

    JSONObject temp = new JSONObject();

    for (final Map.Entry<String, ExpressionDataGene> entry : genes.entrySet()) {

        final ExpressionDataGene value = entry.getValue();

        JSONObject a = new JSONObject();

        a.put("refseq_locus_tag", value.getRefSeqLocusTag());
        a.put("feature_id", value.getFeatureID());
        value.setSampleBinary(samples);
        a.put("sample_binary", value.getSampleBinary());
        a.put("sample_size", value.getSampleCounts());
        a.put("samples", value.getSamples());

        if (value.hasFeatureId()) {
            featureIdList.add(value.getFeatureID());
            temp.put(value.getFeatureID(), a);
        } else {
            p2FeatureIdList.add(value.getP2FeatureId());
            temp.put(value.getP2FeatureId(), a);
        }
    }

    featureIdList.remove(null);
    p2FeatureIdList.remove(null);

    LOGGER.trace("featureIdList[{}], p2FeatureIdList[{}]", featureIdList.size(), p2FeatureIdList.size());

    SolrQuery query = new SolrQuery("*:*");
    if (!featureIdList.isEmpty() && !p2FeatureIdList.isEmpty()) {
        query.addFilterQuery("feature_id:(" + StringUtils.join(featureIdList, " OR ") + ") OR p2_feature_id:("
                + StringUtils.join(p2FeatureIdList, " OR ") + ")");
    } else if (featureIdList.isEmpty() && !p2FeatureIdList.isEmpty()) {
        query.addFilterQuery("p2_feature_id:(" + StringUtils.join(p2FeatureIdList, " OR ") + ")");
    } else if (!featureIdList.isEmpty() && p2FeatureIdList.isEmpty()) {
        query.addFilterQuery("feature_id:(" + StringUtils.join(featureIdList, " OR ") + ")");
    } else {
        // this should not occur
        query.addFilterQuery("feature_id:1");
    }
    query.setFields(
            "feature_id,p2_feature_id,strand,product,accession,start,end,patric_id,alt_locus_tag,genome_name,gene");
    query.setRows(featureIdList.size() + p2FeatureIdList.size());

    LOGGER.trace("getExperimentStats:[{}] {}", SolrCore.FEATURE.getSolrCoreName(), query);

    final String apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query);

    final Map resp = jsonReader.readValue(apiResponse);
    final Map respBody = (Map) resp.get("response");

    final List<GenomeFeature> features = dataApi.bindDocuments((List<Map>) respBody.get("docs"),
            GenomeFeature.class);

    for (final GenomeFeature feature : features) {
        JSONObject json = (JSONObject) temp.get(feature.getId());
        if (json == null) {
            json = (JSONObject) temp.get("" + feature.getP2FeatureId());
        }

        json.put("feature_id", feature.getId());
        json.put("strand", feature.getStrand());
        json.put("patric_product", feature.getProduct());
        json.put("patric_accession", feature.getAccession());
        json.put("start", feature.getStart());
        json.put("end", feature.getEnd());
        json.put("alt_locus_tag", feature.getAltLocusTag());
        json.put("patric_id", feature.getPatricId());
        json.put("genome_name", feature.getGenomeName());
        json.put("gene", feature.getGene());

        // for default sorting
        json.put("accession_start", feature.getAccession() + "_" + String.format("%08d", feature.getStart()));

        results.add(json);
    }

    return results;
}

From source file: edu.vt.vbi.patric.portlets.TranscriptomicsGeneExp.java

License: Apache License

@SuppressWarnings("unchecked")
private JSONObject processCorrelation(ResourceRequest request) {

    String featureId = request.getParameter("featureId");
    String cutoffValue = request.getParameter("cutoffValue");
    String cutoffDir = request.getParameter("cutoffDir");

    JSONObject jsonResult = new JSONObject();
    DataApiHandler dataApi = new DataApiHandler(request);

    GenomeFeature feature = dataApi.getFeature(featureId);
    Map<String, Map<String, Object>> correlationMap = new HashMap<>();
    int numFound = 0;

    try {
        SolrQuery query = new SolrQuery("genome_id:" + feature.getGenomeId());
        query.setFilterQueries(
                "{!correlation fieldId=refseq_locus_tag fieldCondition=pid fieldValue=log_ratio srcId="
                        + feature.getRefseqLocusTag() + " filterCutOff=" + cutoffValue + " filterDir="
                        + cutoffDir.substring(0, 3) + " cost=101}");
        query.setRows(0).set("json.nl", "map");

        LOGGER.trace("[{}] {}", SolrCore.TRANSCRIPTOMICS_GENE.getSolrCoreName(), query.toString());
        String apiResponse = dataApi.solrQuery(SolrCore.TRANSCRIPTOMICS_GENE, query);

        Map resp = jsonReader.readValue(apiResponse);
        List<Map> transcriptomicsGenes = (List) resp.get("correlation");

        numFound = transcriptomicsGenes.size();

        for (Map doc : transcriptomicsGenes) {
            correlationMap.put(doc.get("id").toString(), doc);
        }
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    }

    jsonResult.put("total", numFound);
    JSONArray results = new JSONArray();

    if (!correlationMap.isEmpty()) {
        try {
            SolrQuery query = new SolrQuery(
                    "refseq_locus_tag:(" + StringUtils.join(correlationMap.keySet(), " OR ")
                            + ") AND genome_id:" + feature.getGenomeId());
            query.setFilterQueries("annotation:PATRIC AND feature_type:CDS");
            query.setFields(
                    "genome_id,genome_name,accession,feature_id,start,end,strand,feature_type,annotation,alt_locus_tag,refseq_locus_tag,patric_id,na_length,aa_length,protein_id,gene,product");
            query.setRows(numFound);

            LOGGER.trace("[{}] {}", SolrCore.FEATURE.getSolrCoreName(), query.toString());
            String apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query);

            Map resp = jsonReader.readValue(apiResponse);
            Map respBody = (Map) resp.get("response");

            List<GenomeFeature> features = dataApi.bindDocuments((List<Map>) respBody.get("docs"),
                    GenomeFeature.class);

            for (GenomeFeature f : features) {
                JSONObject obj = new JSONObject();
                obj.put("genome_id", f.getGenomeId());
                obj.put("genome_name", f.getGenomeName());
                obj.put("accession", f.getAccession());
                obj.put("feature_id", f.getId());
                obj.put("alt_locus_tag", f.getAltLocusTag());
                obj.put("refseq_locus_tag", f.getRefseqLocusTag());
                obj.put("patric_id", f.getPatricId());
                obj.put("gene", f.getGene());
                obj.put("annotation", f.getAnnotation());
                obj.put("feature_type", f.getFeatureType());
                obj.put("start", f.getStart());
                obj.put("end", f.getEnd());
                obj.put("na_length", f.getNaSequenceLength());
                obj.put("strand", f.getStrand());
                obj.put("protein_id", f.getProteinId());
                obj.put("aa_length", f.getProteinLength());
                obj.put("product", f.getProduct());

                Map<String, Object> corr = correlationMap.get(f.getRefseqLocusTag());
                obj.put("correlation", corr.get("correlation"));
                obj.put("count", corr.get("conditions"));

                results.add(obj);
            }
        } catch (IOException e) {
            LOGGER.error(e.getMessage(), e);
        }
    }
    jsonResult.put("results", results);

    return jsonResult;
}

From source file: edu.vt.vbi.patric.proteinfamily.FIGfamData.java

License: Apache License

@SuppressWarnings("unchecked")
public JSONArray getDetails(ResourceRequest request) throws IOException {

    JSONArray arr = new JSONArray();

    String genomeIds = request.getParameter("detailsGenomes");
    String familyIds = request.getParameter("detailsFamilyIds");
    String familyType = request.getParameter("familyType");

    LOGGER.debug("params for getDetails:{}", request.getParameterMap());
    final String familyId = familyType + "_id";

    SolrQuery query = new SolrQuery();
    query.setQuery("genome_id:(" + genomeIds + ") AND " + familyId + ":(" + familyIds + ")");
    query.setFields(StringUtils.join(DownloadHelper.getFieldsForFeatures(), ","));
    query.setFilterQueries("annotation:PATRIC AND feature_type:CDS");
    query.setRows(DataApiHandler.MAX_ROWS);

    LOGGER.debug("getDetails(): [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query);

    String apiResponse = dataApiHandler.solrQuery(SolrCore.FEATURE, query);
    Map resp = jsonReader.readValue(apiResponse);
    Map respBody = (Map) resp.get("response");

    List<GenomeFeature> features = dataApiHandler.bindDocuments((List<Map>) respBody.get("docs"),
            GenomeFeature.class);

    for (GenomeFeature feature : features) {
        arr.add(feature.toJSONObject());
    }

    return arr;
}

From source file: eu.annocultor.converters.solr.SolrDocumentTagger.java

License: Apache License

public int tag() throws Exception {
    int recordsPassed = 0;
    for (int page = 0; true; page++) {
        int queryStart = page * DOCUMENTS_PER_READ;
        int queryEnd = queryStart + DOCUMENTS_PER_READ;
        if (queryEnd > start) {

            SolrQuery solrQuery = new SolrQuery(query);
            solrQuery.setStart(queryStart);
            solrQuery.setRows(DOCUMENTS_PER_READ);
            if (FIELDS_TO_EXTRACT.length > 0) {
                solrQuery.setFields(FIELDS_TO_EXTRACT);
            }
            QueryResponse response = solrServerFrom.query(solrQuery);
            SolrDocumentList sourceDocs = response.getResults();
            System.out.println("retrieved document query OK: " + sourceDocs.getNumFound());
            if (sourceDocs.isEmpty() || page > MAX_PAGES_TO_TAG) {
                log.flush();
                report();
                return recordsPassed;
            }

            int retry = 0;
            while (retry < MAX_RETRIES) {
                try {
                    List<SolrInputDocument> destDocs = new ArrayList<SolrInputDocument>();
                    tagDocumentList(sourceDocs, destDocs);
                    recordsPassed += sourceDocs.size();
                    System.out.println("Let's try");
                    solrServerTo.add(destDocs);
                    log.println(new Date() + "Completed " + recordsPassed + " SOLR documents");
                    log.println("Passed " + originalWordCount + " original words and added "
                            + enrichmentWordCount + " enrichment words");
                    break;
                } catch (Exception e) {
                    e.printStackTrace();
                    log.println(new Date() + "Stopped at document " + recordsPassed);
                    log.println(new Date() + "Retry " + retry + " failed, keep trying");
                    log.flush();
                    Thread.sleep(60000 * retry);
                    retry++;
                } finally {
                    log.flush();
                }
            }

            if (retry >= MAX_RETRIES) {
                throw new Exception("Failed completely.");
            }
        } else {
            log.println("Skipping page " + page);
            log.flush();
        }
    }
}

From source file: eu.europeana.core.BeanQueryModelFactory.java

License: EUPL

@Override
public List<?> getDocIdList(Map<String, String[]> params, Locale locale)
        throws EuropeanaQueryException, SolrServerException {
    SolrQuery solrQuery = createFromQueryParams(params, locale);
    Integer start = solrQuery.getStart();
    if (start > 1) {
        solrQuery.setStart(start - 2);
    }
    solrQuery.setRows(3);
    solrQuery.setFields("europeana_uri");
    // Fetch results from server
    final PortalTheme theme = ThemeFilter.getTheme();
    if (theme != null) {
        solrServer.setBaseURL(theme.getSolrSelectUrl());
    }
    QueryResponse queryResponse = solrServer.query(solrQuery);
    // fetch beans
    return getDocIdsFromQueryResponse(queryResponse);
}