Example usage for org.apache.solr.client.solrj SolrQuery setQuery

List of usage examples for org.apache.solr.client.solrj SolrQuery setQuery

Introduction

On this page you can find example usages of org.apache.solr.client.solrj SolrQuery setQuery.

Prototype

public SolrQuery setQuery(String query) 

Source Link

Usage

From source file:edu.vt.vbi.patric.portlets.PathwayFinder.java

License:Apache License

/**
 * Builds the "EC Number" tab of the pathway finder: one row per (pathway, EC number)
 * pair with unique genome/gene counts, filtered by the supplied criteria.
 *
 * @param dataApi    handler used to issue queries against the data API
 * @param pathwayId  optional pathway id filter (null/"" to skip)
 * @param ecNumber   optional EC number filter (null/"" to skip)
 * @param annotation optional annotation-source filter (null/"" to skip)
 * @param taxonId    optional taxon filter, applied via a cross-core join to GENOME
 * @param genomeId   optional comma-separated genome id list, joined against GENOME
 * @param keyword    optional free-text query; replaces the default "*:*" main query
 * @return JSON object with "results" (row array), "total" and "unique" counts
 */
@SuppressWarnings("unchecked")
private JSONObject processEcNumberTab(DataApiHandler dataApi, String pathwayId, String ecNumber,
        String annotation, String taxonId, String genomeId, String keyword)
        throws PortletException, IOException {

    JSONObject jsonResult = new JSONObject();
    // Match-all main query; the optional filters below narrow it down.
    SolrQuery query = new SolrQuery("*:*");

    if (pathwayId != null && !pathwayId.equals("")) {
        query.addFilterQuery("pathway_id:" + pathwayId);
    }

    if (ecNumber != null && !ecNumber.equals("")) {
        query.addFilterQuery("ec_number:" + ecNumber);
    }

    if (annotation != null && !annotation.equals("")) {
        query.addFilterQuery("annotation:" + annotation);
    }

    // Taxon and genome filters go through a cross-core join on genome_id.
    if (taxonId != null && !taxonId.equals("")) {
        query.addFilterQuery(
                SolrCore.GENOME.getSolrCoreJoin("genome_id", "genome_id", "taxon_lineage_ids:" + taxonId));
    }

    if (genomeId != null && !genomeId.equals("")) {
        query.addFilterQuery(SolrCore.GENOME.getSolrCoreJoin("genome_id", "genome_id",
                "genome_id:(" + genomeId.replaceAll(",", " OR ") + ")"));
    }

    if (keyword != null && !keyword.equals("")) {
        query.setQuery(keyword);
    }

    JSONArray items = new JSONArray();
    int count_total = 0;
    int count_unique = 0;

    try {
        Set<String> listPathwayIds = new HashSet<>();
        Set<String> listEcNumbers = new HashSet<>();

        // get pathway stat: rows=0 plus a JSON facet on the combined pathway_ec field,
        // computing unique genome/feature/EC counts per bucket
        query.setRows(0).setFacet(true);
        query.add("json.facet",
                "{stat:{field:{field:pathway_ec,limit:-1,facet:{genome_count:\"unique(genome_id)\",gene_count:\"unique(feature_id)\",ec_count:\"unique(ec_number)\"}}}}");

        LOGGER.trace("processEcNumberTab: [{}] {}", SolrCore.PATHWAY.getSolrCoreName(), query);

        String apiResponse = dataApi.solrQuery(SolrCore.PATHWAY, query);
        Map resp = jsonReader.readValue(apiResponse);
        List<Map> buckets = (List<Map>) ((Map) ((Map) resp.get("facets")).get("stat")).get("buckets");

        // Keep only buckets with at least one genome; the bucket key ("val") is
        // "<pathwayId>_<ecNumber>", which we split to collect both id sets.
        Map<String, Map> mapStat = new HashMap<>();
        for (Map value : buckets) {

            if (!value.get("genome_count").toString().equals("0")) {
                mapStat.put(value.get("val").toString(), value);

                String[] pathway_ec = value.get("val").toString().split("_");
                listPathwayIds.add(pathway_ec[0]);
                listEcNumbers.add(pathway_ec[1]);
            }
        }

        // get pathway list: reference metadata for the surviving pathway ids
        SolrQuery pathwayQuery = new SolrQuery("*:*");
        if (!listPathwayIds.isEmpty()) {
            pathwayQuery.setQuery("pathway_id:(" + StringUtils.join(listPathwayIds, " OR ") + ")");

            pathwayQuery.setFields("pathway_id,pathway_name,pathway_class,ec_number,ec_description");
            // NOTE(review): sibling processGeneTab caps rows with dataApi.MAX_ROWS; the
            // hard-coded 1000000 here looks inconsistent — confirm the intended row cap.
            pathwayQuery.setRows(Math.max(1000000, listPathwayIds.size()));

            LOGGER.trace("processEcNumberTab: [{}] {}", SolrCore.PATHWAY_REF.getSolrCoreName(), pathwayQuery);

            apiResponse = dataApi.solrQuery(SolrCore.PATHWAY_REF, pathwayQuery);
            resp = jsonReader.readValue(apiResponse);
            Map respBody = (Map) resp.get("response");

            List<Map> sdl = (List<Map>) respBody.get("docs");

            // Merge reference docs with the facet stats gathered above.
            for (Map doc : sdl) {
                String aPathwayId = doc.get("pathway_id").toString();
                String aEcNumber = doc.get("ec_number").toString();
                Map stat = mapStat.get(aPathwayId + "_" + aEcNumber);

                if (stat != null && !stat.get("genome_count").toString().equals("0")) {
                    JSONObject item = new JSONObject();
                    item.put("pathway_id", aPathwayId);
                    item.put("pathway_name", doc.get("pathway_name"));
                    item.put("pathway_class", doc.get("pathway_class"));

                    float genome_count = Float.parseFloat(stat.get("genome_count").toString());
                    float gene_count = Float.parseFloat(stat.get("gene_count").toString());

                    item.put("ec_name", doc.get("ec_description"));
                    item.put("ec_number", doc.get("ec_number"));
                    item.put("gene_count", gene_count);
                    item.put("genome_count", genome_count);
                    item.put("algorithm", annotation);

                    items.add(item);
                }
            }
            count_total = items.size();
            count_unique = listEcNumbers.size();
        }
    } catch (IOException e) {
        // best-effort: on API failure the method still returns an empty result set
        LOGGER.error(e.getMessage(), e);
    }

    // Wrapping jsonResult
    try {
        jsonResult.put("total", count_total);
        jsonResult.put("results", items);
        jsonResult.put("unique", count_unique);
    } catch (Exception ex) {
        LOGGER.error(ex.getMessage(), ex);
    }

    return jsonResult;
}

From source file:edu.vt.vbi.patric.portlets.PathwayFinder.java

License:Apache License

/**
 * Builds the "Genes" tab of the pathway finder: one row per feature (gene) that maps
 * to a matching pathway/EC number, joined with feature metadata from the FEATURE core.
 *
 * @param dataApi    handler used to issue queries against the data API
 * @param pathwayId  optional pathway id filter (null/"" to skip)
 * @param ecNumber   optional comma-separated EC number filter (null/"" to skip)
 * @param annotation optional annotation-source filter (null/"" to skip)
 * @param taxonId    optional taxon filter, applied via a cross-core join to GENOME
 * @param genomeId   optional comma-separated genome id list, joined against GENOME
 * @param keyword    optional free-text query; replaces the default "*:*" main query
 * @return JSON object with "results" (row array), "total" and "unique" counts
 */
@SuppressWarnings("unchecked")
private JSONObject processGeneTab(DataApiHandler dataApi, String pathwayId, String ecNumber, String annotation,
        String taxonId, String genomeId, String keyword) throws PortletException, IOException {

    LOGGER.debug("pathwayId:{}, ecNumber:{}, annotation:{}, taxonId:{}, genomeId:{}, keyword:{}", pathwayId,
            ecNumber, annotation, taxonId, genomeId, keyword);

    JSONObject jsonResult = new JSONObject();
    // Match-all main query; the optional filters below narrow it down.
    SolrQuery query = new SolrQuery("*:*");

    if (pathwayId != null && !pathwayId.equals("")) {
        query.addFilterQuery("pathway_id:" + pathwayId);
    }

    // EC numbers arrive comma-separated; strip single quotes and OR them together.
    if (ecNumber != null && !ecNumber.equals("")) {
        query.addFilterQuery("ec_number:(" + ecNumber.replaceAll(",", " OR ").replaceAll("'", "") + ")");
    }

    if (annotation != null && !annotation.equals("")) {
        query.addFilterQuery("annotation:" + annotation);
    }

    // Taxon and genome filters go through a cross-core join on genome_id.
    if (taxonId != null && !taxonId.equals("")) {
        query.addFilterQuery(
                SolrCore.GENOME.getSolrCoreJoin("genome_id", "genome_id", "taxon_lineage_ids:" + taxonId));
    }

    if (genomeId != null && !genomeId.equals("")) {
        query.addFilterQuery(SolrCore.GENOME.getSolrCoreJoin("genome_id", "genome_id",
                "genome_id:(" + genomeId.replaceAll(",", " OR ") + ")"));
    }

    if (keyword != null && !keyword.equals("")) {
        query.setQuery(keyword);
    }

    JSONArray items = new JSONArray();
    int count_total = 0;
    int count_unique = 0;

    try {
        Set<String> listFeatureIds = new HashSet<>();

        query.setFields("pathway_id,pathway_name,feature_id,ec_number,ec_description");
        query.setRows(dataApi.MAX_ROWS);

        LOGGER.trace("processGeneTab: [{}] {}", SolrCore.PATHWAY.getSolrCoreName(), query);

        String apiResponse = dataApi.solrQuery(SolrCore.PATHWAY, query);
        Map resp = jsonReader.readValue(apiResponse);
        Map respBody = (Map) resp.get("response");

        List<Map> sdl = (List<Map>) respBody.get("docs");

        // Index pathway docs by feature id so they can be merged with feature metadata.
        Map<String, Map> mapStat = new HashMap<>();
        for (Map doc : sdl) {

            mapStat.put(doc.get("feature_id").toString(), doc);
            listFeatureIds.add(doc.get("feature_id").toString());
        }

        // get pathway list: fetch feature metadata for every collected feature id
        if (!listFeatureIds.isEmpty()) {
            SolrQuery featureQuery = new SolrQuery(
                    "feature_id:(" + StringUtils.join(listFeatureIds, " OR ") + ")");
            featureQuery.setFields(
                    "genome_name,genome_id,accession,alt_locus_tag,refseq_locus_tag,patric_id,feature_id,gene,product");
            featureQuery.setRows(Math.max(dataApi.MAX_ROWS, listFeatureIds.size()));

            LOGGER.trace("processGeneTab: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), featureQuery);

            apiResponse = dataApi.solrQuery(SolrCore.FEATURE, featureQuery);
            resp = jsonReader.readValue(apiResponse);
            respBody = (Map) resp.get("response");

            List<GenomeFeature> features = dataApi.bindDocuments((List<Map>) respBody.get("docs"),
                    GenomeFeature.class);

            for (GenomeFeature feature : features) {
                String featureId = feature.getId();
                // stat is non-null here: featureQuery only asked for ids taken from mapStat
                Map stat = mapStat.get(featureId);

                JSONObject item = new JSONObject();
                item.put("genome_name", feature.getGenomeName());
                item.put("genome_id", feature.getGenomeId());
                item.put("accession", feature.getAccession());
                item.put("feature_id", feature.getId());
                item.put("alt_locus_tag", feature.getAltLocusTag());
                item.put("refseq_locus_tag", feature.getRefseqLocusTag());
                item.put("algorithm", annotation);
                item.put("patric_id", feature.getPatricId());
                item.put("gene", feature.getGene());
                item.put("product", feature.getProduct());

                item.put("ec_name", stat.get("ec_description"));
                item.put("ec_number", stat.get("ec_number"));
                item.put("pathway_id", stat.get("pathway_id"));
                item.put("pathway_name", stat.get("pathway_name"));

                items.add(item);
            }
            count_total = items.size();
            count_unique = count_total;
        }
    } catch (IOException e) {
        // best-effort: on API failure the method still returns an empty result set
        LOGGER.error(e.getMessage(), e);
    }

    // Wrapping jsonResult
    try {
        jsonResult.put("total", count_total);
        jsonResult.put("results", items);
        jsonResult.put("unique", count_unique);
    } catch (Exception ex) {
        LOGGER.error(ex.getMessage(), ex);
    }

    return jsonResult;
}

From source file:edu.vt.vbi.patric.portlets.TaxonomyTreePortlet.java

License:Apache License

/**
 * Serves taxonomy-tree resource requests as JSON. The "mode" parameter selects the
 * payload: "txtree" (genome tree), "azlist" (genome list), "tgm" (taxon-genome
 * mapping), or "search" (name search within the given taxon); anything else returns
 * an empty array.
 *
 * NOTE(review): a missing "mode" or "taxonId" parameter would throw here (NPE in the
 * switch / NumberFormatException) — presumably callers always supply both; confirm.
 */
public void serveResource(ResourceRequest request, ResourceResponse response)
        throws PortletException, IOException {
    int taxonId = Integer.parseInt(request.getParameter("taxonId"));
    String mode = request.getParameter("mode");

    DataApiHandler dataApi = new DataApiHandler(request);
    JSONArray tree;
    response.setContentType("application/json");

    switch (mode) {
    case "txtree":
        tree = OrganismTreeBuilder.buildGenomeTree(dataApi, taxonId);
        tree.writeJSONString(response.getWriter());
        break;
    case "azlist":
        tree = OrganismTreeBuilder.buildGenomeList(dataApi, taxonId);
        tree.writeJSONString(response.getWriter());
        break;
    case "tgm":
        tree = OrganismTreeBuilder.buildTaxonGenomeMapping(dataApi, taxonId);
        tree.writeJSONString(response.getWriter());
        break;
    case "search":
        String searchOn = request.getParameter("searchon");
        String keyword = request.getParameter("query");

        tree = new JSONArray();
        SolrQuery query = new SolrQuery();
        if (searchOn.equals("txtree")) {
            // Search taxa by name, restricted to taxa with genomes under this taxon.
            query.setQuery("taxon_name:" + keyword + " AND genomes:[1 TO *] AND lineage_ids:" + taxonId);
            query.setRows(10000).addField("taxon_name,taxon_id");

            String apiResponse = dataApi.solrQuery(SolrCore.TAXONOMY, query);
            Map resp = jsonReader.readValue(apiResponse);
            Map respBody = (Map) resp.get("response");

            List<Taxonomy> taxonomyList = dataApi.bindDocuments((List<Map>) respBody.get("docs"),
                    Taxonomy.class);

            for (Taxonomy taxon : taxonomyList) {
                JSONObject item = new JSONObject();
                item.put("display_name", taxon.getTaxonName());
                item.put("taxon_id", taxon.getId());

                tree.add(item);
            }
        } else {
            // searchOn is "azlist": search genomes by name under this taxon.
            // NOTE(review): this queries SolrCore.TAXONOMY but binds Genome documents —
            // looks like it should target a genome core; confirm intended core.
            query.setQuery("genome_name:" + keyword + " AND taxon_lineage_ids:" + taxonId);
            query.setRows(10000).addField("genome_name,genome_id");

            String apiResponse = dataApi.solrQuery(SolrCore.TAXONOMY, query);
            Map resp = jsonReader.readValue(apiResponse);
            Map respBody = (Map) resp.get("response");

            List<Genome> genomeList = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class);

            for (Genome genome : genomeList) {
                JSONObject item = new JSONObject();
                item.put("display_name", genome.getGenomeName());
                item.put("genome_id", genome.getId());

                tree.add(item);
            }
        }

        // Search mode wraps the hits with the keyword and total count.
        JSONObject result = new JSONObject();
        result.put("genomeList", tree);
        result.put("keyword", keyword);
        result.put("totalCount", tree.size());

        result.writeJSONString(response.getWriter());
        break;
    default:
        tree = new JSONArray();
        tree.writeJSONString(response.getWriter());
        break;
    }
}

From source file:edu.vt.vbi.patric.portlets.TranscriptomicsGeneExp.java

License:Apache License

/**
 * Builds the summary JSON for a feature's transcriptomics expression data: the
 * matching expression documents plus log-ratio/z-score range-facet histograms and
 * strain/mutant/condition field-facet counts.
 *
 * @param request portlet request carrying featureId, sampleId, keyword, log_ratio
 *                and zscore parameters
 * @return JSON object with "features" plus, when present in the Solr response,
 *         "log_ratio", "z_score", "strain", "mutant" and "condition" entries
 */
@SuppressWarnings("unchecked")
private JSONObject processSummary(ResourceRequest request) {
    JSONObject jsonResult = new JSONObject();

    String paramFeatureId = request.getParameter("featureId");
    String paramSampleId = request.getParameter("sampleId");
    String paramKeyword = request.getParameter("keyword");
    String paramLogRatio = request.getParameter("log_ratio");
    String paramZScore = request.getParameter("zscore");

    try {
        DataApiHandler dataApi = new DataApiHandler(request);

        SolrQuery query = new SolrQuery();

        // Main query: optional keyword AND the mandatory feature id.
        if (paramKeyword != null && !paramKeyword.equals("")) {
            query.setQuery(paramKeyword + " AND feature_id:" + paramFeatureId);
        } else {
            query.setQuery("feature_id:" + paramFeatureId);
        }

        // Optional filters: sample ids, |log_ratio| >= threshold, |z_score| >= threshold.
        if (paramSampleId != null && !paramSampleId.equals("")) {
            String[] pids = paramSampleId.split(",");

            query.addFilterQuery("pid:(" + StringUtils.join(pids, " OR ") + ")");
        }
        if (paramLogRatio != null && !paramLogRatio.equals("") && !paramLogRatio.equals("0")) {
            query.addFilterQuery(
                    "log_ratio:[* TO -" + paramLogRatio + "] OR log_ratio:[" + paramLogRatio + " TO *]");
        }
        if (paramZScore != null && !paramZScore.equals("") && !paramZScore.equals("0")) {
            query.addFilterQuery("z_score:[* TO -" + paramZScore + "] OR z_score:[" + paramZScore + " TO *]");
        }

        // Range facets over [-2, 2] step 0.5 with before/after overflow buckets,
        // plus field facets on strain/mutant/condition.
        query.setRows(dataApi.MAX_ROWS);
        query.setFacet(true).setFacetMinCount(1).set("json.nl", "map");
        query.set("facet.range.other", "before").add("facet.range.other", "after");
        query.addNumericRangeFacet("log_ratio", -2, 2, 0.5).addNumericRangeFacet("z_score", -2, 2, 0.5);
        query.addFacetField("strain").addFacetField("mutant").addFacetField("condition");

        LOGGER.debug("[{}] {}", SolrCore.TRANSCRIPTOMICS_GENE.getSolrCoreName(), query.toString());

        String apiResponse = dataApi.solrQuery(SolrCore.TRANSCRIPTOMICS_GENE, query);

        Map resp = jsonReader.readValue(apiResponse);
        Map respBody = (Map) resp.get("response");

        List<Map> sdl = (List<Map>) respBody.get("docs");

        jsonResult.put("features", buildExpressionFeatures(sdl));

        Map facets = (Map) resp.get("facet_counts");

        Map facetRanges = (Map) facets.get("facet_ranges");
        if (facetRanges.containsKey("log_ratio")) {
            jsonResult.put("log_ratio", buildRangeFacetList((Map) facetRanges.get("log_ratio")));
        }
        if (facetRanges.containsKey("z_score")) {
            jsonResult.put("z_score", buildRangeFacetList((Map) facetRanges.get("z_score")));
        }

        Map facetFields = (Map) facets.get("facet_fields");
        if (facetFields.containsKey("strain")) {
            jsonResult.put("strain", buildCategoryCountList((Map) facetFields.get("strain")));
        }
        if (facetFields.containsKey("mutant")) {
            jsonResult.put("mutant", buildCategoryCountList((Map) facetFields.get("mutant")));
        }
        if (facetFields.containsKey("condition")) {
            jsonResult.put("condition", buildCategoryCountList((Map) facetFields.get("condition")));
        }
    } catch (IOException e) {
        // best-effort: on API failure the method still returns what was assembled so far
        LOGGER.error(e.getMessage(), e);
    }

    return jsonResult;
}

/** Maps raw expression docs to the JSON feature entries expected by the front end. */
@SuppressWarnings("unchecked")
private JSONArray buildExpressionFeatures(List<Map> sdl) {
    JSONArray features = new JSONArray();
    for (Map doc : sdl) {
        JSONObject feature = new JSONObject();
        feature.put("exp_accession", doc.get("accession"));
        // feature.put("exp_channels", doc.get(""));
        feature.put("exp_condition", doc.get("condition"));
        feature.put("exp_id", doc.get("eid"));
        feature.put("exp_locustag", doc.get("refseq_locus_tag"));
        feature.put("exp_mutant", doc.get("mutant"));
        feature.put("exp_name", doc.get("expname"));
        feature.put("exp_organism", doc.get("organism"));
        feature.put("exp_pavg", doc.get("avg_intensity"));
        feature.put("exp_platform", doc.get("")); // ??
        feature.put("exp_pratio", doc.get("log_ratio"));
        feature.put("exp_samples", doc.get("")); // ??
        feature.put("exp_strain", doc.get("")); // ??
        feature.put("exp_timepoint", doc.get("timepoint"));
        feature.put("exp_zscore", doc.get("z_score"));
        // feature.put("figfam_id", doc.get("")); // ??
        feature.put("locus_tag", doc.get("alt_locus_tag"));
        feature.put("feature_id", doc.get("feature_id"));
        feature.put("pid", doc.get("pid"));
        feature.put("pmid", doc.get("pmid"));

        features.add(feature);
    }
    return features;
}

/** Converts a facet map ({category -> count}) into a list of {category, count} JSON objects. */
@SuppressWarnings("unchecked")
private List<JSONObject> buildCategoryCountList(Map facetCounts) {
    List<JSONObject> list = new ArrayList<>();
    for (Map.Entry<String, Integer> entry : (Iterable<Map.Entry>) facetCounts.entrySet()) {
        JSONObject json = new JSONObject();
        json.put("category", entry.getKey());
        json.put("count", entry.getValue());

        list.add(json);
    }
    return list;
}

/**
 * Converts a range-facet response into {category, count} entries, folding the
 * "before"/"after" overflow counts into the "-2.0"/"2.0" edge buckets and creating
 * those buckets when the response did not include them.
 */
@SuppressWarnings("unchecked")
private List<JSONObject> buildRangeFacetList(Map facetRange) {
    final int before = (Integer) facetRange.get("before");
    final int after = (Integer) facetRange.get("after");

    List<JSONObject> list = buildCategoryCountList((Map) facetRange.get("counts"));

    boolean hasMinBucket = false;
    boolean hasMaxBucket = false;
    for (JSONObject entry : list) {
        if (entry.get("category").equals("-2.0")) {
            entry.put("count", ((Integer) entry.get("count") + before));
            hasMinBucket = true;
        } else if (entry.get("category").equals("2.0")) {
            entry.put("count", ((Integer) entry.get("count") + after));
            hasMaxBucket = true;
        }
    }
    if (!hasMinBucket) {
        JSONObject json = new JSONObject();
        json.put("category", "-2.0");
        json.put("count", before);
        list.add(json);
    }
    if (!hasMaxBucket) {
        JSONObject json = new JSONObject();
        json.put("category", "2.0");
        json.put("count", after);
        list.add(json);
    }
    return list;
}

From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java

License:Apache License

/**
 * Looks up PATRIC CDS features for the requested genome and protein-family ids and
 * writes them to {@code writer} as a JSON object of the form {"data": [feature...]}.
 */
@SuppressWarnings("unchecked")
public void getLocusTags(ResourceRequest request, PrintWriter writer) {

    JSONArray featureArray = new JSONArray();

    DataApiHandler dataApi = new DataApiHandler(request);
    try {
        final String familyType = request.getParameter("familyType");
        final String familyIdField = familyType + "_id";

        // Restrict to the requested genomes and families, PATRIC CDS annotations only.
        SolrQuery featureQuery = new SolrQuery("genome_id:(" + request.getParameter("genomeIds") + ") AND "
                + familyIdField + ":(" + request.getParameter("familyIds") + ")");
        featureQuery.setFilterQueries("annotation:PATRIC AND feature_type:CDS");
        featureQuery.addField("feature_id,patric_id,refseq_locus_tag,alt_locus_tag");
        featureQuery.setRows(DataApiHandler.MAX_ROWS);

        LOGGER.debug("getLocusTags(): [{}] {}", SolrCore.FEATURE.toString(), featureQuery);

        Map response = jsonReader.readValue(dataApi.solrQuery(SolrCore.FEATURE, featureQuery));
        Map responseBody = (Map) response.get("response");

        List<GenomeFeature> features = dataApi.bindDocuments((List<Map>) responseBody.get("docs"),
                GenomeFeature.class);

        for (GenomeFeature feature : features) {
            featureArray.add(feature.toJSONObject());
        }
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    }

    // Wrap the (possibly empty) feature list and stream it to the caller.
    JSONObject payload = new JSONObject();
    payload.put("data", featureArray);
    try {
        payload.writeJSONString(writer);
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    }
}

From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java

License:Apache License

/**
 * Fetches full PATRIC CDS feature details for the requested genomes and protein
 * families and returns them as a JSON array of feature objects.
 */
@SuppressWarnings("unchecked")
public JSONArray getDetails(ResourceRequest request) throws IOException {

    String genomeIds = request.getParameter("detailsGenomes");
    String familyIds = request.getParameter("detailsFamilyIds");
    String familyType = request.getParameter("familyType");

    LOGGER.debug("params for getDetails:{}", request.getParameterMap());
    final String familyId = familyType + "_id";

    // Restrict to the requested genomes/families, PATRIC CDS annotations only.
    SolrQuery query = new SolrQuery("genome_id:(" + genomeIds + ") AND " + familyId + ":(" + familyIds + ")");
    query.setFilterQueries("annotation:PATRIC AND feature_type:CDS");
    query.setFields(StringUtils.join(DownloadHelper.getFieldsForFeatures(), ","));
    query.setRows(DataApiHandler.MAX_ROWS);

    LOGGER.debug("getDetails(): [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query);

    Map response = jsonReader.readValue(dataApiHandler.solrQuery(SolrCore.FEATURE, query));
    Map responseBody = (Map) response.get("response");

    JSONArray details = new JSONArray();
    for (GenomeFeature feature : dataApiHandler.bindDocuments((List<Map>) responseBody.get("docs"),
            GenomeFeature.class)) {
        details.add(feature.toJSONObject());
    }

    return details;
}

From source file:eu.annocultor.data.sources.SolrServerDataSource.java

License:Apache License

/**
 * Executes {@code query} against the Solr server and pushes every returned document
 * through the flat converter handler, field by field.
 *
 * @return true if at least one record was processed
 */
@Override
protected boolean parseQuery(DefaultHandler handler, String query, Path recordSeparatingPath,
        Path recordIdentifyingPath) throws Exception {

    ConverterHandlerDataObjects flatHandler = makeHandler(handler, recordSeparatingPath);

    SolrQuery solrQuery = new SolrQuery(query);
    solrQuery.setQueryType("advanced");
    solrQuery.setStart(0);
    solrQuery.setRows(500);
    solrQuery.setParam("spellcheck", false);

    System.out.println("query: " + solrQuery);
    QueryResponse response = server.query(solrQuery);
    System.out.println(response.getResponseHeader());
    System.out.println(response.getResults().size());

    boolean sawRecord = false;
    for (SolrDocument doc : response.getResults()) {

        flatHandler.startDocument();
        sawRecord = true;
        String id = doc.getFirstValue(idField).toString();
        flatHandler.attemptDataObjectChange(id);

        // Forward every (field, value) pair; null from preprocessValue means "skip".
        for (String fieldName : doc.getFieldNames()) {
            for (Object value : doc.getFieldValues(fieldName)) {
                String preprocessedValue = preprocessValue(fieldName, value.toString());
                if (preprocessedValue == null) {
                    continue;
                }
                flatHandler.addField(fieldName, new LiteralValue(preprocessedValue));
                System.out.println(id + "-" + fieldName + "-" + preprocessedValue);
            }
        }
        flatHandler.endDocument();
    }
    return sawRecord;
}

From source file:eu.clarin.cmdi.vlo.importer.MetadataImporter.java

/**
 * Update "days since last import" field for all Solr records of dataRoot.
 * Notice that it will not touch records that have a "last seen" value newer
 * than today. Therefore this should be called <em>after</em> normal
 * processing of data root!
 *
 * @param dataRoot data root whose records should be refreshed
 * @throws SolrServerException if Solr reports an error while querying or updating
 * @throws IOException on a communication failure with the Solr server
 */
private void updateDaysSinceLastImport(DataRoot dataRoot) throws SolrServerException, IOException {
    LOG.info("Updating \"days since last import\" in Solr for: {}", dataRoot.getOriginName());

    SolrQuery query = new SolrQuery();
    query.setQuery(
            //we're going to process all records in the current data root...
            FacetConstants.FIELD_DATA_PROVIDER + ":" + ClientUtils.escapeQueryChars(dataRoot.getOriginName())
                    + " AND "
                    // ...that have a "last seen" value _older_ than today (on update/initialisation all records get 0 so we can skip the rest)
                    + FacetConstants.FIELD_LAST_SEEN + ":[* TO NOW-1DAY]");
    query.setFields(FacetConstants.FIELD_ID, FacetConstants.FIELD_LAST_SEEN);
    final int fetchSize = 1000;
    query.setRows(fetchSize);
    QueryResponse rsp = solrServer.query(query);

    final long totalResults = rsp.getResults().getNumFound();
    final LocalDate nowDate = LocalDate.now();

    final int docsListSize = config.getMaxDocsInList();
    List<SolrInputDocument> updateDocs = new ArrayList<>(docsListSize);

    // primitive boolean — the boxed Boolean used previously gains nothing here
    boolean updatedDocs = false;
    int offset = 0;

    // Page through the matching records. The updates below only change the
    // days-since-last-seen field, which the query does not filter on, so paging
    // stays stable while we write.
    while (offset < totalResults) {
        query.setStart(offset);

        for (SolrDocument doc : solrServer.query(query).getResults()) {
            updatedDocs = true;

            String recordId = (String) doc.getFieldValue(FacetConstants.FIELD_ID);
            Date lastImportDate = (Date) doc.getFieldValue(FacetConstants.FIELD_LAST_SEEN);
            LocalDate oldDate = lastImportDate.toInstant().atZone(ZoneId.systemDefault()).toLocalDate();
            long daysSinceLastSeen = DAYS.between(oldDate, nowDate);

            // atomic ("partial") update document: only sets the computed field
            SolrInputDocument updateDoc = new SolrInputDocument();
            updateDoc.setField(FacetConstants.FIELD_ID, recordId);

            Map<String, Long> partialUpdateMap = new HashMap<>();
            partialUpdateMap.put("set", daysSinceLastSeen);
            updateDoc.setField(FacetConstants.FIELD_DAYS_SINCE_LAST_SEEN, partialUpdateMap);

            updateDocs.add(updateDoc);

            // flush in batches of docsListSize, surfacing any asynchronous server error
            if (updateDocs.size() == docsListSize) {
                solrServer.add(updateDocs);
                if (serverError != null) {
                    throw new SolrServerException(serverError);
                }
                updateDocs = new ArrayList<>(docsListSize);
            }
        }
        offset += fetchSize;
        LOG.info("Updating \"days since last import\": {} out of {} records updated", offset, totalResults);
    }

    // flush the final partial batch
    if (!updateDocs.isEmpty()) {
        solrServer.add(updateDocs);
        if (serverError != null) {
            throw new SolrServerException(serverError);
        }
    }

    if (updatedDocs) {
        solrServer.commit();
    }

    LOG.info("Updating \"days since last import\" done.");
}

From source file:eu.europeana.core.BeanQueryModelFactory.java

License:EUPL

/**
 * Builds a "more like this" query that matches exactly one record by its Europeana URI.
 *
 * @throws EuropeanaQueryException when no URI was supplied
 */
@Override
public SolrQuery createFromUri(String europeanaUri) throws EuropeanaQueryException {
    if (europeanaUri == null) {
        throw new EuropeanaQueryException(QueryProblem.MALFORMED_URL.toString()); // Expected uri query parameter
    }
    // NOTE(review): the uri is embedded in a quoted phrase unescaped — presumably
    // uris never contain '"'; confirm.
    final SolrQuery uriQuery = new SolrQuery("europeana_uri:\"" + europeanaUri + "\"");
    uriQuery.setQueryType(QueryType.MORE_LIKE_THIS_QUERY.toString());
    return uriQuery;
}

From source file:eu.europeana.core.BeanQueryModelFactory.java

License:EUPL

/**
 * Resolves the record URI from the "uri" (preferred) or "id" request parameter and
 * returns the full-bean view built from a "more like this" query on that URI.
 *
 * @throws EuropeanaQueryException when neither parameter is present
 */
@Override
public FullBeanView getFullResultView(Map<String, String[]> params, Locale locale)
        throws EuropeanaQueryException, SolrServerException {
    final String[] uriValues = params.get("uri");
    final String[] idValues = params.get("id");

    final String europeanaUri;
    if (uriValues != null) {
        europeanaUri = uriValues[0];
    } else if (idValues != null) {
        europeanaUri = idValues[0];
    } else {
        throw new EuropeanaQueryException(QueryProblem.MALFORMED_URL.toString()); // Expected uri query parameter
    }

    SolrQuery solrQuery = new SolrQuery("europeana_uri:\"" + europeanaUri + "\"");
    solrQuery.setQueryType(QueryType.MORE_LIKE_THIS_QUERY.toString());
    return new FullBeanViewImpl(solrQuery, getSolrResponse(solrQuery, false, params), params, locale);
}