Example usage for org.apache.solr.client.solrj.response QueryResponse getNextCursorMark

List of usage examples for org.apache.solr.client.solrj.response QueryResponse getNextCursorMark

Introduction

On this page you can find example usages of org.apache.solr.client.solrj.response QueryResponse.getNextCursorMark.

Prototype

public String getNextCursorMark() 

Source Link

Usage

From source file:bamboo.trove.rule.RuleChangeUpdateManager.java

License:Apache License

/**
 * Runs the given rule query against Solr using cursor paging and hands every
 * page of results to {@link #distributeResponse}. Blocks at the end until all
 * distributed work has been processed so that accurate write statistics can be
 * collected via the supplied work log.
 *
 * @param query   the Solr query for the rule; the cursor-mark parameter is set here
 * @param workLog accumulates search/write statistics for this rule run
 * @throws SolrServerException on Solr query failure
 * @throws IOException         on communication failure
 */
private void processQuery(SolrQuery query, WorkLog workLog) throws SolrServerException, IOException {
    log.debug("Query for rule : {}", query.toString());
    Timer.Context context = getTimer(getName() + ".processQuery").time();
    // Commit first so documents written just before this run are visible/ignorable.
    client.commit();

    String cursorMark = CursorMarkParams.CURSOR_MARK_START;
    boolean finished = false;
    do {
        query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
        Timer.Context queryTimer = getTimer(getName() + ".query").time();

        QueryResponse rsp = client.query(query);
        workLog.ranSearch();
        SolrDocumentList page = rsp.getResults();
        log.debug("Found {} (of {} docs) in QT = {} ms", page.size(), page.getNumFound(),
                rsp.getQTime());
        String nextCursorMark = rsp.getNextCursorMark();
        // Solr signals the final page by returning an unchanged cursor mark.
        if (nextCursorMark == null || nextCursorMark.equals(cursorMark)) {
            finished = true;
        }
        // The last page still carries results, so distribute before exiting.
        distributeResponse(page, workLog);
        cursorMark = nextCursorMark;
        queryTimer.stop();
    } while (!finished);

    // We do this at a higher level too, so this would seem redundant. There is a trade-off. Allowing parallelism
    // between rules means rules can sometimes be re-processed redundantly. The higher level waitUntilCaughtUp() will
    // ensure we never process rules at the same time rules are being changed.
    // By doing a wait here as well however, we can collect accurate statistics about how much actual write activity we
    // are really generating by passing the workLog into the work pool.
    // When we have a better awareness of the typical work patterns it might be worth disabling this method call and
    // then stop collecting the metrics to improve throughput.
    waitUntilCaughtUp();
    context.stop();
}

From source file:com.frank.search.solr.core.SolrTemplate.java

License:Apache License

/**
 * Opens a cursor over all documents matching {@code query}, converting each
 * page of the Solr response into beans of type {@code clazz}. Paging is driven
 * by the {@link DelegatingCursor}, which re-issues the query with the next
 * cursor mark returned from each load.
 *
 * @param query the query to execute
 * @param clazz target bean type for result conversion
 * @return an open cursor positioned before the first result
 */
public <T> Cursor<T> queryForCursor(Query query, final Class<T> clazz) {

    return new DelegatingCursor<T>(queryParsers.getForClass(query.getClass()).constructSolrQuery(query)) {

        @Override
        protected PartialResult<T> doLoad(SolrQuery nativeQuery) {

            QueryResponse response = executeSolrQuery(nativeQuery);
            // NOTE(review): a null response yields an empty cursor mark ("") and no
            // beans — presumably the delegating cursor treats that as exhausted; confirm.
            if (response == null) {
                return new PartialResult<T>("", Collections.<T>emptyList());
            }

            return new PartialResult<T>(response.getNextCursorMark(),
                    convertQueryResponseToBeans(response, clazz));
        }

    }.open();
}

From source file:com.hurence.logisland.service.solr.api.SolrClientService.java

License:Apache License

/**
 * Copies every document from collection {@code src} into collection {@code dst}
 * using Solr cursor paging (1000 docs per page), stripping the internal
 * {@code _version_} field so the documents can be re-added cleanly.
 *
 * <p>BUG FIX: the original looped with
 * {@code do { ... } while (cursorMark.equals(response.getNextCursorMark()))}
 * and never updated {@code cursorMark} — the termination test was inverted, so
 * at most one page was ever copied (or it spun forever on an exhausted cursor).
 * The loop now advances the cursor mark and stops when it no longer changes.
 *
 * @param reindexScrollTimeout unused here; kept for interface compatibility
 * @param src source collection name
 * @param dst destination collection name
 * @throws DatastoreClientServiceException wrapping any Solr/IO failure
 */
@Override
public void copyCollection(String reindexScrollTimeout, String src, String dst)
        throws DatastoreClientServiceException {
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setRows(1000);
    solrQuery.setQuery("*:*");
    // Cursor paging requires a deterministic sort that includes the unique key.
    solrQuery.addSort("id", SolrQuery.ORDER.asc);
    String cursorMark = CursorMarkParams.CURSOR_MARK_START;
    boolean done = false;
    QueryResponse response;
    try {
        while (!done) {
            solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
            response = getClient().query(src, solrQuery);
            List<SolrInputDocument> documents = new ArrayList<>();
            for (SolrDocument document : response.getResults()) {
                SolrInputDocument inputDocument = getConverter().toSolrInputDocument(document);
                // _version_ must not be re-submitted or optimistic locking rejects the add.
                inputDocument.removeField("_version_");
                documents.add(inputDocument);
            }

            if (!documents.isEmpty()) {
                getClient().add(dst, documents);
            }

            String nextCursorMark = response.getNextCursorMark();
            // Solr signals exhaustion by returning the same cursor mark again.
            if (cursorMark.equals(nextCursorMark)) {
                done = true;
            }
            cursorMark = nextCursorMark;
        }

        getClient().commit(dst);
    } catch (Exception e) {
        throw new DatastoreClientServiceException(e);
    }
}

From source file:de.hebis.it.hds.gnd.out.AutorityRecordFileWriter.java

License:Open Source License

/**
 * Loops over all entries in the repository and writes them to the output file.
 * Pages through the index with Solr cursor marks and stops when the cursor no
 * longer advances, or early once {@code maxCount} records have been printed.
 *
 * @throws RuntimeException wrapping any Solr/IO failure (query context included)
 */
private void listAllEntries() {
    String cursorMark = "*"; // "*" is the cursor-paging start marker
    String nextCursorMark = null;
    QueryResponse rsp = null;
    SolrQuery query = new SolrQuery("id:*");
    query.setRows(Integer.valueOf(config.getProperty("StepSizeForExport", "100")));
    // Cursor paging requires a deterministic sort on the unique key.
    query.setSort(SortClause.asc("id"));
    do {
        // start with '*' in the first iteration, then use the last position
        if (nextCursorMark != null) {
            cursorMark = nextCursorMark;
        }
        // use the last position as new start value (varargs — no array wrapper needed)
        query.set("cursorMark", cursorMark);
        if (LOG.isTraceEnabled()) {
            LOG.trace(query.toString());
        }
        // execute the query
        try {
            rsp = server.query(query);
            if (rsp.getStatus() != 0)
                throw new SolrServerException("Responsestatus: " + rsp.getStatus());
        } catch (SolrServerException | IOException e) {
            // Wrap and rethrow with context. The original also called
            // e.printStackTrace(), which duplicated the stack trace on stderr;
            // the cause is preserved in the wrapper, so that was removed.
            throw new RuntimeException("The index can't eval \"" + query.toString() + "\".", e);
        }
        nextCursorMark = rsp.getNextCursorMark();
        // convert this page of results into beans
        List<AuthorityBean> partialResults = rsp.getBeans(AuthorityBean.class);
        if (LOG.isTraceEnabled())
            LOG.trace(partialResults.size() + " records in this packet");
        // loop over the results
        for (AuthorityBean entry : partialResults) {
            if (LOG.isTraceEnabled())
                LOG.trace("Bearbeite:  " + entry.id);
            printOut(entry);
            if ((maxCount != Integer.MAX_VALUE) && (count >= maxCount))
                return; // optional exit for debug purposes
        }
        out.flush();
    } while (!cursorMark.equals(nextCursorMark));
}

From source file:io.logspace.hq.core.solr.event.SolrEventService.java

License:Open Source License

/**
 * Executes a filtered, cursor-paged event query and returns one page of events.
 *
 * @param eventFilter filter elements translated into Solr filter queries
 * @param count       maximum number of events for this page
 * @param cursorMark  Solr cursor mark marking the page start
 * @param sort        sort clause (must be stable for cursor paging)
 * @return the page of events, including the next cursor mark and total hit count
 * @throws EventStoreException if the Solr query fails
 */
private EventPage retrieve(EventFilter eventFilter, int count, String cursorMark, String sort) {
    SolrQuery query = new SolrQuery(ALL_DOCS_QUERY);
    query.setRows(count);
    query.set(CURSOR_MARK_PARAM, cursorMark);
    query.set(SORT, sort);

    for (EventFilterElement element : eventFilter) {
        query.addFilterQuery(this.createFilterQuery(element));
    }

    try {
        QueryResponse response = this.solrClient.query(query);

        EventPage page = new EventPage();
        for (SolrDocument document : response.getResults()) {
            page.addEvent(this.createEvent(document));
        }
        page.setNextCursorMark(response.getNextCursorMark());
        page.setTotalCount(response.getResults().getNumFound());
        return page;
    } catch (SolrServerException | IOException | SolrException e) {
        String message = "Failed to retrieve events.";
        this.logger.error(message, e);
        throw EventStoreException.retrieveFailed(message, e);
    }
}

From source file:it.damore.solr.importexport.App.java

License:Open Source License

/**
 * Streams every document matching the configured query out of Solr via cursor
 * paging and writes one JSON object per line to {@code outputFile}, honouring
 * the configured skip-field rules (equal / starts-with / ends-with matches).
 *
 * <p>BUG FIX: the date pattern was {@code "YYYY-MM-dd'T'HH:mm:sss'Z'"} —
 * {@code YYYY} is the week-based year (wrong dates around new year) and
 * {@code sss} is a malformed seconds field. Corrected to calendar year and
 * two-digit seconds.
 *
 * @param client     Solr client to read from
 * @param outputFile destination file (one JSON document per line)
 * @throws SolrServerException if Solr rejects a query
 * @throws IOException         on read/write failure
 */
private static void readAllDocuments(HttpSolrClient client, File outputFile)
        throws SolrServerException, IOException {

    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setQuery("*:*");
    if (config.getFilterQuery() != null) {
        solrQuery.addFilterQuery(config.getFilterQuery());
    }
    // First query only fetches the match count, no documents.
    solrQuery.setRows(0);

    // Cursor paging requires a deterministic sort on the unique key.
    solrQuery.addSort(config.getUniqueKey(), ORDER.asc);

    String cursorMark = CursorMarkParams.CURSOR_MARK_START;

    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));

    // objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, true);
    DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
    objectMapper.setDateFormat(df);
    objectMapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true);

    QueryResponse r = client.query(solrQuery);

    long nDocuments = r.getResults().getNumFound();
    logger.info("Found " + nDocuments + " documents");

    if (!config.getDryRun()) {
        logger.info("Creating " + config.getFileName());

        // Pre-partition the skip rules by match type so the per-document loop stays cheap.
        Set<SkipField> skipFieldsEquals = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.EQUAL).collect(Collectors.toSet());
        Set<SkipField> skipFieldsStartWith = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.STARTS_WITH).collect(Collectors.toSet());
        Set<SkipField> skipFieldsEndWith = config.getSkipFieldsSet().stream()
                .filter(s -> s.getMatch() == MatchType.ENDS_WITH).collect(Collectors.toSet());

        try (PrintWriter pw = new PrintWriter(outputFile)) {
            solrQuery.setRows(200);
            boolean done = false;
            while (!done) {
                solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
                QueryResponse rsp = client.query(solrQuery);
                String nextCursorMark = rsp.getNextCursorMark();

                for (SolrDocument d : rsp.getResults()) {
                    skipFieldsEquals.forEach(f -> d.removeFields(f.getText()));
                    if (!skipFieldsStartWith.isEmpty() || !skipFieldsEndWith.isEmpty()) {
                        // Keep only entries whose keys match no prefix/suffix skip rule.
                        Map<String, Object> collect = d.entrySet().stream()
                                .filter(e -> skipFieldsStartWith.stream()
                                        .noneMatch(f -> e.getKey().startsWith(f.getText())))
                                .filter(e -> skipFieldsEndWith.stream()
                                        .noneMatch(f -> e.getKey().endsWith(f.getText())))
                                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
                        pw.write(objectMapper.writeValueAsString(collect));
                    } else {
                        pw.write(objectMapper.writeValueAsString(d));
                    }
                    pw.write("\n");
                }
                // Solr signals exhaustion by returning the same cursor mark again.
                if (cursorMark.equals(nextCursorMark)) {
                    done = true;
                }

                cursorMark = nextCursorMark;
            }

        }
    }

}

From source file:org.apache.drill.exec.store.solr.SolrRecordReader.java

License:Apache License

/**
 * Fetches the next batch of results from Solr — via streaming, plain document
 * paging, facets, stats, or facet pivots, depending on the scan spec — and
 * loads them into the value vectors.
 *
 * <p>BUG FIX: the end-of-resultset check compared cursor marks with {@code !=}
 * (reference identity). Distinct {@code String} instances made that almost
 * always true, so the reader never detected the final page. Cursor marks are
 * now compared with {@code equals()}, and a null next mark ends the read.
 *
 * @return number of records written into the vectors for this batch
 */
@Override
public int next() {
    int counter = 0;
    int statsCounter = 0;
    SolrRecordReader.logger.info("Use SOLR Stream : " + useSolrStream);

    QueryResponse rsp = null;
    String solrUrl = solrSubScan.getSolrScanSpec().getSolrUrl();
    String uniqueKey = solrSubScan.getSolrScanSpec().getCvSchema().getUniqueKey();
    String solrCoreName = solrSubScan.getSolrScanSpec().getSolrCoreName();
    SolrFilterParam filters = solrSubScan.getSolrScanSpec().getFilter();
    List<SolrSortParam> solrSortParams = solrSubScan.getSolrScanSpec().getSortParams();
    // Typed copy (the original used a raw ArrayList, hiding an unchecked warning).
    ArrayList<String> fieldListCopy = new ArrayList<>(this.fields);
    long solrDocFetchCount = solrSubScan.getSolrScanSpec().getSolrDocFetchCount(solrCoreName, fieldListCopy);
    boolean isDistinct = solrSubScan.getSolrScanSpec().isDistinct();
    boolean isGroup = solrSubScan.getSolrScanSpec().isGroup();
    boolean useFacetPivotFromGroupCount = isCountOnlyQuery() && isGroup;
    boolean isDataQuery = solrAggrParams.isEmpty() && !isGroup && !useFacetPivotFromGroupCount;

    if (!solrStreamReadFinished) {
        StringBuilder filterBuilder = new StringBuilder();
        if (filters != null) {
            for (String filter : filters) {
                filterBuilder.append(filter);
            }
        }

        if ((solrUrl != null) && solrUrl.contains(solrCoreName)) {
            solrServerUrl = solrUrl;
            solrCoreName = null; // setting it null since solrUrl already contains the coreName;
        }

        if (useSolrStream) {
            SolrStream solrStream = solrClientApiExec.getSolrStreamResponse(solrServerUrl, solrCoreName,
                    this.fields, filterBuilder, uniqueKey, solrDocFetchCount);

            counter = processSolrStream(solrStream);
        } else {

            if (!solrAggrParams.isEmpty() && !isGroup && isCountOnlyQuery()) {
                SolrRecordReader.logger.info("Processing COUNT only query...");
                ValueVector vv1 = vectors.get(String.valueOf(statsCounter));
                processCountQuery(vv1, solrDocFetchCount, statsCounter, counter);
                statsCounter++;
                counter++; // actual record counter
            } else {
                if (solrDocFetchCount != 0) {
                    if (isDistinct) {
                        // make facet query.
                    }

                    if (isDataQuery && !solrSubScan.getSolrScanSpec().isLimitApplied()) {
                        solrDocFetchCount = SolrPluginConstants.SOLR_DEFAULT_FETCH_COUNT;
                    }
                    rsp = solrClientApiExec.getSolrDocs(solrServerUrl, solrCoreName, uniqueKey, this.fields,
                            solrDocFetchCount, cursorMark, filterBuilder, solrAggrParams, solrSortParams,
                            solrScanSpec.getProjectFieldNames(), isGroup, isCountOnlyQuery());

                    SolrDocumentList solrDocList = rsp.getResults();
                    Map<String, FieldStatsInfo> fieldStatsInfoMap = rsp.getFieldStatsInfo();
                    NamedList<List<PivotField>> facetPivots = rsp.getFacetPivot();

                    // solr doc iteration
                    if (solrDocList != null) {
                        counter = processSolrDocs(solrDocList);
                    }
                    // facet iteration (for single group by)
                    if (rsp.getFacetFields() != null) {
                        counter = processFacetResponse(rsp, isCountOnlyQuery());
                    }
                    // stats/stats.facet iteration (for stats with single group by should be replace by
                    // facet pivot)
                    if (fieldStatsInfoMap != null) {
                        counter = processStatsFieldResponse(fieldStatsInfoMap, isGroup, uniqueKey);
                    }
                    // facet pivot response
                    if (facetPivots != null) {
                        List<PivotField> pivotList = facetPivots.get(Joiner.on(",").join(fields));
                        counter = processFacetPivotResponse(pivotList);
                    }
                }
            }
        }

    }
    for (String key : vectors.keySet()) {
        ValueVector vv = vectors.get(key);
        vv.getMutator().setValueCount(counter);
    }
    // Compare cursor marks by value; a changed mark means more pages remain.
    String nextCursorMark = (rsp != null) ? rsp.getNextCursorMark() : null;
    if (isDataQuery && nextCursorMark != null && !nextCursorMark.equals(cursorMark)) {
        solrStreamReadFinished = false;
        cursorMark = nextCursorMark;
        SolrRecordReader.logger
                .info("Retrieving resultset from SOLR is not yet finished. Retriving with next cursor [ "
                        + cursorMark + " ] with rows:" + solrDocFetchCount);
    } else {
        solrStreamReadFinished = true;
    }
    return counter;
}

From source file:org.apache.hadoop.hive.solr.SolrBatchReader.java

License:Apache License

/**
 * Fetches one window of documents from Solr into the shared buffer, advances
 * the shared cursor mark, and then signals completion on the cyclic barrier so
 * the parent thread can proceed.
 *
 * <p>BUG FIX: if the query threw, {@code response} remained null and the
 * unconditional {@code buffer.addAll(response.getResults())} raised an NPE.
 * The buffer is now only filled when a response was actually received; the
 * barrier is still awaited on failure so the parent is never left hanging.
 */
@Override
public void run() {

    QueryResponse response = null;
    buffer.clear();
    // Read data from the SOLR server.
    try {
        query.setRows(window);
        query.set("sort", "score desc,id asc");
        query.set("cursorMark", nextCursorMark.toString());
        response = solrServer.query(query);
        // Replace the shared cursor mark in place with the next one from Solr.
        nextCursorMark.delete(0, nextCursorMark.length());
        nextCursorMark.append(response.getNextCursorMark());
    } catch (SolrServerException ex) {
        LOG.log(Level.ERROR, "Exception occured while querying the solr server", ex);
    }
    // Only buffer results if the query succeeded ('response' stays null on failure).
    if (response != null) {
        buffer.addAll(response.getResults());
    }
    // Signal the parent thread that I am done.
    try {
        cb.await();
    } catch (BrokenBarrierException ex) { // TODO: Catch proper exceptions
        LOG.log(Level.ERROR, "Exception occured while waiting on cyclic buffer", ex);
    } catch (InterruptedException ex) {
        // Restore the interrupted status
        Thread.currentThread().interrupt();
    }
}

From source file:org.apache.metron.solr.dao.SolrMetaAlertSearchDao.java

License:Apache License

/**
 * Finds every active meta alert that contains the alert with the given guid.
 * Uses a {@code {!parent}} block-join query so only matching parent (meta
 * alert) documents are returned, with their child alerts re-attached via the
 * {@code [child]} transformer, and pages through the results with Solr cursors.
 *
 * @param guid guid of the child alert to look up; must be non-empty
 * @return all matching meta alerts
 * @throws InvalidSearchException on an empty guid or a failed Solr search
 */
@Override
public SearchResponse getAllMetaAlertsForAlert(String guid) throws InvalidSearchException {
    if (guid == null || guid.trim().isEmpty()) {
        throw new InvalidSearchException("Guid cannot be empty");
    }

    // Build the block-join query: the {!parent} clause must match the parent
    // filter, and the child guid is matched explicitly.
    String activeClause = MetaAlertConstants.STATUS_FIELD + ":" + MetaAlertStatus.ACTIVE.getStatusString();
    String guidClause = Constants.GUID + ":" + guid;
    String fullClause = "{!parent which=" + activeClause + "}" + guidClause;
    String metaalertTypeClause = config.getSourceTypeField() + ":" + MetaAlertConstants.METAALERT_TYPE;
    SolrQuery solrQuery = new SolrQuery().setQuery(fullClause)
            .setFields("*", "[child parentFilter=" + metaalertTypeClause + " limit=999]")
            .addSort(Constants.GUID, SolrQuery.ORDER.asc); // stable sort required for cursor paging

    List<SearchResult> allResults = new ArrayList<>();
    try {
        String cursorMark = CursorMarkParams.CURSOR_MARK_START;
        for (;;) {
            solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
            QueryResponse rsp = solrClient.query(METAALERTS_COLLECTION, solrQuery);
            String nextCursorMark = rsp.getNextCursorMark();
            for (SolrDocument document : rsp.getResults()) {
                allResults.add(SolrUtilities.getSearchResult(document, null,
                        solrSearchDao.getAccessConfig().getIndexSupplier()));
            }
            // An unchanged cursor mark means the result set is exhausted.
            if (cursorMark.equals(nextCursorMark)) {
                break;
            }
            cursorMark = nextCursorMark;
        }
    } catch (IOException | SolrServerException e) {
        throw new InvalidSearchException("Unable to complete search", e);
    }

    SearchResponse searchResponse = new SearchResponse();
    searchResponse.setResults(allResults);
    searchResponse.setTotal(allResults.size());
    return searchResponse;
}

From source file:org.apache.nifi.processors.solr.GetSolr.java

License:Apache License

/**
 * Polls Solr for documents at or after the last recorded date, pages through
 * the results with Solr cursor marks, and emits each page as a FlowFile in
 * either XML or record-oriented form. Paging progress (cursor mark and latest
 * date value) is persisted in cluster-scoped processor state at the end of a
 * fully successful run.
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    final ComponentLog logger = getLogger();
    final AtomicBoolean continuePaging = new AtomicBoolean(true);
    final SolrQuery solrQuery = new SolrQuery();

    try {
        // Resolve the schema's unique-key field once and cache it.
        if (id_field == null) {
            id_field = getFieldNameOfUniqueKey();
        }

        final String dateField = context.getProperty(DATE_FIELD).getValue();

        // Working copy of the processor's cluster state (cursor mark + date filter).
        final Map<String, String> stateMap = new HashMap<String, String>();
        stateMap.putAll(context.getStateManager().getState(Scope.CLUSTER).toMap());

        solrQuery.setQuery("*:*");
        // A user-supplied query is applied as a filter query so it does not
        // interfere with paging over the full index.
        final String query = context.getProperty(SOLR_QUERY).getValue();
        if (!StringUtils.isBlank(query) && !query.equals("*:*")) {
            solrQuery.addFilterQuery(query);
        }
        // Restrict to documents at/after the date recorded by the previous run.
        // NOTE(review): assumes STATE_MANAGER_FILTER is present in state; a missing
        // value would render as "null" inside the range query — confirm upstream init.
        final StringBuilder automatedFilterQuery = (new StringBuilder()).append(dateField).append(":[")
                .append(stateMap.get(STATE_MANAGER_FILTER)).append(" TO *]");
        solrQuery.addFilterQuery(automatedFilterQuery.toString());

        final List<String> fieldList = new ArrayList<String>();
        final String returnFields = context.getProperty(RETURN_FIELDS).getValue();
        if (!StringUtils.isBlank(returnFields)) {
            fieldList.addAll(Arrays.asList(returnFields.trim().split("[,]")));
            // The date field must be fetched to advance the state filter even when
            // the user did not request it; flag it so it can be stripped later.
            if (!fieldList.contains(dateField)) {
                fieldList.add(dateField);
                dateFieldNotInSpecifiedFieldsList.set(true);
            }
            for (String returnField : fieldList) {
                solrQuery.addField(returnField.trim());
            }
        }

        solrQuery.setParam(CursorMarkParams.CURSOR_MARK_PARAM, stateMap.get(STATE_MANAGER_CURSOR_MARK));
        solrQuery.setRows(context.getProperty(BATCH_SIZE).asInteger());

        // Cursor paging requires a deterministic sort ending on the unique key.
        final StringBuilder sortClause = (new StringBuilder()).append(dateField).append(" asc,")
                .append(id_field).append(" asc");
        solrQuery.setParam("sort", sortClause.toString());

        while (continuePaging.get()) {
            final QueryRequest req = new QueryRequest(solrQuery);
            if (isBasicAuthEnabled()) {
                req.setBasicAuthCredentials(getUsername(), getPassword());
            }

            logger.debug(solrQuery.toQueryString());
            final QueryResponse response = req.process(getSolrClient());
            final SolrDocumentList documentList = response.getResults();

            if (response.getResults().size() > 0) {
                // Advance paging state: the date of the last document on this page
                // and the next cursor mark returned by Solr.
                final SolrDocument lastSolrDocument = documentList.get(response.getResults().size() - 1);
                final String latestDateValue = df.format(lastSolrDocument.get(dateField));
                final String newCursorMark = response.getNextCursorMark();

                solrQuery.setParam(CursorMarkParams.CURSOR_MARK_PARAM, newCursorMark);
                stateMap.put(STATE_MANAGER_CURSOR_MARK, newCursorMark);
                stateMap.put(STATE_MANAGER_FILTER, latestDateValue);

                FlowFile flowFile = session.create();
                flowFile = session.putAttribute(flowFile, "solrQuery", solrQuery.toString());

                if (context.getProperty(RETURN_TYPE).getValue().equals(MODE_XML.getValue())) {
                    // Strip the implicitly-added date field before serializing to XML.
                    if (dateFieldNotInSpecifiedFieldsList.get()) {
                        for (SolrDocument doc : response.getResults()) {
                            doc.removeFields(dateField);
                        }
                    }
                    flowFile = session.write(flowFile,
                            SolrUtils.getOutputStreamCallbackToTransformSolrResponseToXml(response));
                    flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(),
                            "application/xml");

                } else {
                    // Record-oriented output via the configured RecordSetWriter service.
                    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER)
                            .asControllerService(RecordSetWriterFactory.class);
                    final RecordSchema schema = writerFactory.getSchema(null, null);
                    final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(),
                            schema);
                    final StringBuffer mimeType = new StringBuffer();
                    flowFile = session.write(flowFile, new OutputStreamCallback() {
                        @Override
                        public void process(final OutputStream out) throws IOException {
                            try {
                                final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema,
                                        out);
                                writer.write(recordSet);
                                writer.flush();
                                mimeType.append(writer.getMimeType());
                            } catch (SchemaNotFoundException e) {
                                throw new ProcessException("Could not parse Solr response", e);
                            }
                        }
                    });
                    flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(),
                            mimeType.toString());
                }
                session.transfer(flowFile, REL_SUCCESS);
            }
            // A page smaller than the batch size means the result set is exhausted.
            continuePaging.set(response.getResults().size() == Integer
                    .parseInt(context.getProperty(BATCH_SIZE).getValue()));
        }
        // Persist cursor mark and date filter only after a fully successful run.
        context.getStateManager().setState(stateMap, Scope.CLUSTER);
    } catch (SolrServerException | SchemaNotFoundException | IOException e) {
        context.yield();
        session.rollback();
        logger.error("Failed to execute query {} due to {}", new Object[] { solrQuery.toString(), e }, e);
        throw new ProcessException(e);
    } catch (final Throwable t) {
        context.yield();
        session.rollback();
        logger.error("Failed to execute query {} due to {}", new Object[] { solrQuery.toString(), t }, t);
        throw t;
    }
}