Example usage for org.apache.solr.client.solrj SolrClient add

List of usage examples for org.apache.solr.client.solrj SolrClient add

Introduction

On this page you can find example usage for the org.apache.solr.client.solrj SolrClient.add method.

Prototype

public UpdateResponse add(Iterator<SolrInputDocument> docIterator) throws SolrServerException, IOException 

Source Link

Document

Adds the documents supplied by the given iterator.

Usage

From source file:org.broadleafcommerce.core.search.service.solr.index.SolrIndexServiceImpl.java

License:Open Source License

/**
 * Builds Solr documents for the given page of indexables and, when a Solr server is
 * supplied, adds them to the server and commits.
 *
 * <p>Runs inside a freshly created transaction; the transaction is finalized on the
 * success path and on every failure path (the boolean passed to
 * {@code finalizeTransaction} is presumably the rollback flag — {@code false} on
 * success, {@code true} on error; TODO confirm against TransactionUtils).
 *
 * @param indexables the entities to index in this batch
 * @param solrServer the Solr client to send documents to; may be {@code null}, in which
 *        case the documents are built and returned but not sent
 * @return the documents built for this batch (possibly modified by extension handlers)
 * @throws ServiceException if adding to or committing the Solr index fails
 */
@Override
public Collection<SolrInputDocument> buildIncrementalIndex(List<? extends Indexable> indexables,
        SolrClient solrServer) throws ServiceException {
    TransactionStatus status = TransactionUtils.createTransaction("executeIncrementalIndex",
            TransactionDefinition.PROPAGATION_REQUIRED, transactionManager, true);
    if (SolrIndexCachedOperation.getCache() == null) {
        LOG.warn("Consider using SolrIndexService.performCachedOperation() in combination with "
                + "SolrIndexService.buildIncrementalIndex() for better caching performance during solr indexing");
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("Building incremental product index - pageSize: [%s]...", indexables.size()));
    }

    StopWatch s = new StopWatch();
    try {
        sandBoxHelper.ignoreCloneCache(true);
        extensionManager.getProxy().startBatchEvent(indexables);
        Collection<SolrInputDocument> documents = new ArrayList<SolrInputDocument>();
        List<Locale> locales = getAllLocales();

        // Collect the current product id for each indexable so the catalog structure
        // cache can be primed in a single DAO call below.
        List<Long> productIds = BLCCollectionUtils.collectList(indexables, new TypedTransformer<Long>() {
            @Override
            public Long transform(Object input) {
                return shs.getCurrentProductId((Indexable) input);
            }
        });

        solrIndexDao.populateProductCatalogStructure(productIds, SolrIndexCachedOperation.getCache());

        List<IndexField> fields = null;
        FieldEntity currentFieldType = null;
        for (Indexable indexable : indexables) {
            // Re-read the field list only when the entity type changes between
            // indexables. Remembering the type here is the fix: previously
            // currentFieldType was never assigned, so the comparison against null was
            // always "not equal" and the fields were re-read on every iteration.
            if (fields == null || ObjectUtils.notEqual(currentFieldType, indexable.getFieldEntityType())) {
                fields = indexFieldDao.readFieldsByEntityType(indexable.getFieldEntityType());
                currentFieldType = indexable.getFieldEntityType();
            }

            SolrInputDocument doc = buildDocument(indexable, fields, locales);
            //If someone overrides the buildDocument method and determines that they don't want a product 
            //indexed, then they can return null. If the document is null it does not get added to 
            //to the index.
            if (doc != null) {
                documents.add(doc);
            }
        }

        extensionManager.getProxy().modifyBuiltDocuments(documents, indexables, fields, locales);

        logDocuments(documents);

        if (!CollectionUtils.isEmpty(documents) && solrServer != null) {
            solrServer.add(documents);
            commit(solrServer);
        }
        TransactionUtils.finalizeTransaction(status, transactionManager, false);

        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("Built incremental product index - pageSize: [%s] in [%s]",
                    indexables.size(), s.toLapString()));
        }

        return documents;
    } catch (SolrServerException e) {
        TransactionUtils.finalizeTransaction(status, transactionManager, true);
        throw new ServiceException("Could not rebuild index", e);
    } catch (IOException e) {
        TransactionUtils.finalizeTransaction(status, transactionManager, true);
        throw new ServiceException("Could not rebuild index", e);
    } catch (RuntimeException e) {
        TransactionUtils.finalizeTransaction(status, transactionManager, true);
        throw e;
    } finally {
        // Always undo the batch/cache toggles, even when an exception escapes.
        extensionManager.getProxy().endBatchEvent(indexables);
        sandBoxHelper.ignoreCloneCache(false);
    }
}

From source file:org.codice.ddf.persistence.internal.PersistentStoreImpl.java

License:Open Source License

/**
 * Persists the given properties as a new Solr document in the core named after
 * {@code type}. The property map is expected to already carry the Solr field-name
 * suffixes on its keys. Entries containing the value "guest" are silently skipped.
 *
 * @param type       the persistent type (becomes the Solr core name); must be non-blank
 * @param properties the field name/value pairs to store; no-op when null or empty
 * @throws PersistenceException if the type is blank, no client can be created, or the
 *                              Solr add fails (a rollback is attempted first)
 */
@Override
// Input Map is expected to have the suffixes on the key names
public void add(String type, Map<String, Object> properties) throws PersistenceException {
    LOGGER.debug("type = {}", type);
    if (type == null || type.isEmpty()) {
        throw new PersistenceException(
                "The type of object(s) to retrieve must be non-null and not blank, e.g., notification, metacard, etc.");
    }
    if (properties == null || properties.isEmpty() || properties.containsValue("guest")) {
        // Nothing to persist, or a guest entry which is deliberately not stored.
        return;
    }

    LOGGER.debug("Adding entry of type {}", type);

    // The Solr core name is derived from the entry type.
    SolrClient client = getSolrClient(type);
    if (client == null) {
        throw new PersistenceException("Unable to create Solr client.");
    }

    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("createddate_tdt", new Date());
    properties.forEach(doc::addField);

    try {
        UpdateResponse response = client.add(doc);
        LOGGER.debug("UpdateResponse from add of SolrInputDocument:  {}", response);
    } catch (SolrServerException e) {
        LOGGER.info("SolrServerException while adding Solr index for persistent type {}", type, e);
        doRollback(client, type);
        throw new PersistenceException(
                "SolrServerException while adding Solr index for persistent type " + type, e);
    } catch (IOException e) {
        LOGGER.info("IOException while adding Solr index for persistent type {}", type, e);
        doRollback(client, type);
        throw new PersistenceException("IOException while adding Solr index for persistent type " + type, e);
    } catch (RuntimeException e) {
        LOGGER.info("RuntimeException while adding Solr index for persistent type {}", type, e);
        doRollback(client, type);
        throw new PersistenceException("RuntimeException while adding Solr index for persistent type " + type,
                e);
    }
}

From source file:org.intermine.web.autocompletion.AutoCompleter.java

License:GNU General Public License

/**
 * Build the index from the database blob
 * @param os Objectstore/*from  w ww  . j  ava  2  s  .  c o m*/
 * @throws IOException IOException
 * @throws ObjectStoreException ObjectStoreException
 * @throws ClassNotFoundException ClassNotFoundException
 */
public void buildIndex(ObjectStore os) throws IOException, ObjectStoreException, ClassNotFoundException {

    List<SolrInputDocument> solrDocumentList = new ArrayList<SolrInputDocument>();
    List<String> fieldList = new ArrayList<String>();

    fieldList.add(CLASSNAME_FIELD);

    for (Map.Entry<String, String> entry : classFieldMap.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();

        String className = key;
        ClassDescriptor cld = os.getModel().getClassDescriptorByName(className);
        if (cld == null) {
            throw new RuntimeException("a class mentioned in ObjectStore summary properties " + "file ("
                    + className + ") is not in the model");
        }
        List<String> fieldNames = Arrays.asList(value.split(" "));
        for (Iterator<String> i = fieldNames.iterator(); i.hasNext();) {

            String fieldName = i.next();
            String classAndField = cld.getUnqualifiedName() + "." + fieldName;
            System.out.println("Indexing " + classAndField);

            if (!fieldList.contains(fieldName)) {
                fieldList.add(fieldName);
            }

            Query q = new Query();
            q.setDistinct(true);
            QueryClass qc = new QueryClass(Class.forName(cld.getName()));
            q.addToSelect(new QueryField(qc, fieldName));
            q.addFrom(qc);
            Results results = os.execute(q);

            for (Object resRow : results) {
                @SuppressWarnings("rawtypes")
                Object fieldValue = ((ResultsRow) resRow).get(0);
                if (fieldValue != null) {
                    SolrInputDocument solrInputDocument = new SolrInputDocument();
                    solrInputDocument.addField(fieldName, fieldValue.toString());
                    solrInputDocument.addField(CLASSNAME_FIELD, cld.getUnqualifiedName());
                    solrDocumentList.add(solrInputDocument);
                }
            }
        }
    }

    SolrClient solrClient = SolrClientHandler.getClientInstance(this.propertiesManager.getSolrUrl());

    try {
        solrClient.deleteByQuery("*:*");
        solrClient.commit();
    } catch (SolrServerException e) {
        LOG.error("Deleting old index failed", e);
    } catch (IOException e) {
        e.printStackTrace();
    }

    for (String fieldName : fieldList) {
        Map<String, Object> fieldAttributes = new HashMap();
        fieldAttributes.put("name", fieldName);
        fieldAttributes.put("type", "text_general");
        fieldAttributes.put("stored", true);
        fieldAttributes.put("indexed", true);
        fieldAttributes.put("multiValued", true);
        fieldAttributes.put("required", false);

        try {
            SchemaRequest.AddField schemaRequest = new SchemaRequest.AddField(fieldAttributes);
            SchemaResponse.UpdateResponse response = schemaRequest.process(solrClient);

        } catch (SolrServerException e) {
            LOG.error("Error while adding autocomplete fields to the solrclient.", e);
            e.printStackTrace();
        }
    }

    try {
        UpdateResponse response = solrClient.add(solrDocumentList);

        solrClient.commit();
    } catch (SolrServerException e) {

        LOG.error("Error while commiting the AutoComplete " + "SolrInputdocuments to the Solrclient. "
                + "Make sure the Solr instance is up", e);

        e.printStackTrace();
    }
}

From source file:org.mycore.solr.index.handlers.content.MCRSolrMCRContentMapIndexHandler.java

License:Open Source License

/**
 * Sends all documents in {@code contentMap} to Solr in one batch.
 *
 * <p>If the configured client is a {@link ConcurrentUpdateSolrClient} the work is split
 * up via {@code splitup(documents)} instead of being sent as one batch. Any failure —
 * or a non-zero update status — likewise falls back to split-and-retry.
 *
 * @throws IOException if communication with Solr fails
 * @throws SolrServerException if Solr reports an error
 */
@Override
public void index() throws IOException, SolrServerException {
    int totalCount = contentMap.size();
    LOGGER.info("Handling " + totalCount + " documents");
    //multithread processing will result in too many http request
    UpdateResponse updateResponse;
    try {
        Iterator<SolrInputDocument> documents = MCRSolrInputDocumentFactory.getInstance()
                .getDocuments(contentMap);
        SolrClient solrClient = getSolrClient();
        if (solrClient instanceof ConcurrentUpdateSolrClient) {
            //split up to speed up processing
            splitup(documents);
            return;
        }
        if (LOGGER.isDebugEnabled()) {
            // Drain the iterator so the batch can be logged, then rebuild the iterator
            // from the drained list (iterators are one-shot).
            ArrayList<SolrInputDocument> debugList = new ArrayList<>();
            while (documents.hasNext()) {
                debugList.add(documents.next());
            }
            LOGGER.debug("Sending these documents: " + debugList);
            // recreate the documents iterator
            documents = debugList.iterator();
        }
        if (solrClient instanceof HttpSolrClient) {
            // HttpSolrClient accepts the iterator directly.
            updateResponse = ((HttpSolrClient) solrClient).add(documents);
        } else {
            // Other clients get the documents as a pre-sized collection.
            ArrayList<SolrInputDocument> docs = new ArrayList<>(totalCount);
            while (documents.hasNext()) {
                docs.add(documents.next());
            }
            updateResponse = solrClient.add(docs);
        }
    } catch (Throwable e) {
        // Deliberately broad catch: any failure falls back to splitting the batch
        // into smaller pieces and retrying.
        LOGGER.warn("Error while indexing document collection. Split and retry.", e);
        splitup();
        return;
    }
    if (updateResponse.getStatus() != 0) {
        LOGGER.error(
                "Error while indexing document collection. Split and retry: " + updateResponse.getResponse());
        splitup();
    } else {
        LOGGER.info("Sending " + totalCount + " documents was successful in " + updateResponse.getElapsedTime()
                + " ms.");
    }

}

From source file:org.opencms.search.solr.spellchecking.CmsSpellcheckDictionaryIndexer.java

License:Open Source License

/**
 * Add a list of documents to the Solr client.<p>
 *
 * @param client The SolrClient instance object.
 * @param documents The documents that should be added.
 * @param commit boolean flag indicating whether a "commit" call should be made after adding the documents
 *
 * @throws IOException in case something goes wrong
 * @throws SolrServerException in case something goes wrong
 */
static void addDocuments(SolrClient client, List<SolrInputDocument> documents, boolean commit)
        throws IOException, SolrServerException {

    // Nothing to do without a client or a document list.
    if (client == null || documents == null) {
        return;
    }

    // Solr rejects an empty add, so only send when there is at least one document.
    if (!documents.isEmpty()) {
        client.add(documents);
    }

    // A commit may be requested even when no documents were added.
    if (commit) {
        client.commit();
    }
}

From source file:org.phenotips.variantstore.db.solr.SolrVariantUtils.java

License:Open Source License

/**
 * Add a document to a SolrClient.
 *
 * @param doc    the SolrInputDocument to be added; note it is cleared after a successful
 *               add, so callers must not expect its fields to survive this call
 * @param server the solr client to assist communication with a Solr server
 * @throws DatabaseException if the add fails (wraps SolrServerException or IOException)
 */
public static void addDoc(SolrInputDocument doc, SolrClient server) throws DatabaseException {
    try {
        server.add(doc);
        // Release the document's contents once Solr has accepted it.
        doc.clear();
    } catch (SolrServerException | IOException e) {
        throw new DatabaseException("Error adding variants to Solr", e);
    }
}

From source file:se.uu.ub.cora.solrindex.SolrRecordIndexer.java

License:Open Source License

/**
 * Sends the current record document to Solr and commits it.
 *
 * <p>Any failure is rethrown as a SolrIndexException whose message is built from the
 * record's type and id. NOTE(review): only {@code e.getMessage()} is carried into the
 * new exception — the original cause is dropped, which loses the stack trace; confirm
 * whether SolrIndexException offers a factory that accepts a cause.
 */
private void sendDocumentToSolr() {
    try {
        SolrClient solrClient = solrClientProvider.getSolrClient();
        solrClient.add(document);
        solrClient.commit();
    } catch (Exception e) {
        throw SolrIndexException.withMessage(
                "Error while indexing record with type: " + type + " and id: " + id + " " + e.getMessage());
    }
}

From source file:uk.bl.wa.annotation.Annotator.java

License:Open Source License

/**
 * Runs the given query against Solr and re-submits every hit with annotations applied.
 *
 * <p>For each result a fresh input document is built carrying over the id, crawl date
 * and URL fields; the annotator then applies the applicable categories etc. before the
 * document is added back to the index.
 *
 * @param anr        the annotator that supplies the annotations
 * @param solr       the Solr client to query and update
 * @param parameters the query selecting the records to annotate
 * @throws SolrServerException if the query or add fails on the Solr side
 * @throws URISyntaxException  if a record's URL field is not a valid URI
 * @throws IOException         if communication with Solr fails
 */
private static void searchAndApplyAnnotations(Annotator anr, SolrClient solr, SolrQuery parameters)
        throws SolrServerException, URISyntaxException, IOException {
    SolrDocumentList hits = solr.query(parameters).getResults();
    for (SolrDocument hit : hits) {
        SolrInputDocument update = new SolrInputDocument();
        update.setField(SolrFields.ID, hit.getFieldValue(SolrFields.ID));
        update.setField(SolrFields.CRAWL_DATE, hit.getFieldValue(SolrFields.CRAWL_DATE));
        update.setField(SolrFields.SOLR_URL, hit.getFieldValue(SolrFields.SOLR_URL));
        URI uri = new URI((String) update.getFieldValue(SolrFields.SOLR_URL));
        // Update the record with the applicable categories etc., then push it back.
        anr.applyAnnotations(uri, update);
        solr.add(update);
    }
}

From source file:uk.bl.wa.annotation.CollectionsUpdateTest.java

License:Open Source License

/**
 * Builds an update document from the given values, sends it to Solr and performs a
 * hard commit (waitFlush = true, waitSearcher = true).
 *
 * @param ss          the Solr client to update
 * @param id          the record id
 * @param collection  the collection value for the document
 * @param collections the collections value for the document
 * @throws SolrServerException if Solr reports an error
 * @throws IOException         if communication with Solr fails
 */
private static void doUpdate(SolrClient ss, String id, String collection, String collections)
        throws SolrServerException, IOException {

    SolrInputDocument update = createUpdateDocument(id, collection, collections);
    ss.add(update);
    ss.commit(true, true);

    System.out.println("Updated.");
}