Example usage for org.apache.solr.client.solrj.impl CloudSolrClient commit

List of usage examples for org.apache.solr.client.solrj.impl CloudSolrClient commit

Introduction

In this page you can find the example usage for org.apache.solr.client.solrj.impl CloudSolrClient commit.

Prototype

public UpdateResponse commit() throws SolrServerException, IOException 

Source Link

Document

Performs an explicit commit, causing pending documents to be committed for indexing. Uses waitFlush=true and waitSearcher=true, to be in line with the defaults for plain HTTP access. Be very careful when triggering commits from the client side.

Usage

From source file:de.qaware.spark.importer.spark.SimpleSparkSolrMetricsImporter.java

License:Apache License

/**
 * Imports a single CSV metrics file into Solr.
 *
 * <p>The first line of the file is treated as the CSV header; every following
 * line is converted into Solr documents and sent to the cluster in batches of
 * {@code BATCH_SIZE}, followed by a single commit.
 *
 * <p>Caution: this only works with files &lt; 128 MB / one Hadoop block,
 * because the header is read once from the start of the stream.
 *
 * @param fileUrl    the filename (host/process/type info is parsed out of it).
 * @param fileStream the stream containing the CSV payload.
 * @throws ParseException if a line cannot be converted into documents.
 */
private void importIntoSolr(String fileUrl, PortableDataStream fileStream) throws ParseException {

    DateFormat dateFormat = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss.SSS", Locale.US);

    // Both the client and the reader are AutoCloseable; the original code
    // never closed the CloudSolrClient, leaking its ZooKeeper connection.
    try (CloudSolrClient solrCloudClient = new CloudSolrClient.Builder().withZkHost(zkHost).build();
            BufferedReader reader = new BufferedReader(new InputStreamReader(fileStream.open()), 1000000)) {
        solrCloudClient.setDefaultCollection(COLLECTION_NAME);

        // Assuming the first line is a csv header.
        String firstLine = reader.readLine();
        if (firstLine == null) {
            return; // empty file: nothing to import (original NPE'd here)
        }
        String[] fieldNames = firstLine.split(DELIMITER);

        // Split host/process/type information out of the filename.
        FileNameParts parts = new FileNameParts(fileUrl);

        // Loop over the csv file, produce and add documents in batches.
        boolean anythingAdded = false;
        List<SolrInputDocument> documents = new ArrayList<>();
        String line;
        while ((line = reader.readLine()) != null) {
            documents.addAll(createDocumentFromLine(line, fieldNames, parts, dateFormat));
            if (documents.size() > BATCH_SIZE) {
                solrCloudClient.add(documents);
                anythingAdded = true;
                documents.clear();
            }
        }
        if (!documents.isEmpty()) {
            solrCloudClient.add(documents); // add the rest (last chunk)
            anythingAdded = true;
        }
        // Commit whenever anything was added. The original only committed when
        // the last chunk was non-empty, so earlier batches were silently left
        // uncommitted if the document count was a multiple of the batch size.
        if (anythingAdded) {
            solrCloudClient.commit();
        }

    } catch (IOException | SolrServerException e) {
        // Best-effort import: report and continue (matches original behavior).
        //Logger.getLogger(SimpleSparkSolrMetricsImporter.class.getName()).warning(e.getMessage());
        System.err.println(e.getMessage());
    }
}

From source file:org.apache.coheigea.bigdata.solr.SolrCloudTest.java

License:Apache License

/**
 * Indexes a single document into the "docs" collection and verifies that a
 * match-all query finds exactly that document with its original title.
 */
@Test
public void testAddAndQuery() throws Exception {
    final CloudSolrClient cloudSolrClient = server.getSolrClient();
    cloudSolrClient.setDefaultCollection("docs");

    // Index one document and commit so it becomes visible to searchers.
    final SolrInputDocument document = new SolrInputDocument();
    document.addField("title", "Title of Doc");
    document.addField("content", "Test Content");
    cloudSolrClient.add(document);
    cloudSolrClient.commit();

    // Query everything back to confirm the upload.
    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "*");
    final QueryResponse queryResponse = cloudSolrClient.query(params);

    final SolrDocumentList results = queryResponse.getResults();
    Assert.assertEquals(1, results.getNumFound());

    final SolrDocument found = results.get(0);
    Assert.assertEquals("Title of Doc", found.getFieldValue("title"));
}

From source file:org.apache.metron.solr.integration.components.SolrComponent.java

License:Apache License

/**
 * Returns every document currently indexed in the given collection.
 *
 * <p>Issues a commit first so recently added documents are visible, then runs
 * a match-all query. On Solr/IO errors the stack trace is printed and whatever
 * was collected so far (possibly an empty list) is returned.
 *
 * @param collection the Solr collection to read from.
 * @return all indexed documents (each SolrDocument is itself a Map).
 */
public List<Map<String, Object>> getAllIndexedDocs(String collection) {
    final List<Map<String, Object>> result = new ArrayList<>();

    final CloudSolrClient client = miniSolrCloudCluster.getSolrClient();
    client.setDefaultCollection(collection);

    final SolrQuery matchAll = new SolrQuery();
    matchAll.set("q", "*:*");

    try {
        client.commit(); // make recently added docs visible before querying
        final QueryResponse response = client.query(matchAll);
        result.addAll(response.getResults());
    } catch (SolrServerException | IOException e) {
        e.printStackTrace();
    }
    return result;
}

From source file:org.apache.nifi.processors.solr.QuerySolrIT.java

License:Apache License

/**
 * One-time test setup: uploads the Solr config to ZooKeeper, ensures the
 * target collection exists (or empties it), and indexes ten sample documents
 * covering single- and multi-valued string/integer/double fields.
 */
@BeforeClass
public static void setup() throws IOException, SolrServerException {
    final CloudSolrClient solrClient = createSolrClient();
    solrClient.uploadConfig(Paths.get(ZK_CONFIG_PATH), ZK_CONFIG_NAME);
    solrClient.setDefaultCollection(SOLR_COLLECTION);

    final boolean collectionExists =
            solrClient.getZkStateReader().getClusterState().hasCollection(SOLR_COLLECTION);
    if (collectionExists) {
        // Reuse the existing collection, but start from a clean slate.
        solrClient.deleteByQuery("*:*");
    } else {
        CollectionAdminRequest.createCollection(SOLR_COLLECTION, ZK_CONFIG_NAME, 1, 1)
                .process(solrClient);
    }

    // Index ten sample documents with a mix of field cardinalities and types.
    for (int i = 0; i < 10; i++) {
        final SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "doc" + i);
        doc.addField("created", DATE_FORMAT.format(new Date()));
        doc.addField("string_single", "single" + i + ".1");
        doc.addField("string_multi", "multi" + i + ".1");
        doc.addField("string_multi", "multi" + i + ".2");
        doc.addField("integer_single", i);
        doc.addField("integer_multi", 1);
        doc.addField("integer_multi", 2);
        doc.addField("integer_multi", 3);
        doc.addField("double_single", 0.5 + i);
        solrClient.add(doc);
    }
    solrClient.commit();
}