Example usage for org.apache.solr.client.solrj SolrQuery SolrQuery

List of usage examples for org.apache.solr.client.solrj SolrQuery SolrQuery

Introduction

On this page you can find example usage for org.apache.solr.client.solrj SolrQuery SolrQuery.

Prototype

public SolrQuery(String q) 

Source Link

Document

Create a new SolrQuery

Usage

From source file:de.hybris.platform.solrfacetsearch.solr.SolrSchemaTest.java

License:Open Source License

/**
 * Verifies that the schema's dynamic trie-double fields ("dynamic_tdouble" and
 * "dynamic_tdouble_mv") round-trip single and multi-valued Double values through Solr.
 */
@Test
public void testDynamicTrieDouble() throws Exception {
    final String dynamicField = "dynamic_tdouble";
    final String dynamicFieldMultiValued = "dynamic_tdouble_mv";
    final SolrServer solrServer = getSolrServer();
    // Random id keeps concurrent test runs from colliding on the same document.
    final String id = UUID.randomUUID().toString();
    try {
        final SolrInputDocument document = new SolrInputDocument();
        document.addField("id", id);
        // Double literals (previously float literals widened to the same double values).
        document.addField(dynamicField, Double.valueOf(1.0d));
        document.addField(dynamicFieldMultiValued, Double.valueOf(2.0d));
        document.addField(dynamicFieldMultiValued, Double.valueOf(3.0d));
        solrServer.add(document);
        solrServer.commit();

        // Perform the query, getting the response, and validating the results
        final QueryResponse response = solrServer.query(new SolrQuery("id:" + id));
        Assert.assertNotNull(response);
        final SolrDocumentList documents = response.getResults();
        Assert.assertNotNull(documents);
        Assert.assertEquals(1, documents.size());
        final SolrDocument resultDocument = documents.iterator().next();
        Assert.assertNotNull(resultDocument);

        // Verify the identifier field (expected value first, per JUnit convention).
        Assert.assertEquals(id, resultDocument.getFieldValue("id"));

        // Verify the non-multivalued field
        Assert.assertEquals(Double.valueOf(1.0d), resultDocument.getFieldValue(dynamicField));

        // Verify the multivalued field
        final Collection<Object> dynamicFieldValues = resultDocument.getFieldValues(dynamicFieldMultiValued);
        Assert.assertNotNull(dynamicFieldValues);
        Assert.assertEquals(2, dynamicFieldValues.size());
        Assert.assertTrue(dynamicFieldValues.contains(Double.valueOf(2.0d)));
        Assert.assertTrue(dynamicFieldValues.contains(Double.valueOf(3.0d)));
    } finally {
        // Always remove the test document so repeated runs start clean.
        if (solrServer != null) {
            solrServer.deleteById(id);
            solrServer.commit();
        }
    }
}

From source file:de.hybris.platform.solrfacetsearch.solr.SolrSchemaTest.java

License:Open Source License

/**
 * Verifies that the schema's dynamic trie-date fields ("dynamic_tdate" and
 * "dynamic_tdate_mv") round-trip single and multi-valued Date values through Solr.
 */
@Test
public void testDynamicTrieDate() throws Exception {
    final String dynamicField = "dynamic_tdate";
    final String dynamicFieldMultiValued = "dynamic_tdate_mv";
    final SolrServer solrServer = getSolrServer();
    // Random id keeps concurrent test runs from colliding on the same document.
    final String id = UUID.randomUUID().toString();
    try {
        final Calendar date1 = Calendar.getInstance();
        final Calendar date2 = Calendar.getInstance();
        final Calendar date3 = Calendar.getInstance();
        final SolrInputDocument document = new SolrInputDocument();
        document.addField("id", id);
        document.addField(dynamicField, date1.getTime());
        document.addField(dynamicFieldMultiValued, date2.getTime());
        document.addField(dynamicFieldMultiValued, date3.getTime());
        solrServer.add(document);
        solrServer.commit();

        // Perform the query, getting the response, and validating the results
        final QueryResponse response = solrServer.query(new SolrQuery("id:" + id));
        Assert.assertNotNull(response);
        final SolrDocumentList documents = response.getResults();
        Assert.assertNotNull(documents);
        Assert.assertEquals(1, documents.size());
        final SolrDocument resultDocument = documents.iterator().next();
        Assert.assertNotNull(resultDocument);

        // Verify the identifier field (expected value first, per JUnit convention).
        Assert.assertEquals(id, resultDocument.getFieldValue("id"));

        // Verify the non-multivalued field
        Assert.assertEquals(date1.getTime(), resultDocument.getFieldValue(dynamicField));

        // Verify the multivalued field
        final Collection<Object> dynamicFieldValues = resultDocument.getFieldValues(dynamicFieldMultiValued);
        Assert.assertNotNull(dynamicFieldValues);
        Assert.assertEquals(2, dynamicFieldValues.size());
        Assert.assertTrue(dynamicFieldValues.contains(date2.getTime()));
        Assert.assertTrue(dynamicFieldValues.contains(date3.getTime()));
    } finally {
        // Always remove the test document so repeated runs start clean.
        if (solrServer != null) {
            solrServer.deleteById(id);
            solrServer.commit();
        }
    }
}

From source file:de.qaware.chronix.examples.exploration.ui.MainController.java

License:Apache License

/**
 * Runs the current q/fq input against Chronix on a background thread, clears the
 * chart on the JavaFX application thread, and renders the resulting time series.
 */
private void queryTimeSeries() {
    // Parameterized Task<Void> instead of the raw Task type.
    Task<Void> task = new Task<Void>() {
        @Override
        protected Void call() throws Exception {
            String queryString = query.getText().trim();
            String fq = fqQuery.getText().trim();

            // UI mutations must run on the JavaFX application thread.
            Platform.runLater(() -> {
                chart.getData().clear();
                rows.clear();
                //Start the query
                chart.setTitle("Your Query was q=" + queryString + " fq=" + fq);
            });

            // Renamed from "query" so it no longer shadows the query text-field member.
            SolrQuery solrQuery = new SolrQuery(queryString);
            solrQuery.addField("+data");

            boolean hasFilterQueries = !fq.isEmpty();

            if (hasFilterQueries) {
                solrQuery.addFilterQuery(fq);
            }

            long queryStart = System.currentTimeMillis();
            List<MetricTimeSeries> result = chronix.stream(solr, solrQuery).collect(Collectors.toList());
            long queryEnd = System.currentTimeMillis();
            LOGGER.info("Query took: {} ms for {} points", (queryEnd - queryStart), size(result));
            queryStart = System.currentTimeMillis();
            result.forEach(ts -> {
                // Function results (e.g. max) only exist when filter queries were set.
                if (hasFilterQueries) {
                    addFunctionsToTable(ts);
                }
                convertTsToSeries(ts);
            });
            queryEnd = System.currentTimeMillis();
            LOGGER.info("Charting took: {} ms", (queryEnd - queryStart));
            return null;
        }
    };
    new Thread(task).start();

}

From source file:de.qaware.chronix.examples.server.ChronixClientExampleWithGenericTimeSeries.java

License:Apache License

/**
 * Example: query Chronix for generic time series and log the server-side maxima.
 */
public static void main(String[] args) {
    SolrClient solr = new HttpSolrClient.Builder().withBaseSolrUrl("http://localhost:8983/solr/chronix/")
            .build();

    // Group records by "<name>-<host>" so chunks of the same series end up together.
    Function<GenericTimeSeries<Long, Double>, String> groupBy = series -> series.getAttribute("name") + "-"
            + series.getAttribute("host");

    // Reduce grouped records by averaging their values pairwise.
    BinaryOperator<GenericTimeSeries<Long, Double>> reduce = (left, right) -> merge(left, right,
            (y1, y2) -> (y1 + y2) / 2);

    // Chronix client backed by Solr storage with a page size of 200.
    ChronixClient<GenericTimeSeries<Long, Double>, SolrClient, SolrQuery> chronix = new ChronixClient<>(
            new GenericTimeSeriesConverter(), new ChronixSolrStorage<>(200, groupBy, reduce));

    // Ask for the maximum of every time series whose name matches *Load*.
    SolrQuery query = new SolrQuery("name:*Load*");
    query.setParam("cf", "metric{max}");

    // The result is a Java Stream; collect it into a list.
    List<GenericTimeSeries<Long, Double>> maxTS = chronix.stream(solr, query).collect(Collectors.toList());

    LOGGER.info("Result for query {} is: {}", query, maxTS);
}

From source file:de.qaware.chronix.examples.server.ChronixClientExampleWithKassiopeia.java

License:Apache License

/**
 * Example: query Chronix (Kassiopeia converter) and log the server-side maxima.
 */
public static void main(String[] args) {
    SolrClient solr = new HttpSolrClient("http://localhost:8983/solr/chronix/");

    // Group records by "<metric>-<host>" so chunks of the same series end up together.
    Function<TimeSeries<Long, Double>, String> groupBy = series -> series.getAttribute("metric") + "-"
            + series.getAttribute("host");

    // Reduce grouped records by averaging their values pairwise.
    BinaryOperator<TimeSeries<Long, Double>> reduce = (left, right) -> merge(left, right,
            (y1, y2) -> (y1 + y2) / 2);

    // Chronix client backed by Solr storage with a page size of 200.
    ChronixClient<TimeSeries<Long, Double>, SolrClient, SolrQuery> chronix = new ChronixClient<>(
            new KassiopeiaConverter(), new ChronixSolrStorage<>(200, groupBy, reduce));

    // Ask for the maximum of every time series whose metric matches *Load*.
    SolrQuery query = new SolrQuery("metric:*Load*");
    query.addFilterQuery("function=max");

    // The result is a Java Stream; collect it into a list.
    List<TimeSeries<Long, Double>> maxTS = chronix.stream(solr, query).collect(Collectors.toList());

    LOGGER.info("Result for query {} is: {}", query, maxTS);
}

From source file:de.qaware.chronix.examples.server.ChronixClientExampleWithKassiopeiaSimple.java

License:Apache License

/**
 * Example: query Chronix (simple Kassiopeia converter) and pretty-print the maxima.
 */
public static void main(String[] args) {
    SolrClient solr = new HttpSolrClient("http://localhost:8983/solr/chronix/");

    // Group records by "<metric>-<host>" so chunks of the same series end up together.
    Function<MetricTimeSeries, String> groupBy = series -> series.getMetric() + "-" + series.attribute("host");

    // Merge two chunks of a grouped series by concatenating their points;
    // a null partner yields an empty placeholder series.
    BinaryOperator<MetricTimeSeries> reduce = (left, right) -> {
        if (left == null || right == null) {
            return new MetricTimeSeries.Builder("empty").build();
        }
        left.addAll(right.getTimestampsAsArray(), right.getValuesAsArray());
        return left;
    };

    // Chronix client backed by Solr storage with a page size of 200.
    ChronixClient<MetricTimeSeries, SolrClient, SolrQuery> chronix = new ChronixClient<>(
            new KassiopeiaSimpleConverter(), new ChronixSolrStorage<>(200, groupBy, reduce));

    // Ask for the maximum of every time series whose metric matches *Load*.
    SolrQuery query = new SolrQuery("metric:*Load*");
    query.addFilterQuery("function=max");

    // The result is a Java Stream; collect it into a list.
    List<MetricTimeSeries> maxTS = chronix.stream(solr, query).collect(Collectors.toList());

    LOGGER.info("Result for query {} is: {}", query, prettyPrint(maxTS));
}

From source file:de.qaware.chronix.examples.server.ChronixClientExampleWithMetricTimeSeries.java

License:Apache License

/**
 * Example: query Chronix for metric time series and pretty-print the server-side maxima.
 */
public static void main(String[] args) {
    SolrClient solr = new HttpSolrClient.Builder().withBaseSolrUrl("http://localhost:8983/solr/chronix/")
            .build();

    // Group records by "<name>-<host>" so chunks of the same series end up together.
    Function<MetricTimeSeries, String> groupBy = series -> series.getName() + "-" + series.attribute("host");

    // Merge two chunks of a grouped series by concatenating their points;
    // a null partner yields an empty placeholder series.
    BinaryOperator<MetricTimeSeries> reduce = (left, right) -> {
        if (left == null || right == null) {
            return new MetricTimeSeries.Builder("empty", "metric").build();
        }
        left.addAll(right.getTimestampsAsArray(), right.getValuesAsArray());
        return left;
    };

    // Chronix client backed by Solr storage with a page size of 200.
    ChronixClient<MetricTimeSeries, SolrClient, SolrQuery> chronix = new ChronixClient<>(
            new MetricTimeSeriesConverter(), new ChronixSolrStorage<>(200, groupBy, reduce));

    // Ask for the maximum of every time series whose name matches *Load*.
    SolrQuery query = new SolrQuery("name:*Load*");
    query.setParam("cf", "metric{max}");

    // The result is a Java Stream; collect it into a list.
    List<MetricTimeSeries> maxTS = chronix.stream(solr, query).collect(Collectors.toList());

    LOGGER.info("Result for query {} is: {}", query, prettyPrint(maxTS));
}

From source file:de.qaware.chronix.spark.api.java.config.ChronixSparkLoader.java

License:Apache License

/**
 * Builds a ChronixRDD by running the configured Solr reference query.
 *
 * @param chronixSparkContext the Spark context used to execute the Chronix query
 * @return the RDD of time series matching the reference query
 * @throws IOException         if querying fails with an I/O error
 * @throws SolrServerException if Solr rejects the query
 */
public ChronixRDD createChronixRDD(ChronixSparkContext chronixSparkContext)
        throws IOException, SolrServerException {
    final SolrQuery referenceQuery = new SolrQuery(chronixYAMLConfiguration.getSolrReferenceQuery());
    return chronixSparkContext.query(referenceQuery, chronixYAMLConfiguration.getZookeeperHost(),
            chronixYAMLConfiguration.getChronixCollection(), chronixYAMLConfiguration.getStorage());
}

From source file:de.qaware.chronix.spark.api.java.ExternalizeTestData.java

License:Apache License

/**
 * Serializes the time series returned by the configured Solr reference query into a
 * Deflater-compressed Kryo test data file.
 *
 * @param args optional first argument: file to serialize to. A default file name is provided.
 * @throws SolrServerException if the Chronix/Solr query fails
 * @throws IOException         if the test data file cannot be written
 */
public static void main(String[] args) throws SolrServerException, IOException {

    ChronixSparkLoader chronixSparkLoader = new ChronixSparkLoader();
    ChronixYAMLConfiguration config = chronixSparkLoader.getConfig();

    // Target file: first CLI argument wins, otherwise the configured default.
    String file = (args.length >= 1) ? args[0] : config.getTestdataFile();

    Path filePath = Paths.get(file);
    Files.deleteIfExists(filePath);
    System.out.println("Opening test data file: " + filePath.toString());

    ChronixSparkContext cSparkContext = null;

    // try-with-resources closes (and flushes) the Kryo output even on failure,
    // without masking a primary exception the way a close() in finally could.
    try (Output output = new Output(new DeflaterOutputStream(new FileOutputStream(filePath.toString())))) {
        //Create Chronix Spark context
        cSparkContext = chronixSparkLoader.createChronixSparkContext();

        //Read data into ChronixRDD
        SolrQuery query = new SolrQuery(config.getSolrReferenceQuery());
        ChronixRDD rdd = cSparkContext.queryChronixChunks(query, config.getZookeeperHost(),
                config.getChronixCollection(), config.getStorage());

        System.out.println("Writing " + rdd.count() + " time series into test data file.");

        //Loop through result and serialize it to disk
        Kryo kryo = new Kryo();
        List<MetricTimeSeries> mtsList = IteratorUtils.toList(rdd.iterator());
        System.out.println("Writing objects...");
        kryo.writeObject(output, mtsList);
        output.flush();
        System.out.println("Objects written.");
        // Report success only when everything was actually written (previously this was
        // printed in finally, i.e. even after a failure).
        System.out.println("Test data file written successfully!");
    } finally {
        if (cSparkContext != null) {
            cSparkContext.getSparkContext().close();
        }
    }
}

From source file:de.qaware.chronix.storage.solr.ChronixSolrCloudStorageTest.java

License:Apache License

/**
 * Streams all time series shard-by-shard from a Solr Cloud collection and prints them.
 * Manual check URL:
 * http://localhost:8983/solr/chronix_shard1_replica1/export?q=*:*&amp;fl=metric&amp;sort=id+asc
 */
@Test
@Ignore
public void testStreamTimeSeries() throws IOException {
    final ChronixSolrCloudStorage storage = new ChronixSolrCloudStorage();
    // One export stream per shard; print every series from each shard.
    for (final String shard : storage.getShardList(ZK_HOST, COLLECTION)) {
        final Stream<MetricTimeSeries> seriesStream = storage.streamFromSingleNode(ZK_HOST, COLLECTION, shard,
                new SolrQuery(SOLR_REFERENCE_QUERY), new MetricTimeSeriesConverter());
        seriesStream.collect(Collectors.toList()).forEach(System.out::println);
    }

}