Example usage for org.apache.solr.client.solrj.impl HttpClientUtil PROP_ALLOW_COMPRESSION

List of usage examples for org.apache.solr.client.solrj.impl HttpClientUtil PROP_ALLOW_COMPRESSION

Introduction

In this page you can find the example usage for org.apache.solr.client.solrj.impl HttpClientUtil PROP_ALLOW_COMPRESSION.

Prototype

String PROP_ALLOW_COMPRESSION

To view the source code for org.apache.solr.client.solrj.impl HttpClientUtil PROP_ALLOW_COMPRESSION, click the Source Link below.

Click Source Link

Usage

From source file: com.thinkaurelius.titan.diskstorage.solr.Solr5Index.java

License: Apache License

/**
 * Creates a Solr 5 index backend from the supplied configuration.
 *
 * <p>In {@code CLOUD} mode a {@link CloudSolrClient} is created against the configured
 * Zookeeper URL and connected eagerly. In {@code HTTP} mode an {@link LBHttpSolrClient}
 * is built over the configured base URLs using an {@link HttpClient} tuned with the
 * compression, timeout, and connection-pool settings from the configuration.
 *
 * @param config the backend configuration; must not be {@code null}
 * @throws BackendException declared by the backend contract
 * @throws IllegalArgumentException if the configured Solr operation mode is unsupported
 */
public Solr5Index(final Configuration config) throws BackendException {
    Preconditions.checkArgument(config != null);
    configuration = config;

    mode = Mode.parse(config.get(SOLR_MODE));
    dynFields = config.get(DYNAMIC_FIELDS);
    keyFieldIds = parseKeyFieldsForCollections(config);
    maxResults = config.get(GraphDatabaseConfiguration.INDEX_MAX_RESULT_SET_SIZE);
    ttlField = config.get(TTL_FIELD);
    waitSearcher = config.get(WAIT_SEARCHER);

    if (mode == Mode.CLOUD) {
        String zookeeperUrl = config.get(Solr5Index.ZOOKEEPER_URL);
        CloudSolrClient cloudServer = new CloudSolrClient(zookeeperUrl, true);
        cloudServer.connect();
        solrClient = cloudServer;
    } else if (mode == Mode.HTTP) {
        // Build the HTTP client parameters explicitly rather than via double-brace
        // initialization: the anonymous inner class that idiom creates retains a
        // hidden reference to the enclosing instance (leak-prone, flagged by linters).
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.add(HttpClientUtil.PROP_ALLOW_COMPRESSION, config.get(HTTP_ALLOW_COMPRESSION).toString());
        params.add(HttpClientUtil.PROP_CONNECTION_TIMEOUT, config.get(HTTP_CONNECTION_TIMEOUT).toString());
        params.add(HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST,
                config.get(HTTP_MAX_CONNECTIONS_PER_HOST).toString());
        params.add(HttpClientUtil.PROP_MAX_CONNECTIONS, config.get(HTTP_GLOBAL_MAX_CONNECTIONS).toString());
        HttpClient clientParams = HttpClientUtil.createClient(params);

        solrClient = new LBHttpSolrClient(clientParams, config.get(HTTP_URLS));

    } else {
        throw new IllegalArgumentException("Unsupported Solr operation mode: " + mode);
    }
}

From source file: org.janusgraph.diskstorage.solr.SolrIndex.java

License: Apache License

/**
 * Creates a Solr index backend from the supplied configuration.
 *
 * <p>When Kerberos is enabled the Solr clients are configured for it before any client
 * is built. In {@code CLOUD} mode any chroot suffix (e.g. {@code localhost:2181/solr})
 * is split off the Zookeeper URLs — the chroot is assumed identical across the ensemble,
 * so the first one found (scanning from the last URL) wins — and a {@link CloudSolrClient}
 * is built and connected. In {@code HTTP} mode an {@link LBHttpSolrClient} is built over
 * an {@link HttpClient} tuned with the configured compression, timeout, and
 * connection-pool settings.
 *
 * @param config the backend configuration; must not be {@code null}
 * @throws BackendException declared by the backend contract
 * @throws IllegalArgumentException if the configured Solr operation mode is unsupported
 */
public SolrIndex(final Configuration config) throws BackendException {
    Preconditions.checkArgument(config != null);
    configuration = config;
    mode = Mode.parse(config.get(SOLR_MODE));
    kerberosEnabled = config.get(KERBEROS_ENABLED);
    dynFields = config.get(DYNAMIC_FIELDS);
    keyFieldIds = parseKeyFieldsForCollections(config);
    batchSize = config.get(INDEX_MAX_RESULT_SET_SIZE);
    ttlField = config.get(TTL_FIELD);
    waitSearcher = config.get(WAIT_SEARCHER);

    if (kerberosEnabled) {
        logger.debug("Kerberos is enabled. Configuring SOLR for Kerberos.");
        configureSolrClientsForKerberos();
    } else {
        logger.debug("Kerberos is NOT enabled.");
        logger.debug("KERBEROS_ENABLED name is " + KERBEROS_ENABLED.getName() + " and it is"
                + (KERBEROS_ENABLED.isOption() ? " " : " not") + " an option.");
        logger.debug("KERBEROS_ENABLED type is " + KERBEROS_ENABLED.getType().name());
    }
    final ModifiableSolrParams clientParams = new ModifiableSolrParams();
    switch (mode) {
    case CLOUD:
        final String[] zookeeperUrl = config.get(SolrIndex.ZOOKEEPER_URL);
        // Process possible zookeeper chroot, e.g. localhost:2181/solr.
        // The chroot has to be the same assuming one Zookeeper ensemble,
        // so scan from the last URL and take the first chroot found.
        String chroot = null;
        for (int i = zookeeperUrl.length - 1; i >= 0; i--) {
            int chrootIndex = zookeeperUrl[i].indexOf("/");
            if (chrootIndex != -1) {
                String hostAndPort = zookeeperUrl[i].substring(0, chrootIndex);
                if (chroot == null) {
                    chroot = zookeeperUrl[i].substring(chrootIndex);
                }
                // Strip the chroot so only host:port is handed to the builder.
                zookeeperUrl[i] = hostAndPort;
            }
        }
        final CloudSolrClient.Builder builder = new CloudSolrClient.Builder()
                .withLBHttpSolrClientBuilder(new LBHttpSolrClient.Builder()
                        .withHttpSolrClientBuilder(
                                new HttpSolrClient.Builder().withInvariantParams(clientParams))
                        .withBaseSolrUrls(config.get(HTTP_URLS)))
                .withZkHost(Arrays.asList(zookeeperUrl)).sendUpdatesOnlyToShardLeaders();
        if (chroot != null) {
            builder.withZkChroot(chroot);
        }
        final CloudSolrClient cloudServer = builder.build();
        cloudServer.connect();
        solrClient = cloudServer;

        break;
    case HTTP:
        clientParams.add(HttpClientUtil.PROP_ALLOW_COMPRESSION, config.get(HTTP_ALLOW_COMPRESSION).toString());
        clientParams.add(HttpClientUtil.PROP_CONNECTION_TIMEOUT,
                config.get(HTTP_CONNECTION_TIMEOUT).toString());
        clientParams.add(HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST,
                config.get(HTTP_MAX_CONNECTIONS_PER_HOST).toString());
        clientParams.add(HttpClientUtil.PROP_MAX_CONNECTIONS,
                config.get(HTTP_GLOBAL_MAX_CONNECTIONS).toString());
        final HttpClient client = HttpClientUtil.createClient(clientParams);
        solrClient = new LBHttpSolrClient.Builder().withHttpClient(client)
                .withBaseSolrUrls(config.get(HTTP_URLS)).build();

        break;
    default:
        throw new IllegalArgumentException("Unsupported Solr operation mode: " + mode);
    }
}