Example usage for org.apache.solr.core SolrResourceLoader SolrResourceLoader

List of usage examples for org.apache.solr.core SolrResourceLoader SolrResourceLoader

Introduction

On this page you can find example usage for org.apache.solr.core SolrResourceLoader SolrResourceLoader.

Prototype

public SolrResourceLoader(Path instanceDir, ClassLoader parent, Properties coreProperties) 

Source Link

Document

This loader will delegate to Solr's classloader when possible, otherwise it will attempt to resolve resources using any jar files found in the "lib/" directory in the specified instance directory.

Usage

From source file:com.datasalt.pangool.solr.SolrRecordWriter.java

License:Apache License

/**
 * Creates a record writer that feeds converted tuples into an embedded Solr core,
 * writing the index to a task-local temporary directory and promoting it to the
 * job output path ({@code perm}) on completion.
 *
 * @param batchSize     number of documents buffered per batch by the BatchWriter
 * @param outputZipFile if true, the final output path gets a {@code .zip} suffix
 * @param threadCount   writer threads used by the BatchWriter
 * @param queueSize     queue capacity of the BatchWriter
 * @param localSolrHome local Solr home directory path
 * @param zipName       name used for the zipped output
 * @param converter     converts Tuples into Solr documents
 * @param context       Hadoop task context; supplies configuration and task id
 * @throws IllegalStateException if any part of the Solr core setup fails
 */
public SolrRecordWriter(int batchSize, boolean outputZipFile, int threadCount, int queueSize,
        String localSolrHome, String zipName, TupleDocumentConverter converter, TaskAttemptContext context) {
    this.localSolrHome = localSolrHome;
    this.zipName = zipName;
    conf = context.getConfiguration();
    this.batchSize = batchSize;

    // Quiet Solr through each logging facade that may be active in the job JVM.
    // Setting the parent "org.apache.solr" logger is sufficient: child loggers
    // such as "...core" and "...update" inherit the parent level unless they
    // were explicitly configured elsewhere.
    setLogLevel("org.apache.solr", "WARN");
    Logger.getLogger("org.apache.solr").setLevel(Level.WARN);
    java.util.logging.Logger.getLogger("org.apache.solr").setLevel(java.util.logging.Level.WARNING);

    heartBeater = new HeartBeater(context);
    try {
        // Keep the task alive (progress heartbeats) during the potentially slow setup.
        heartBeater.needHeartBeat();
        /** The actual file in hdfs that holds the configuration. */
        this.outputZipFile = outputZipFile;

        this.fs = FileSystem.get(conf);
        perm = new Path(FileOutputFormat.getOutputPath(context), getOutFileName(context, "part"));

        // Make a task unique name that contains the actual index output name to
        // make debugging simpler
        // Note: if using JVM reuse, the sequence number will not be reset for a
        // new task using the jvm

        Path temp = conf.getLocalPath("mapred.local.dir",
                "solr_" + conf.get("mapred.task.id") + '.' + sequence.incrementAndGet());

        if (outputZipFile && !perm.getName().endsWith(".zip")) {
            perm = perm.suffix(".zip");
        }
        fs.delete(temp, true); // delete old, if any
        fs.delete(perm, true); // delete old, if any
        local = fs.startLocalOutput(perm, temp);

        solrHome = findSolrConfig(conf);

        // Verify that the solr home has a conf and lib directory
        if (solrHome == null) {
            throw new IOException("Unable to find solr home setting");
        }

        // Setup a solr instance that we can batch writes to
        LOG.info("SolrHome: " + solrHome.toUri());
        String dataDir = new File(local.toString(), "data").getAbsoluteFile().toString();
        // copy the schema to the conf dir
        File confDir = new File(local.toString(), "conf");
        confDir.mkdirs();

        File unpackedSolrHome = new File(solrHome.toString());
        FileUtils.copyDirectory(new File(unpackedSolrHome, "conf"), confDir);

        Properties props = new Properties();
        props.setProperty("solr.data.dir", dataDir);
        props.setProperty("solr.home", solrHome.toString());
        SolrResourceLoader loader = new SolrResourceLoader(solrHome.toString(), null, props);
        LOG.info(String.format(
                "Constructed instance information solr.home %s (%s), instance dir %s, conf dir %s, writing index to temporary directory %s, with permdir %s",
                solrHome, solrHome.toUri(), loader.getInstanceDir(), loader.getConfigDir(), dataDir, perm));
        CoreContainer container = new CoreContainer(loader);
        CoreDescriptor descr = new CoreDescriptor(container, "core1", solrHome.toString());
        descr.setDataDir(dataDir);
        descr.setCoreProperties(props);
        core = container.create(descr);
        container.register(core, false);
        solr = new EmbeddedSolrServer(container, "core1");
        batchWriter = new BatchWriter(solr, batchSize, context.getTaskAttemptID().getTaskID(), threadCount,
                queueSize);

        this.converter = converter;
    } catch (Exception e) {
        // Do not printStackTrace(): the cause is preserved on the rethrown
        // exception and logged here, so stderr duplication is just noise.
        LOG.error(e);
        throw new IllegalStateException(String.format("Failed to initialize record writer for %s, %s",
                context.getJobName(), conf.get("mapred.task.id")), e);
    } finally {
        heartBeater.cancelHeartBeat();
    }
}

From source file:com.ngdata.hbaseindexer.mr.TestUtils.java

License:Apache License

/**
 * Creates an EmbeddedSolrServer backed by an HDFS-resident index directory.
 *
 * @param solrHomeDir    local Solr home to base the core on (copied, not used in place)
 * @param fs             the Hadoop filesystem (logged only in this method)
 * @param outputShardDir shard directory under which the index "data" dir is created
 * @return an embedded server wrapping the single core "core1"
 * @throws IOException if copying the Solr home or loading the container fails
 * @throws UnsupportedOperationException if the created core does not use HdfsDirectoryFactory
 */
private static EmbeddedSolrServer createEmbeddedSolrServer(File solrHomeDir, FileSystem fs, Path outputShardDir)
        throws IOException {

    LOG.info("Creating embedded Solr server with solrHomeDir: " + solrHomeDir + ", fs: " + fs
            + ", outputShardDir: " + outputShardDir);

    // copy solrHomeDir to ensure it isn't modified across multiple unit tests or multiple EmbeddedSolrServer instances
    File tmpDir = Files.createTempDir();
    tmpDir.deleteOnExit();
    FileUtils.copyDirectory(solrHomeDir, tmpDir);
    solrHomeDir = tmpDir;

    Path solrDataDir = new Path(outputShardDir, "data");

    String dataDirStr = solrDataDir.toUri().toString();

    SolrResourceLoader loader = new SolrResourceLoader(Paths.get(solrHomeDir.toString()), null, null);

    LOG.info(String.format(Locale.ENGLISH,
            "Constructed instance information solr.home %s (%s), instance dir %s, conf dir %s, writing index to solr.data.dir %s, with permdir %s",
            solrHomeDir, solrHomeDir.toURI(), loader.getInstancePath(), loader.getConfigDir(), dataDirStr,
            outputShardDir));

    // TODO: This is fragile and should be well documented
    // System properties must be set BEFORE container.load() so the core picks
    // up the HDFS directory factory and lock type at configuration time.
    System.setProperty("solr.directoryFactory", HdfsDirectoryFactory.class.getName());
    System.setProperty("solr.lock.type", DirectoryFactory.LOCK_TYPE_HDFS);
    System.setProperty("solr.hdfs.nrtcachingdirectory", "false");
    System.setProperty("solr.hdfs.blockcache.enabled", "false");
    System.setProperty("solr.autoCommit.maxTime", "600000");
    System.setProperty("solr.autoSoftCommit.maxTime", "-1");

    CoreContainer container = new CoreContainer(loader);
    container.load();

    SolrCore core = container.create("core1", Paths.get(solrHomeDir.toString()),
            ImmutableMap.of(CoreDescriptor.CORE_DATADIR, dataDirStr), false);

    // Guard: this test harness only supports indexes written through HDFS.
    if (!(core.getDirectoryFactory() instanceof HdfsDirectoryFactory)) {
        throw new UnsupportedOperationException(
                "Invalid configuration. Currently, the only DirectoryFactory supported is "
                        + HdfsDirectoryFactory.class.getSimpleName());
    }

    EmbeddedSolrServer solr = new EmbeddedSolrServer(container, "core1");
    return solr;
}

From source file:org.alfresco.solr.AbstractAlfrescoSolrTests.java

License:Open Source License

/**
 * @deprecated as testHarness is used/* w w  w.  java 2 s.  c o m*/
 */
@Deprecated
public static void createAlfrescoCore(String schema)
        throws ParserConfigurationException, IOException, SAXException {
    Properties properties = new Properties();
    properties.put("solr.tests.maxBufferedDocs", "1000");
    properties.put("solr.tests.maxIndexingThreads", "10");
    properties.put("solr.tests.ramBufferSizeMB", "1024");
    properties.put("solr.tests.mergeScheduler", "org.apache.lucene.index.ConcurrentMergeScheduler");
    properties.put("alfresco.acl.tracker.cron", "0/10 * * * * ? *");
    properties.put("alfresco.content.tracker.cron", "0/10 * * * * ? *");
    properties.put("alfresco.metadata.tracker.cron", "0/10 * * * * ? *");
    properties.put("alfresco.cascade.tracker.cron", "0/10 * * * * ? *");
    properties.put("alfresco.commit.tracker.cron", "0/10 * * * * ? *");
    if ("schema.xml".equalsIgnoreCase(schema)) {
        String templateName = System.getProperty("templateName", "rerank");
        FileUtils.copyFile(Paths.get(String.format(TEMPLATE_CONF, templateName) + schema).toFile(),
                Paths.get(TEST_SOLR_CONF + schema).toFile());
    }

    SolrResourceLoader resourceLoader = new SolrResourceLoader(Paths.get(TEST_FILES_LOCATION), null,
            properties);
    TestCoresLocator locator = new TestCoresLocator(SolrTestCaseJ4.DEFAULT_TEST_CORENAME, "data",
            "solrconfig.xml", schema);

    NodeConfig nodeConfig = new NodeConfig.NodeConfigBuilder("name", resourceLoader).setUseSchemaCache(false)
            .setCoreAdminHandlerClass(AlfrescoCoreAdminHandler.class.getName()).build();
    try {
        h = new TestHarness(nodeConfig, locator);
        h.coreName = SolrTestCaseJ4.DEFAULT_TEST_CORENAME;
        CORE_NOT_YET_CREATED = false;
    } catch (Exception e) {
        LOG.info("we hit an issue", e);
    }
    lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
}

From source file:org.alfresco.solr.SolrCoreTestBase.java

License:Open Source License

/**
 * Builds a partially hand-wired SolrCore for unit tests.
 *
 * SolrCore is final and cannot be mocked with Mockito, so a real core is
 * constructed from a minimal config and its private collaborators (update
 * handler, resource loader, registries, request handlers, update chains) are
 * injected directly via reflection (FieldUtils.writeField).
 *
 * NOTE(review): field injection order mirrors the fields the core reads
 * during request handling — keep the sequence as-is.
 */
@Before
public void setUpBase() throws Exception {
    Properties properties = new Properties();
    properties.put("solr.tests.maxBufferedDocs", "1000");
    properties.put("solr.tests.maxIndexingThreads", "10");
    properties.put("solr.tests.ramBufferSizeMB", "1024");
    properties.put("solr.tests.mergeScheduler", "org.apache.lucene.index.ConcurrentMergeScheduler");
    properties.put("solr.tests.mergePolicy", "org.apache.lucene.index.TieredMergePolicy");

    coreContainer = new CoreContainer(TEST_FILES_LOCATION);
    resourceLoader = new SolrResourceLoader(Paths.get(TEST_SOLR_CONF), null, properties);
    SolrConfig solrConfig = new SolrConfig(resourceLoader, "solrconfig-afts.xml", null);
    IndexSchemaFactory.buildIndexSchema("schema-afts.xml", solrConfig);
    coreDescriptor = new CoreDescriptor(coreContainer, "name", Paths.get(TEST_SOLR_COLLECTION));

    // SolrCore is final, we can't mock with mockito
    core = new SolrCore("name", null, solrConfig, null, null, coreDescriptor, null, null, null);

    FieldUtils.writeField(core, "updateHandler", updateHandler, true);
    FieldUtils.writeField(core, "resourceLoader", resourceLoader, true);
    infoRegistry = new HashMap<String, SolrInfoMBean>();
    FieldUtils.writeField(core, "infoRegistry", infoRegistry, true);
    reqHandlers = new RequestHandlers(core);
    reqHandlers.register("/select", selectRequestHandler);
    reqHandlers.register("/afts", aftsRequestHandler);
    FieldUtils.writeField(core, "reqHandlers", reqHandlers, true);

    Map<String, UpdateRequestProcessorChain> map = new HashMap<>();
    List<UpdateRequestProcessorFactory> factories = new ArrayList<UpdateRequestProcessorFactory>(1);
    factories.add(runUpdateProcessorFactory);
    when(runUpdateProcessorFactory.getInstance(any(SolrQueryRequest.class), any(SolrQueryResponse.class),
            any(UpdateRequestProcessor.class))).thenReturn(processor);
    UpdateRequestProcessorChain def = new UpdateRequestProcessorChain(factories, core);
    // Register the default chain under both null and "" — Solr looks the
    // default chain up under either key depending on the request.
    map.put(null, def);
    map.put("", def);
    FieldUtils.writeField(core, "updateProcessorChains", map, true);
}

From source file:org.gbif.ocurrence.index.solr.SolrRecordWriter.java

License:Apache License

/**
 * Creates a record writer that batches Solr documents into an embedded core,
 * writing the index to a task-local temporary directory and promoting it to
 * the job output path ({@code perm}) when the task commits.
 *
 * @param context Hadoop task context; supplies configuration, task id, and
 *                the SolrOutputFormat settings (batch size, zip output, etc.)
 * @throws IllegalStateException if the job was not set up via SolrOutputFormat
 *                               or if any part of the Solr core setup fails
 */
@SuppressWarnings("unchecked")
public SolrRecordWriter(TaskAttemptContext context) {
    conf = context.getConfiguration();
    batchSize = SolrOutputFormat.getBatchSize(conf);

    setLogLevel("org.apache.solr.core", "WARN");
    setLogLevel("org.apache.solr.update", "WARN");

    heartBeater = new HeartBeater(context);
    try {
        // Keep the task alive (progress heartbeats) during the potentially slow setup.
        heartBeater.needHeartBeat();
        /** The actual file in hdfs that holds the configuration. */

        final String configuredSolrConfigPath = conf.get(SolrOutputFormat.SETUP_OK);
        if (configuredSolrConfigPath == null) {
            throw new IllegalStateException(
                    String.format("The job did not pass %s", SolrOutputFormat.SETUP_OK));
        }
        outputZipFile = SolrOutputFormat.isOutputZipFormat(conf);

        this.fs = FileSystem.get(conf);
        perm = new Path(FileOutputFormat.getOutputPath(context), getOutFileName(context, "part"));

        // Make a task unique name that contains the actual index output name to
        // make debugging simpler
        // Note: if using JVM reuse, the sequence number will not be reset for a
        // new task using the jvm

        temp = conf.getLocalPath("mapred.local.dir",
                "solr_" + conf.get("mapred.task.id") + '.' + sequence.incrementAndGet());

        if (outputZipFile && !perm.getName().endsWith(".zip")) {
            perm = perm.suffix(".zip");
        }
        fs.delete(perm, true); // delete old, if any
        Path local = fs.startLocalOutput(perm, temp);

        solrHome = findSolrConfig(conf);

        // Verify that the solr home has a conf and lib directory
        if (solrHome == null) {
            throw new IOException("Unable to find solr home setting");
        }

        // Setup a solr instance that we can batch writes to
        LOG.info("SolrHome: " + solrHome.toUri());
        String dataDir = new File(local.toString(), "data").toString();
        // copy the schema to the conf dir
        File confDir = new File(local.toString(), "conf");
        confDir.mkdirs();
        File srcSchemaFile = new File(solrHome.toString(), "conf/schema.xml");
        // Explicit check instead of `assert`: assertions are disabled by default
        // in production JVMs, which silently deferred this failure to copyFile.
        if (!srcSchemaFile.exists()) {
            throw new IOException("Missing Solr schema file: " + srcSchemaFile.getAbsolutePath());
        }
        FileUtils.copyFile(srcSchemaFile, new File(confDir, "schema.xml"));
        Properties props = new Properties();
        props.setProperty("solr.data.dir", dataDir);
        props.setProperty("solr.home", solrHome.toString());
        SolrResourceLoader loader = new SolrResourceLoader(solrHome.toString(), null, props);
        LOG.info(String.format(
                "Constructed instance information solr.home %s (%s), instance dir %s, conf dir %s, writing index to temporary directory %s, with permdir %s",
                solrHome, solrHome.toUri(), loader.getInstanceDir(), loader.getConfigDir(), dataDir, perm));
        CoreContainer container = new CoreContainer(loader);
        CoreDescriptor descr = new CoreDescriptor(container, "core1", solrHome.toString());
        descr.setDataDir(dataDir);
        descr.setCoreProperties(props);
        core = container.create(descr);
        container.register(core, false);
        solr = new EmbeddedSolrServer(container, "core1");
        batchWriter = new BatchWriter(solr, batchSize, context.getTaskAttemptID().getTaskID(),
                SolrOutputFormat.getSolrWriterThreadCount(conf), SolrOutputFormat.getSolrWriterQueueSize(conf));

        // instantiate the converter
        String className = SolrDocumentConverter.getSolrDocumentConverter(conf);
        Class<? extends SolrDocumentConverter> cls = (Class<? extends SolrDocumentConverter>) Class
                .forName(className);
        converter = (SolrDocumentConverter<K, V>) ReflectionUtils.newInstance(cls, conf);
    } catch (Exception e) {
        throw new IllegalStateException(String.format("Failed to initialize record writer for %s, %s",
                context.getJobName(), conf.get("mapred.task.id")), e);
    } finally {
        heartBeater.cancelHeartBeat();
    }
}

From source file:org.vootoo.logging.logback.LogbackWatcherTest.java

License:Apache License

/**
 * Initializes the log-watcher config and a SolrResourceLoader rooted at the
 * working directory for each test.
 */
@Before
public void setUp() {
    config = new LogWatcherConfig(true, LogbackWatcher.class.getName(), null, 50);
    // The original passed Runtime.getRuntime().getClass().getClassLoader() as
    // the parent loader — but Runtime is loaded by the bootstrap class loader,
    // so that expression always evaluates to null. Pass null explicitly;
    // behavior is identical and the intent is no longer obscured.
    // NOTE(review): presumably SolrResourceLoader substitutes its own default
    // loader for a null parent — confirm against the Solr version in use.
    loader = new SolrResourceLoader(Paths.get("."), null, null);
}