Example usage for org.apache.solr.core SolrCore getDataDir

List of usage examples for org.apache.solr.core SolrCore getDataDir

Introduction

On this page you can find example usage for org.apache.solr.core SolrCore getDataDir.

Prototype

public String getDataDir() 

Source Link

Usage

From source file:com.billiger.solr.handler.component.QLTBComponent.java

License:Apache License

/**
 * Informs this component of a core (re)load.
 *
 * This will both set the analyzer according to the configured
 * queryFieldType, and load the QLTB data. The data source is resolved
 * in this order: ZooKeeper, the conf/ directory, or the data/ directory.
 *
 * @param core the SolrCore being (re)loaded
 * @throws SolrException if the configured field type is unknown, the
 *         QLTB file argument is missing, or loading the data fails
 */
@Override
public final void inform(final SolrCore core) {
    // load analyzer
    String queryFieldType = initArgs.get(FIELD_TYPE);
    if (queryFieldType != null) {
        FieldType ft = core.getLatestSchema().getFieldTypes().get(queryFieldType);
        if (ft == null) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                    "unknown FieldType \"" + queryFieldType + "\" used in QLTBComponent");
        }
        analyzer = ft.getQueryAnalyzer();
    } else {
        // No field type configured: no query analysis is performed.
        analyzer = null;
    }
    synchronized (qltbCache) {
        // A core reload invalidates everything cached so far.
        qltbCache.clear();
        try {
            // retrieve QLTB data filename (mandatory argument)
            String qltbFile = initArgs.get(QLTB_FILE);
            if (qltbFile == null) {
                throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                        "QLTBComponent must specify argument: \"" + QLTB_FILE + "\" - path to QLTB data");
            }
            boolean exists = false;
            // check ZooKeeper first; if present it wins over local directories
            ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
            if (zkController != null) {
                exists = zkController.configFileExists(zkController.readConfigName(
                        core.getCoreDescriptor().getCloudDescriptor().getCollectionName()), qltbFile);
            } else {
                // no ZooKeeper, check conf/ and data/ directories
                File fConf = new File(core.getResourceLoader().getConfigDir(), qltbFile);
                File fData = new File(core.getDataDir(), qltbFile);
                if (fConf.exists() == fData.exists()) {
                    // both or neither exist — the configuration is ambiguous or missing
                    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                            "QLTBComponent missing config file: \"" + qltbFile + "\": either "
                                    + fConf.getAbsolutePath() + " or " + fData.getAbsolutePath()
                                    + " must exist, but not both");
                }
                if (fConf.exists()) {
                    // conf/ found, load it eagerly under the magic "null" cache key
                    // (reader-independent data, see getQLTBMap)
                    exists = true;
                    log.info("QLTB source conf/: " + fConf.getAbsolutePath());
                    Config cfg = new Config(core.getResourceLoader(), qltbFile);
                    qltbCache.put(null, loadQLTBMap(cfg, core));
                }
            }
            if (!exists) {
                // Neither ZooKeeper nor conf/, so must be in data/.
                // Loading from data/ needs an IndexReader to key the cache,
                // so borrow the newest searcher briefly.
                RefCounted<SolrIndexSearcher> searcher = null;
                try {
                    searcher = core.getNewestSearcher(false);
                    IndexReader reader = searcher.get().getIndexReader();
                    getQLTBMap(reader, core);
                } finally {
                    // always release the borrowed searcher reference
                    if (searcher != null) {
                        searcher.decref();
                    }
                }
            }
        } catch (Exception ex) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error initializing QltbComponent.",
                    ex);
        }
    }
}

From source file:com.billiger.solr.handler.component.QLTBComponent.java

License:Apache License

/**
 * Returns the QLTB map that applies to the given IndexReader.
 *
 * Data loaded from the conf/ directory is reader-independent and cached
 * under the magic {@code null} key; it is refreshed only on core reload.
 * Data from ZooKeeper or the data/ directory is loaded lazily per
 * IndexReader through the core's resource loader.
 *
 * @param reader the index reader keying the cache entry
 * @param core   the owning SolrCore
 * @return QLTB map for the given IndexReader
 * @throws Exception if the QLTB file is missing or cannot be parsed
 */
private Map<String, List<Query>> getQLTBMap(final IndexReader reader, final SolrCore core) throws Exception {
    synchronized (qltbCache) {
        // Magic "null" key: reader-independent data loaded from conf/.
        Map<String, List<Query>> confMap = qltbCache.get(null);
        if (confMap != null) {
            return confMap;
        }
        Map<String, List<Query>> readerMap = qltbCache.get(reader);
        if (readerMap != null) {
            return readerMap;
        }
        // Nothing cached for this reader yet: load from ZooKeeper or data/.
        log.info("load QLTB map for new IndexReader");
        String qltbFile = initArgs.get(QLTB_FILE);
        if (qltbFile == null) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                    "QLTBComponent must specify argument: " + QLTB_FILE);
        }
        ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
        Config cfg;
        if (zkController == null) {
            // No ZooKeeper: read the latest versioned file from data/.
            InputStream is = VersionedFile.getLatestFile(core.getDataDir(), qltbFile);
            cfg = new Config(core.getResourceLoader(), qltbFile, new InputSource(is), null);
        } else {
            // Under ZooKeeper control the resource loader resolves the file.
            cfg = new Config(core.getResourceLoader(), qltbFile, null, null);
        }
        readerMap = loadQLTBMap(cfg, core);
        qltbCache.put(reader, readerMap);
        return readerMap;
    }
}

From source file:com.grantingersoll.intell.clustering.KMeansClusteringEngine.java

License:Apache License

/**
 * Initializes the k-means clustering engine from its configuration.
 *
 * Resolves the cluster working directory (relative paths are anchored at
 * the core's data directory), instantiates the configured distance
 * measure via the core's resource loader, reads the numeric tuning
 * parameters, and restores details of the last successful clustering job
 * if a marker file is present.
 *
 * @param config engine configuration from solrconfig.xml
 * @param core   the owning SolrCore
 * @return the engine name as determined by the superclass
 * @throws SolrException if the distance measure class cannot be instantiated
 */
@Override
public String init(NamedList config, SolrCore core) {
    String result = super.init(config, core);
    SolrParams params = SolrParams.toSolrParams(config);
    this.core = core;
    String dirStr = params.get("dir");
    clusterBaseDir = new File(dirStr);
    if (!clusterBaseDir.isAbsolute()) {
        // Relative directories live under the core's data dir.
        clusterBaseDir = new File(core.getDataDir(), dirStr);
    }
    clusterBaseDir.mkdirs();
    inputField = params.get("inputField");
    String distMeas = params.get("distanceMeasure");
    Class distClass = core.getResourceLoader().findClass(distMeas);

    try {
        measure = (DistanceMeasure) distClass.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        // Single multi-catch replaces two previously duplicated handlers.
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to load measure class", e);
    }
    convergence = params.getDouble("convergence", 0.001);
    maxIters = params.getInt("maxIterations", 20);
    cacheClusters = params.getBool("cacheClusters", true);
    cachePoints = params.getBool("cachePoints", true);
    this.k = params.getInt("k");
    // See if clusters from a previous run exist.
    File nowFile = new File(clusterBaseDir, "lastJob");
    if (nowFile.exists()) {
        lastSuccessful = readJobDetails(nowFile);
    }
    return result;
}

From source file:com.searchbox.engine.solr.EmbeddedSolr.java

License:Apache License

/**
 * Creates and registers an embedded Solr core for the given collection.
 *
 * If a data directory is configured it is wiped first so the core starts
 * from a clean state; otherwise a per-collection data directory under the
 * Solr home is used. Registration failures are logged, not rethrown.
 *
 * @param collection the collection to create and register a core for
 */
@Override
public void register(Collection collection) {

    String coreInstanceDir = this.solrHome;

    Properties properties = new Properties();

    if (this.dataDir != null) {
        File dataDir = new File(this.dataDir);
        if (dataDir.exists()) {
            try {
                // Start from a clean data directory.
                FileUtils.deleteDirectory(dataDir);
            } catch (IOException e) {
                // Keep the stack trace: a stale dataDir may break the core later.
                LOGGER.error("Could not delete DataDir: " + dataDir, e);
            }
        }
        properties.setProperty("dataDir", this.dataDir);
    } else {
        properties.setProperty("dataDir", coreInstanceDir + "/" + collection.getName() + "/data/");
    }

    CoreDescriptor dcore = new CoreDescriptor(coreContainer, collection.getName(), coreInstanceDir, properties);

    try {
        SolrCore core = coreContainer.create(dcore);
        coreContainer.register(core, false);

        LOGGER.info("Solr Core config: " + core.getConfigResource());
        LOGGER.info("Solr SchemaResource: " + core.getSchemaResource());
        LOGGER.info("Solr Data dir: " + core.getDataDir());
    } catch (Exception e) {
        // Log with the full exception (the original e.getMessage() dropped
        // the stack trace and could even be null).
        LOGGER.warn("Could not register core for collection " + collection.getName(), e);
    }
}

From source file:com.searchbox.SuggesterComponent.java

License:Apache License

/**
 * SolrCoreAware hook, invoked when the core loads.
 *
 * Loads the stop-word list, then tries to restore a previously built
 * suggester model from the configured store directory (relative paths are
 * resolved under the core's data dir). Finally registers a newSearcher
 * listener when the suggester is configured to rebuild on commit or
 * optimize.
 *
 * @param core the SolrCore this component belongs to
 */
@Override
// run on loadup of solr
public void inform(SolrCore core) {
    LOGGER.trace(("Hit inform"));
    // pull in stop words which will be used later
    loadStopWords(core.getResourceLoader());
    if (storeDirname != null) {
        storeDir = new File(storeDirname);
        if (!storeDir.isAbsolute()) {
            // Two-arg File constructor instead of concatenating a File's
            // toString() with a separator by hand.
            storeDir = new File(core.getDataDir(), storeDirname);
        }
        if (!storeDir.exists()) {
            LOGGER.warn("Directory " + storeDir.getAbsolutePath()
                    + " doesn't exist for re-load of suggester, creating empty "
                    + "directory, make sure to use suggester.build before first use!");
            storeDir.mkdirs();
        } else {
            try {
                // load premade dictionary object
                readFile(storeDir);
            } catch (Exception ex) {
                // Include the cause so model-load failures are diagnosable.
                LOGGER.error("Error loading sbsuggester model", ex);
            }
        }
    }

    // check to see if the new searcher should trigger a build on optimize
    // or commit
    if (buildOnCommit || buildOnOptimize) {
        LOGGER.info("Registering newSearcher listener for Searchbox Suggester: ");
        core.registerNewSearcherListener(this);
    }
}

From source file:com.searchbox.TaggerComponent.java

License:Apache License

/**
 * SolrCoreAware hook, invoked when the core loads.
 *
 * Tries to restore a previously built tagger model from the configured
 * store directory (relative paths are resolved under the core's data
 * dir), and registers a newSearcher listener when the tagger is
 * configured to rebuild on commit or optimize.
 *
 * @param core the SolrCore this component belongs to
 */
public void inform(SolrCore core) {
    LOGGER.trace(("Hit inform"));

    if (storeDirname != null) {
        storeDir = new File(storeDirname);
        if (!storeDir.isAbsolute()) {
            // Two-arg File constructor instead of concatenating a File's
            // toString() with a separator by hand.
            storeDir = new File(core.getDataDir(), storeDirname);
        }
        if (!storeDir.exists()) {
            LOGGER.warn("Directory " + storeDir.getAbsolutePath()
                    + " doesn't exist for re-load of tagger, creating empty directory, make sure to use sbtagger.build before first use!");
            storeDir.mkdirs();
        } else {
            try {
                dfb = Tagger.loadTagger(storeDir, boostsFileName);
            } catch (Exception ex) {
                // Include the cause so model-load failures are diagnosable.
                LOGGER.error("Error loading Tagger model", ex);
            }
        }
    }

    if (buildOnCommit || buildOnOptimize) {
        LOGGER.info("Registering newSearcher listener for Searchbox Tagger: ");
        core.registerNewSearcherListener(this);
    }
}

From source file:org.dice.solrenhancements.spellchecker.DiceMultipleCaseSuggester.java

License:Apache License

/**
 * Initializes the suggester: reads its configuration, resolves the
 * optional suggestion analyzer field type, maps legacy lookup class names
 * to their factory equivalents, creates the lookup, and attempts to
 * reload any previously stored lookup data from disk.
 *
 * @param config suggester configuration
 * @param core   the owning SolrCore
 * @return the checker name as determined by the superclass
 */
@Override
public String init(NamedList config, SolrCore core) {
    LOG.info("init: " + config);
    String name = super.init(config, core);
    threshold = config.get(THRESHOLD_TOKEN_FREQUENCY) == null ? 0.0f
            : (Float) config.get(THRESHOLD_TOKEN_FREQUENCY);
    sourceLocation = (String) config.get(LOCATION);
    lookupImpl = (String) config.get(LOOKUP_IMPL);

    IndexSchema schema = core.getLatestSchema();
    suggestionAnalyzerFieldTypeName = (String) config.get(SUGGESTION_ANALYZER_FIELDTYPE);
    if (schema.getFieldTypes().containsKey(suggestionAnalyzerFieldTypeName)) {
        FieldType fieldType = schema.getFieldTypes().get(suggestionAnalyzerFieldTypeName);
        suggestionAnalyzer = fieldType.getQueryAnalyzer();
    }

    // support the old classnames without -Factory for config file backwards compatibility.
    if (lookupImpl == null || "org.apache.solr.spelling.suggest.jaspell.JaspellLookup".equals(lookupImpl)) {
        lookupImpl = JaspellLookupFactory.class.getName();
    } else if ("org.apache.solr.spelling.suggest.tst.TSTLookup".equals(lookupImpl)) {
        lookupImpl = TSTLookupFactory.class.getName();
    } else if ("org.apache.solr.spelling.suggest.fst.FSTLookup".equals(lookupImpl)) {
        lookupImpl = FSTLookupFactory.class.getName();
    }

    factory = core.getResourceLoader().newInstance(lookupImpl, LookupFactory.class);

    lookup = factory.create(config, core);
    String store = (String) config.get(STORE_DIR);
    if (store != null) {
        storeDir = new File(store);
        if (!storeDir.isAbsolute()) {
            // Relative store dirs live under the core's data dir; use the
            // two-arg File constructor instead of string concatenation.
            storeDir = new File(core.getDataDir(), store);
        }
        if (!storeDir.exists()) {
            storeDir.mkdirs();
        } else {
            // attempt reload of the stored lookup; try-with-resources closes
            // the stream even when load() throws (the original leaked it).
            try (FileInputStream in = new FileInputStream(new File(storeDir, factory.storeFileName()))) {
                lookup.load(in);
            } catch (IOException e) {
                LOG.warn("Loading stored lookup data failed", e);
            }
        }
    }
    return name;
}

From source file:org.liveSense.service.solr.api.OverridedSolrResourceLoader.java

License:Apache License

/**
 * Tell all {@link SolrCoreAware} instances about the SolrCore and record
 * the core's data directory.
 *
 * @param core the core that has finished loading
 */
public void inform(SolrCore core) {
    this.dataDir = core.getDataDir();

    // make a copy to avoid potential deadlock of a callback calling newInstance and trying to
    // add something to waitingForCore.
    SolrCoreAware[] arr;

    // The size() check deliberately sits outside the synchronized block:
    // any waiter registered by a callback during this iteration is picked
    // up by the next pass of the loop.
    while (waitingForCore.size() > 0) {
        synchronized (waitingForCore) {
            arr = waitingForCore.toArray(new SolrCoreAware[waitingForCore.size()]);
            waitingForCore.clear();
        }

        // Callbacks run outside the lock so they may safely register more waiters.
        for (SolrCoreAware aware : arr) {
            aware.inform(core);
        }
    }

    // this is the last method to be called in SolrCore before the latch is released.
    live = true;
}