List of usage examples for org.apache.solr.core SolrCore getResourceLoader
public SolrResourceLoader getResourceLoader()
From source file:alba.components.FilteredShowFileRequestHandler.java
License:Apache License
public static String getAdminFileFromZooKeeper(SolrQueryRequest req, SolrQueryResponse rsp, SolrZkClient zkClient, Set<String> hiddenFiles) throws KeeperException, InterruptedException { String adminFile = null;//from w w w . j a v a2 s . c o m SolrCore core = req.getCore(); final ZkSolrResourceLoader loader = (ZkSolrResourceLoader) core.getResourceLoader(); String confPath = loader.getConfigSetZkPath(); String fname = req.getParams().get("file", null); if (fname == null) { adminFile = confPath; } else { fname = fname.replace('\\', '/'); // normalize slashes if (isHiddenFile(req, rsp, fname, true, hiddenFiles)) { return null; } if (fname.startsWith("/")) { // Only files relative to conf are valid fname = fname.substring(1); } adminFile = confPath + "/" + fname; } // Make sure the file exists, is readable and is not a hidden file if (!zkClient.exists(adminFile, true)) { log.error("Can not find: " + adminFile); rsp.setException(new SolrException(SolrException.ErrorCode.NOT_FOUND, "Can not find: " + adminFile)); return null; } return adminFile; }
From source file:com.billiger.solr.handler.component.QLTBComponent.java
License:Apache License
/**
 * Inform component of core reload.
 *
 * This will both set the analyzer according to the configured
 * queryFieldType, and load the QLTB data. Data source can be (in this
 * order) ZooKeeper, the conf/ directory or the data/ directory.
 */
@Override
public final void inform(final SolrCore core) {
    // Resolve the query analyzer from the configured field type, if any.
    String queryFieldType = initArgs.get(FIELD_TYPE);
    if (queryFieldType != null) {
        FieldType ft = core.getLatestSchema().getFieldTypes().get(queryFieldType);
        if (ft == null) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                    "unknown FieldType \"" + queryFieldType + "\" used in QLTBComponent");
        }
        analyzer = ft.getQueryAnalyzer();
    } else {
        // No field type configured: queries will be used without analysis.
        analyzer = null;
    }
    // qltbCache doubles as the lock guarding (re)initialization.
    synchronized (qltbCache) {
        qltbCache.clear();
        try {
            // retrieve QLTB data filename
            String qltbFile = initArgs.get(QLTB_FILE);
            if (qltbFile == null) {
                throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                        "QLTBComponent must specify argument: \"" + QLTB_FILE + "\" - path to QLTB data");
            }
            boolean exists = false;
            // check ZooKeeper first
            ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
            if (zkController != null) {
                exists = zkController.configFileExists(
                        zkController.readConfigName(
                                core.getCoreDescriptor().getCloudDescriptor().getCollectionName()),
                        qltbFile);
            } else {
                // no ZooKeeper, check conf/ and data/ directories
                File fConf = new File(core.getResourceLoader().getConfigDir(), qltbFile);
                File fData = new File(core.getDataDir(), qltbFile);
                // Exactly one of conf/ and data/ must hold the file; both or
                // neither is treated as a misconfiguration.
                if (fConf.exists() == fData.exists()) {
                    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                            "QLTBComponent missing config file: \"" + qltbFile + "\": either "
                                    + fConf.getAbsolutePath() + " or " + fData.getAbsolutePath()
                                    + " must exist, but not both");
                }
                if (fConf.exists()) {
                    // conf/ found: load it under the magic null key (reader-independent).
                    exists = true;
                    log.info("QLTB source conf/: " + fConf.getAbsolutePath());
                    Config cfg = new Config(core.getResourceLoader(), qltbFile);
                    qltbCache.put(null, loadQLTBMap(cfg, core));
                }
            }
            if (!exists) {
                // Neither ZooKeeper nor conf/, so must be in data/.
                // We need an IndexReader, so borrow the newest searcher;
                // decref() in finally releases the reference either way.
                RefCounted<SolrIndexSearcher> searcher = null;
                try {
                    searcher = core.getNewestSearcher(false);
                    IndexReader reader = searcher.get().getIndexReader();
                    getQLTBMap(reader, core);
                } finally {
                    if (searcher != null) {
                        searcher.decref();
                    }
                }
            }
        } catch (Exception ex) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error initializing QltbComponent.",
                    ex);
        }
    }
}
From source file:com.billiger.solr.handler.component.QLTBComponent.java
License:Apache License
/** * Get QLTB map for the given IndexReader. * * If the QLTB map is located in the conf/ directory, it is independent * of the IndexReader and reloaded only during a core reload. * If, however, QLTB data is read from ZooKeeper or the data/ directory, * it is reloaded for each new IndexReader via the core's resource loader. * * @return QLTB map for the given IndexReader. *///from w ww. ja va 2 s. c om private Map<String, List<Query>> getQLTBMap(final IndexReader reader, final SolrCore core) throws Exception { Map<String, List<Query>> map = null; synchronized (qltbCache) { map = qltbCache.get(null); // Magic "null" key for data from conf/ if (map != null) { // QLTB data from the conf/ directory, reader-independent. return map; } map = qltbCache.get(reader); if (map == null) { // No QLTB map for this reader yet, load it from ZooKeeper or // the data/ directory. log.info("load QLTB map for new IndexReader"); String qltbFile = initArgs.get(QLTB_FILE); if (qltbFile == null) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "QLTBComponent must specify argument: " + QLTB_FILE); } Config cfg; ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController(); if (zkController != null) { // We're running under ZooKeeper control... cfg = new Config(core.getResourceLoader(), qltbFile, null, null); } else { // No ZooKeeper, use data/ directory InputStream is = VersionedFile.getLatestFile(core.getDataDir(), qltbFile); cfg = new Config(core.getResourceLoader(), qltbFile, new InputSource(is), null); } map = loadQLTBMap(cfg, core); qltbCache.put(reader, map); } return map; } }
From source file:com.cominvent.solr.update.processor.MappingUpdateProcessor.java
License:Apache License
/** * The constructor initializes the processor by reading configuration * and loading a HashMap from the specified file name. *//* w ww .j a va 2 s .c om*/ public MappingUpdateProcessor(SolrCore core, SolrParams params, SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { super(next); this.core = core; // TODO: Add support for caseSensitive and expand parameters, support full synonyms.txt format if (params != null) { setEnabled(params.getBool("enabled", true)); inputField = params.get(INPUT_FIELD_PARAM, "").trim(); outputField = params.get(OUTPUT_FIELD_PARAM, "").trim(); docIdField = params.get(DOCID_PARAM, DOCID_FIELD_DEFAULT); fallbackValue = params.get(FALLBACK_VALUE_PARAM, null); mappingFile = params.get(MAPPING_FILE_PARAM, "").trim(); } if (inputField.length() == 0) { log.error("Missing or faulty configuration of MappingUpdateProcessor. Input field must be specified"); throw new SolrException(ErrorCode.NOT_FOUND, "Missing or faulty configuration of MappingUpdateProcessor. Input field must be specified"); } try { log.info("Attempting to initialize mapping file " + mappingFile); InputStream is = core.getResourceLoader().openResource(mappingFile); BufferedReader br = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8"))); String line; while ((line = br.readLine()) != null) { if (line.startsWith("#")) continue; String[] kv = line.split("=>"); if (kv.length < 2) continue; map.put(kv[0].trim(), kv[1].trim()); } log.info("Map initialized from " + mappingFile + ": " + map); } catch (Exception e) { throw new SolrException(ErrorCode.NOT_FOUND, "Error when reading mapping file " + mappingFile + "."); } }
From source file:com.grantingersoll.intell.clustering.KMeansClusteringEngine.java
License:Apache License
@Override public String init(NamedList config, SolrCore core) { String result = super.init(config, core); SolrParams params = SolrParams.toSolrParams(config); this.core = core; String dirStr = params.get("dir"); clusterBaseDir = new File(dirStr); if (clusterBaseDir.isAbsolute() == false) { clusterBaseDir = new File(core.getDataDir(), dirStr); }/* w w w. j a v a 2s. com*/ clusterBaseDir.mkdirs(); inputField = params.get("inputField"); String distMeas = params.get("distanceMeasure"); Class distClass = core.getResourceLoader().findClass(distMeas); try { measure = (DistanceMeasure) distClass.newInstance(); } catch (InstantiationException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to load measure class", e); } catch (IllegalAccessException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to load measure class", e); } convergence = params.getDouble("convergence", 0.001); maxIters = params.getInt("maxIterations", 20); cacheClusters = params.getBool("cacheClusters", true); cachePoints = params.getBool("cachePoints", true); this.k = params.getInt("k"); //See if we have clusters already File nowFile = new File(clusterBaseDir, "lastJob"); if (nowFile.exists()) { lastSuccessful = readJobDetails(nowFile); } return result; }
From source file:com.searchbox.SuggesterComponent.java
License:Apache License
@Override // run on loadup of solr public void inform(SolrCore core) { LOGGER.trace(("Hit inform")); // pull in stop words which will be used later loadStopWords(core.getResourceLoader()); if (storeDirname != null) { storeDir = new File(storeDirname); if (!storeDir.isAbsolute()) { storeDir = new File(core.getDataDir() + File.separator + storeDir); }//from ww w. j a va 2s.c o m if (!storeDir.exists()) { LOGGER.warn("Directory " + storeDir.getAbsolutePath() + " doesn't exist for re-load of suggester, creating emtpy " + "directory, make sure to use suggester.build before first use!"); storeDir.mkdirs(); } else { try { // load premade dictionary object readFile(storeDir); } catch (Exception ex) { LOGGER.error("Error loading sbsuggester model"); } } } // check to see if the new searcher should trigger a build on optimize // or commit if (buildOnCommit || buildOnOptimize) { LOGGER.info("Registering newSearcher listener for Searchbox Suggester: "); core.registerNewSearcherListener(this); } }
From source file:com.zb.mmseg.solr.MMSeg4jHandler.java
License:Open Source License
/**
 * SolrCoreAware callback: keeps a handle on the core's resource loader so
 * the handler can later resolve resources relative to this core.
 *
 * @param core the core this handler is attached to
 */
public void inform(SolrCore core) {
    loader = core.getResourceLoader();
    // solrHome = new File(loader.getInstanceDir());
}
From source file:de.dlr.knowledgefinder.dataimport.utils.transformer.ExcludeValuesTransformer.java
License:Apache License
private CharArraySet initializeExcludeValues(String filename) throws IOException { CharArraySet loadedFile = loadedFiles.get(filename); if (loadedFile == null) { SolrCore core = context.getSolrCore(); SolrResourceLoader loader = core.getResourceLoader(); List<String> lines = Collections.emptyList(); lines = loader.getLines(filename); loadedFiles.put(filename, new CharArraySet(lines, true)); }// w w w . j a v a 2s. com return loadedFiles.get(filename); }
From source file:io.yucca.solr.processor.RegexExtractorUpdateProcessor.java
License:Apache License
/** * Initialise the regex pattern, these are compiled and put in a map under * the field name/* www. java 2 s . com*/ * * @param core * SolrCore * @param params * SolrParams * @return List<RegexExtractionPattern> */ public static List<RegexExtractionPattern> initRegex(SolrCore core, SolrParams params) { List<RegexExtractionPattern> regex = new LinkedList<RegexExtractionPattern>(); String regexFile = null; if (params != null) { regexFile = params.get(REGEX_FILE_PARAM, ""); } InputStream is = null; try { is = core.getResourceLoader().openResource(regexFile); BufferedReader reader = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8"))); String line; while ((line = reader.readLine()) != null) { if (line.startsWith("#")) { continue; } String[] kv = line.split("=>"); if (kv.length < 2 || kv.length > 3) { continue; } // split and removes spaces between comma seperated fields String[] fields = splitTrim(kv[0]); if (notEmpty(fields) == false) { continue; } String pattern = kv[1].trim(); if ("".equals(pattern)) { continue; } // get the possibly fixed values List<String> fixed = (kv.length == 3) ? Arrays.asList(splitTrim(kv[2])) : new ArrayList<String>(0); try { regex.add(new RegexExtractionPattern(fields, pattern, fixed)); } catch (PatternSyntaxException e) { log.warn("Pattern {} for field(s) {} could not be compiled! Skipping.", pattern, fields); } } } catch (IOException e) { return regex; } finally { IOUtils.closeQuietly(is); } return regex; }
From source file:jp.sf.fess.solr.plugin.update.SuggestTranslogUpdateHandlerFilter.java
License:Apache License
protected void startup() { final SolrCore core = updateHandler.getSolrCore(); final UpdateLog ulog = updateHandler.getUpdateLog(); //TODO replay? TransactionLogUtil.clearSuggestTransactionLog(ulog.getLogDir()); final SuggestUpdateConfig config = SolrConfigUtil.getUpdateHandlerConfig(core.getSolrConfig()); final List<SuggestFieldInfo> suggestFieldInfoList = SolrConfigUtil.getSuggestFieldInfoList(config); suggestUpdateController = new SuggestUpdateController(config, suggestFieldInfoList, core.getResourceLoader()); if (config.getLabelFields() != null) { for (final String label : config.getLabelFields()) { suggestUpdateController.addLabelFieldName(label); }// w w w . ja v a2 s . com } if (config.getRoleFields() != null) { for (final String role : config.getRoleFields()) { suggestUpdateController.addRoleFieldName(role); } } suggestUpdateController.setLimitDocumentQueuingNum(2); suggestUpdateController.start(); }