List of usage examples for org.apache.solr.util.plugin.AbstractPluginLoader#load
public T load(SolrResourceLoader loader, NodeList nodes)
From source file: com.sindicetech.siren.solr.schema.AnalyzerConfigReader.java
License:Open Source License
/** * Read an analyzer definition and instantiate an {@link Analyzer} object. * * <p> Code taken from {@link org.apache.solr.schema.FieldTypePluginLoader#readAnalyzer(org.w3c.dom.Node)}} * * @param node An analyzer node from the config file * @return An analyzer/*from ww w .java2 s .c o m*/ * @throws XPathExpressionException If an XPath expression cannot be evaluated */ protected static Analyzer readAnalyzer(final Node node, final SolrResourceLoader loader, final Version luceneMatchVersion) throws XPathExpressionException { if (node == null) return null; final NamedNodeMap attrs = node.getAttributes(); final String analyzerName = DOMUtil.getAttr(attrs, "class"); // check for all of these up front, so we can error if used in // conjunction with an explicit analyzer class. final XPath xpath = XPathFactory.newInstance().newXPath(); final NodeList charFilterNodes = (NodeList) xpath.evaluate("./charFilter", node, XPathConstants.NODESET); final NodeList tokenizerNodes = (NodeList) xpath.evaluate("./tokenizer", node, XPathConstants.NODESET); final NodeList tokenFilterNodes = (NodeList) xpath.evaluate("./filter", node, XPathConstants.NODESET); if (analyzerName != null) { // explicitly check for child analysis factories instead of // just any child nodes, because the user might have their // own custom nodes (ie: <description> or something like that) if (0 != charFilterNodes.getLength() || 0 != tokenizerNodes.getLength() || 0 != tokenFilterNodes.getLength()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Configuration Error: Analyzer class='" + analyzerName + "' can not be combined with nested analysis factories"); } try { // No need to be core-aware as Analyzers are not in the core-aware list final Class<? extends Analyzer> clazz = loader.findClass(analyzerName, Analyzer.class); try { // first try to use a ctor with version parameter (needed for many new Analyzers that have no default one anymore) final Constructor<? 
extends Analyzer> cnstr = clazz.getConstructor(Version.class); final String matchVersionStr = DOMUtil.getAttr(attrs, LUCENE_MATCH_VERSION_PARAM); final Version matchVersion = (matchVersionStr == null) ? luceneMatchVersion : Config.parseLuceneVersionString(matchVersionStr); if (matchVersion == null) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Configuration Error: Analyzer '" + clazz.getName() + "' needs a 'luceneMatchVersion' parameter"); } return cnstr.newInstance(matchVersion); } catch (final NoSuchMethodException nsme) { // otherwise use default ctor return clazz.newInstance(); } } catch (final Exception e) { logger.error("Cannot load analyzer: " + analyzerName, e); throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Cannot load analyzer: " + analyzerName, e); } } // Load the CharFilters // -------------------------------------------------------------------------------- final ArrayList<CharFilterFactory> charFilters = new ArrayList<CharFilterFactory>(); final AbstractPluginLoader<CharFilterFactory> charFilterLoader = new AbstractPluginLoader<CharFilterFactory>( "[analyzerConfig] analyzer/charFilter", CharFilterFactory.class, false, false) { @Override protected CharFilterFactory create(final SolrResourceLoader loader, final String name, final String className, final Node node) throws Exception { final Map<String, String> params = DOMUtil.toMap(node.getAttributes()); String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM); params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion, CharFilterFactory.class.getSimpleName(), luceneMatchVersion).toString()); CharFilterFactory factory = loader.newInstance(className, CharFilterFactory.class, getDefaultPackages(), new Class[] { Map.class }, new Object[] { params }); factory.setExplicitLuceneMatchVersion(null != configuredVersion); return factory; } @Override protected void init(final CharFilterFactory plugin, final Node node) throws Exception { if (plugin 
!= null) { charFilters.add(plugin); } } @Override protected CharFilterFactory register(final String name, final CharFilterFactory plugin) { return null; // used for map registration } }; charFilterLoader.load(loader, charFilterNodes); // Load the Tokenizer // Although an analyzer only allows a single Tokenizer, we load a list to make sure // the configuration is ok // -------------------------------------------------------------------------------- final ArrayList<TokenizerFactory> tokenizers = new ArrayList<TokenizerFactory>(1); final AbstractPluginLoader<TokenizerFactory> tokenizerLoader = new AbstractPluginLoader<TokenizerFactory>( "[analyzerConfig] analyzer/tokenizer", TokenizerFactory.class, false, false) { @Override protected TokenizerFactory create(final SolrResourceLoader loader, final String name, final String className, final Node node) throws Exception { final Map<String, String> params = DOMUtil.toMap(node.getAttributes()); String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM); params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion, TokenizerFactory.class.getSimpleName(), luceneMatchVersion).toString()); TokenizerFactory factory = loader.newInstance(className, TokenizerFactory.class, getDefaultPackages(), new Class[] { Map.class }, new Object[] { params }); factory.setExplicitLuceneMatchVersion(null != configuredVersion); return factory; } @Override protected void init(final TokenizerFactory plugin, final Node node) throws Exception { if (!tokenizers.isEmpty()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Multiple tokenizers defined for: " + node); } tokenizers.add(plugin); } @Override protected TokenizerFactory register(final String name, final TokenizerFactory plugin) { return null; // used for map registration } }; tokenizerLoader.load(loader, tokenizerNodes); // Make sure something was loaded if (tokenizers.isEmpty()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "analyzer 
without class or tokenizer & filter list"); } // Load the Filters // -------------------------------------------------------------------------------- final ArrayList<TokenFilterFactory> filters = new ArrayList<TokenFilterFactory>(); final AbstractPluginLoader<TokenFilterFactory> filterLoader = new AbstractPluginLoader<TokenFilterFactory>( "[analyzerConfig] analyzer/filter", TokenFilterFactory.class, false, false) { @Override protected TokenFilterFactory create(final SolrResourceLoader loader, final String name, final String className, final Node node) throws Exception { final Map<String, String> params = DOMUtil.toMap(node.getAttributes()); String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM); params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion, TokenFilterFactory.class.getSimpleName(), luceneMatchVersion).toString()); TokenFilterFactory factory = loader.newInstance(className, TokenFilterFactory.class, getDefaultPackages(), new Class[] { Map.class }, new Object[] { params }); factory.setExplicitLuceneMatchVersion(null != configuredVersion); return factory; } @Override protected void init(final TokenFilterFactory plugin, final Node node) throws Exception { if (plugin != null) { filters.add(plugin); } } @Override protected TokenFilterFactory register(final String name, final TokenFilterFactory plugin) throws Exception { return null; // used for map registration } }; filterLoader.load(loader, tokenFilterNodes); return new TokenizerChain(charFilters.toArray(new CharFilterFactory[charFilters.size()]), tokenizers.get(0), filters.toArray(new TokenFilterFactory[filters.size()])); }
From source file: com.sindicetech.siren.solr.schema.SirenDatatypeAnalyzerConfig.java
License:Open Source License
/** * Read the definition of the datatypes and load them into the * {@code datatypes} map.//from www.j a v a2 s .co m */ private void readConfig(final InputSource is) { log.info("Reading configuration of SIREn's datatype analyzer"); try { // pass the config resource loader to avoid building an empty one for no reason: // in the current case though, the stream is valid so we wont load the resource by name final Config schemaConf = new Config(loader, "datatypeConfig", is, "/datatypeConfig/"); final Document document = schemaConf.getDocument(); final XPath xpath = schemaConf.getXPath(); final Node nd = (Node) xpath.evaluate("/datatypeConfig/@name", document, XPathConstants.NODE); if (nd == null) { log.warn("datatypeConfig has no name!"); } else { name = nd.getNodeValue(); log.info("datatypeConfig name=" + name); } version = schemaConf.get("/datatypeConfig/@version"); final AbstractPluginLoader<Datatype> datatypeLoader = new AbstractPluginLoader<Datatype>( "[datatypeConfig] datatype", Datatype.class, true, true) { @Override protected Datatype create(final SolrResourceLoader loader, final String name, final String className, final Node node) throws Exception { final Datatype dt = loader.newInstance(className, Datatype.class); dt.setDatatypeName(name); // An analyzer with type="index" String expression = "./analyzer[@type='index']"; Node anode = (Node) xpath.evaluate(expression, node, XPathConstants.NODE); final Analyzer analyzer = AnalyzerConfigReader.readAnalyzer(anode, loader, luceneMatchVersion); if (analyzer != null) dt.setAnalyzer(analyzer); expression = "./analyzer[@type='query']"; anode = (Node) xpath.evaluate(expression, node, XPathConstants.NODE); final Analyzer queryAnalyzer = AnalyzerConfigReader.readAnalyzer(anode, loader, luceneMatchVersion); if (queryAnalyzer != null) dt.setQueryAnalyzer(queryAnalyzer); return dt; } @Override protected void init(final Datatype plugin, final Node node) throws Exception { final Map<String, String> params = 
DOMUtil.toMapExcept(node.getAttributes(), "name", "class"); plugin.setArgs(params); } @Override protected Datatype register(final String name, final Datatype plugin) throws Exception { log.trace("datatype defined: " + plugin); return datatypes.put(name, plugin); } }; final String expression = "/datatypeConfig/datatype"; final NodeList nodes = (NodeList) xpath.evaluate(expression, document, XPathConstants.NODESET); datatypeLoader.load(loader, nodes); } catch (final SolrException e) { throw e; } catch (final Exception e) { // unexpected exception... throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Datatype configuration parsing failed: " + e.getMessage(), e); } }
From source file: org.sindice.siren.solr.schema.AnalyzerConfigReader.java
License:Open Source License
/** * Read an analyzer definition and instantiate an {@link Analyzer} object. * * <p> Code taken from {@link IndexSchema#readAnalyzer()} * * @param node An analyzer node from the config file * @return An analyzer//from ww w . j a va 2s. c o m * @throws XPathExpressionException If an XPath expression cannot be evaluated */ protected static Analyzer readAnalyzer(final Node node, final SolrResourceLoader loader, final Version luceneMatchVersion) throws XPathExpressionException { if (node == null) return null; final NamedNodeMap attrs = node.getAttributes(); final String analyzerName = DOMUtil.getAttr(attrs, "class"); if (analyzerName != null) { // No need to be core-aware as Analyzers are not in the core-aware list final Class<? extends Analyzer> clazz = loader.findClass(analyzerName).asSubclass(Analyzer.class); try { try { // first try to use a ctor with version parameter (needed for many new Analyzers that have no default one anymore) final Constructor<? extends Analyzer> cnstr = clazz.getConstructor(Version.class); final String matchVersionStr = DOMUtil.getAttr(attrs, LUCENE_MATCH_VERSION_PARAM); final Version matchVersion = (matchVersionStr == null) ? 
luceneMatchVersion : Config.parseLuceneVersionString(matchVersionStr); if (matchVersion == null) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Configuration Error: Analyzer '" + clazz.getName() + "' needs a 'luceneMatchVersion' parameter"); } return cnstr.newInstance(matchVersion); } catch (final NoSuchMethodException nsme) { // otherwise use default ctor return clazz.newInstance(); } } catch (final Exception e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Cannot load analyzer: " + analyzerName); } } final XPath xpath = XPathFactory.newInstance().newXPath(); // Load the CharFilters // -------------------------------------------------------------------------------- final ArrayList<CharFilterFactory> charFilters = new ArrayList<CharFilterFactory>(); final AbstractPluginLoader<CharFilterFactory> charFilterLoader = new AbstractPluginLoader<CharFilterFactory>( "[analyzerConfig] analyzer/charFilter", false, false) { @Override protected void init(final CharFilterFactory plugin, final Node node) throws Exception { if (plugin != null) { final Map<String, String> params = DOMUtil.toMapExcept(node.getAttributes(), "class"); // copy the luceneMatchVersion from config, if not set if (!params.containsKey(LUCENE_MATCH_VERSION_PARAM)) params.put(LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion.toString()); plugin.init(params); charFilters.add(plugin); } } @Override protected CharFilterFactory register(final String name, final CharFilterFactory plugin) throws Exception { return null; // used for map registration } }; charFilterLoader.load(loader, (NodeList) xpath.evaluate("./charFilter", node, XPathConstants.NODESET)); // Load the Tokenizer // Although an analyzer only allows a single Tokenizer, we load a list to make sure // the configuration is ok // -------------------------------------------------------------------------------- final ArrayList<TokenizerFactory> tokenizers = new ArrayList<TokenizerFactory>(1); final 
AbstractPluginLoader<TokenizerFactory> tokenizerLoader = new AbstractPluginLoader<TokenizerFactory>( "[analyzerConfig] analyzer/tokenizer", false, false) { @Override protected void init(final TokenizerFactory plugin, final Node node) throws Exception { if (!tokenizers.isEmpty()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Multiple tokenizers defined for: " + node); } final Map<String, String> params = DOMUtil.toMapExcept(node.getAttributes(), "class"); // copy the luceneMatchVersion from config, if not set if (!params.containsKey(LUCENE_MATCH_VERSION_PARAM)) params.put(LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion.toString()); plugin.init(params); tokenizers.add(plugin); } @Override protected TokenizerFactory register(final String name, final TokenizerFactory plugin) throws Exception { return null; // used for map registration } }; tokenizerLoader.load(loader, (NodeList) xpath.evaluate("./tokenizer", node, XPathConstants.NODESET)); // Make sure something was loaded if (tokenizers.isEmpty()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "analyzer without class or tokenizer & filter list"); } // Load the Filters // -------------------------------------------------------------------------------- final ArrayList<TokenFilterFactory> filters = new ArrayList<TokenFilterFactory>(); final AbstractPluginLoader<TokenFilterFactory> filterLoader = new AbstractPluginLoader<TokenFilterFactory>( "[analyzerConfig] analyzer/filter", false, false) { @Override protected void init(final TokenFilterFactory plugin, final Node node) throws Exception { if (plugin != null) { final Map<String, String> params = DOMUtil.toMapExcept(node.getAttributes(), "class"); // copy the luceneMatchVersion from config, if not set if (!params.containsKey(LUCENE_MATCH_VERSION_PARAM)) params.put(LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion.toString()); plugin.init(params); filters.add(plugin); } } @Override protected TokenFilterFactory register(final String name, 
final TokenFilterFactory plugin) throws Exception { return null; // used for map registration } }; filterLoader.load(loader, (NodeList) xpath.evaluate("./filter", node, XPathConstants.NODESET)); return new TokenizerChain(charFilters.toArray(new CharFilterFactory[charFilters.size()]), tokenizers.get(0), filters.toArray(new TokenFilterFactory[filters.size()])); }
From source file: org.sindice.siren.solr.schema.SirenDatatypeAnalyzerConfig.java
License:Open Source License
/** * Read the definition of the datatypes and load them into the * {@code datatypes} map./*from w w w. j a v a 2 s .c o m*/ */ private void readConfig(final InputSource is) { log.info("Reading SIREn datatype analyzer configuration"); try { // in the current case though, the stream is valid so we wont load the resource by name final Config schemaConf = new Config(loader, "datatypeConfig", is, "/datatypeConfig/"); final Document document = schemaConf.getDocument(); final XPath xpath = schemaConf.getXPath(); final Node nd = (Node) xpath.evaluate("/datatypeConfig/@name", document, XPathConstants.NODE); if (nd == null) { log.warn("datatypeConfig has no name!"); } else { name = nd.getNodeValue(); log.info("datatypeConfig name=" + name); } version = schemaConf.get("/datatypeConfig/@version"); final AbstractPluginLoader<Datatype> datatypeLoader = new AbstractPluginLoader<Datatype>( "[datatypeConfig] datatype", true, true) { @Override protected Datatype create(final ResourceLoader loader, final String name, final String className, final Node node) throws Exception { final Datatype dt = (Datatype) loader.newInstance(className); dt.setDatatypeName(name); // An analyzer with type="index" String expression = "./analyzer[@type='index']"; Node anode = (Node) xpath.evaluate(expression, node, XPathConstants.NODE); final Analyzer analyzer = AnalyzerConfigReader.readAnalyzer(anode, (SolrResourceLoader) loader, luceneMatchVersion); if (analyzer != null) dt.setAnalyzer(analyzer); expression = "./analyzer[@type='query']"; anode = (Node) xpath.evaluate(expression, node, XPathConstants.NODE); final Analyzer queryAnalyzer = AnalyzerConfigReader.readAnalyzer(anode, (SolrResourceLoader) loader, luceneMatchVersion); if (queryAnalyzer != null) dt.setQueryAnalyzer(queryAnalyzer); return dt; } @Override protected void init(final Datatype plugin, final Node node) throws Exception { final Map<String, String> params = DOMUtil.toMapExcept(node.getAttributes(), "name", "class"); 
plugin.setArgs(params); } @Override protected Datatype register(final String name, final Datatype plugin) throws Exception { log.trace("datatype defined: " + plugin); return datatypes.put(name, plugin); } }; final String expression = "/datatypeConfig/datatype"; final NodeList nodes = (NodeList) xpath.evaluate(expression, document, XPathConstants.NODESET); datatypeLoader.load(loader, nodes); } catch (final SolrException e) { SolrConfig.severeErrors.add(e); throw e; } catch (final Exception e) { // unexpected exception... SolrConfig.severeErrors.add(e); throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "datatypeConfig parsing failed: " + e.getMessage(), e, false); } }