Example usage for org.apache.solr.util DOMUtil toMap

List of usage examples for org.apache.solr.util DOMUtil toMap

Introduction

On this page you can find example usage for org.apache.solr.util DOMUtil toMap.

Prototype

public static Map<String, String> toMap(NamedNodeMap attrs) 

Source Link

Usage

From source file: com.sindicetech.siren.solr.schema.AnalyzerConfigReader.java

License: Open Source License

/**
 * Reads an analyzer definition from a DOM node and instantiates an {@link Analyzer} object.
 *
 * <p> Code taken from {@link org.apache.solr.schema.FieldTypePluginLoader#readAnalyzer(org.w3c.dom.Node)}}
 *
 * <p> Two configuration styles are supported: either an explicit analyzer class
 * (the {@code class} attribute), or a nested pipeline of
 * {@code <charFilter>} / {@code <tokenizer>} / {@code <filter>} elements that is
 * assembled into a {@link TokenizerChain}. The two styles are mutually exclusive.
 *
 * @param node An analyzer node from the config file; may be {@code null}
 * @param loader resource loader used to resolve and instantiate the analyzer or factory classes
 * @param luceneMatchVersion fallback Lucene version used when the configuration does not
 *        declare its own {@code luceneMatchVersion}
 * @return An analyzer, or {@code null} if {@code node} is {@code null}
 * @throws XPathExpressionException If an XPath expression cannot be evaluated
 */
protected static Analyzer readAnalyzer(final Node node, final SolrResourceLoader loader,
        final Version luceneMatchVersion) throws XPathExpressionException {
    if (node == null)
        return null;
    final NamedNodeMap attrs = node.getAttributes();

    // Optional "class" attribute: when present, an explicit Analyzer implementation is used
    // instead of a nested charFilter/tokenizer/filter pipeline.
    final String analyzerName = DOMUtil.getAttr(attrs, "class");

    // check for all of these up front, so we can error if used in
    // conjunction with an explicit analyzer class.
    final XPath xpath = XPathFactory.newInstance().newXPath();
    final NodeList charFilterNodes = (NodeList) xpath.evaluate("./charFilter", node, XPathConstants.NODESET);
    final NodeList tokenizerNodes = (NodeList) xpath.evaluate("./tokenizer", node, XPathConstants.NODESET);
    final NodeList tokenFilterNodes = (NodeList) xpath.evaluate("./filter", node, XPathConstants.NODESET);

    if (analyzerName != null) {

        // explicitly check for child analysis factories instead of
        // just any child nodes, because the user might have their
        // own custom nodes (ie: <description> or something like that)
        if (0 != charFilterNodes.getLength() || 0 != tokenizerNodes.getLength()
                || 0 != tokenFilterNodes.getLength()) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                    "Configuration Error: Analyzer class='" + analyzerName
                            + "' can not be combined with nested analysis factories");
        }

        try {
            // No need to be core-aware as Analyzers are not in the core-aware list
            final Class<? extends Analyzer> clazz = loader.findClass(analyzerName, Analyzer.class);

            try {
                // first try to use a ctor with version parameter (needed for many new Analyzers that have no default one anymore)
                final Constructor<? extends Analyzer> cnstr = clazz.getConstructor(Version.class);
                // A node-level luceneMatchVersion attribute overrides the method-level default.
                final String matchVersionStr = DOMUtil.getAttr(attrs, LUCENE_MATCH_VERSION_PARAM);
                final Version matchVersion = (matchVersionStr == null) ? luceneMatchVersion
                        : Config.parseLuceneVersionString(matchVersionStr);
                if (matchVersion == null) {
                    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                            "Configuration Error: Analyzer '" + clazz.getName()
                                    + "' needs a 'luceneMatchVersion' parameter");
                }
                return cnstr.newInstance(matchVersion);
            } catch (final NoSuchMethodException nsme) {
                // otherwise use default ctor
                return clazz.newInstance();
            }
        } catch (final Exception e) {
            // Log before rethrowing so the failure is visible even if the SolrException is
            // swallowed further up; the original cause is preserved on the thrown exception.
            logger.error("Cannot load analyzer: " + analyzerName, e);
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                    "Cannot load analyzer: " + analyzerName, e);
        }
    }

    // Load the CharFilters
    // --------------------------------------------------------------------------------
    // The anonymous loaders below collect their results by side effect: init() appends each
    // created factory to the captured list, while register() is a no-op (no registry is kept).
    final ArrayList<CharFilterFactory> charFilters = new ArrayList<CharFilterFactory>();
    final AbstractPluginLoader<CharFilterFactory> charFilterLoader = new AbstractPluginLoader<CharFilterFactory>(
            "[analyzerConfig] analyzer/charFilter", CharFilterFactory.class, false, false) {

        @Override
        protected CharFilterFactory create(final SolrResourceLoader loader, final String name,
                final String className, final Node node) throws Exception {
            final Map<String, String> params = DOMUtil.toMap(node.getAttributes());
            // Resolve the effective luceneMatchVersion (factory-level attribute wins over the
            // default) and pass it through to the factory via its init params.
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion,
                    CharFilterFactory.class.getSimpleName(), luceneMatchVersion).toString());
            CharFilterFactory factory = loader.newInstance(className, CharFilterFactory.class,
                    getDefaultPackages(), new Class[] { Map.class }, new Object[] { params });
            // Record whether the version was explicit in the config or inherited from the default.
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
        }

        @Override
        protected void init(final CharFilterFactory plugin, final Node node) throws Exception {
            if (plugin != null) {
                charFilters.add(plugin);
            }
        }

        @Override
        protected CharFilterFactory register(final String name, final CharFilterFactory plugin) {
            return null; // no registry needed; factories are collected in init() instead
        }

    };

    charFilterLoader.load(loader, charFilterNodes);

    // Load the Tokenizer
    // Although an analyzer only allows a single Tokenizer, we load a list to make sure
    // the configuration is ok
    // --------------------------------------------------------------------------------
    final ArrayList<TokenizerFactory> tokenizers = new ArrayList<TokenizerFactory>(1);
    final AbstractPluginLoader<TokenizerFactory> tokenizerLoader = new AbstractPluginLoader<TokenizerFactory>(
            "[analyzerConfig] analyzer/tokenizer", TokenizerFactory.class, false, false) {

        @Override
        protected TokenizerFactory create(final SolrResourceLoader loader, final String name,
                final String className, final Node node) throws Exception {
            final Map<String, String> params = DOMUtil.toMap(node.getAttributes());
            // Same version-resolution logic as for char filters above.
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion,
                    TokenizerFactory.class.getSimpleName(), luceneMatchVersion).toString());
            TokenizerFactory factory = loader.newInstance(className, TokenizerFactory.class,
                    getDefaultPackages(), new Class[] { Map.class }, new Object[] { params });
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
        }

        @Override
        protected void init(final TokenizerFactory plugin, final Node node) throws Exception {
            // Reject configurations that declare more than one <tokenizer>.
            if (!tokenizers.isEmpty()) {
                throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                        "Multiple tokenizers defined for: " + node);
            }
            tokenizers.add(plugin);
        }

        @Override
        protected TokenizerFactory register(final String name, final TokenizerFactory plugin) {
            return null; // no registry needed; factories are collected in init() instead
        }
    };

    tokenizerLoader.load(loader, tokenizerNodes);

    // Make sure something was loaded
    if (tokenizers.isEmpty()) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                "analyzer without class or tokenizer & filter list");
    }

    // Load the Filters
    // --------------------------------------------------------------------------------
    final ArrayList<TokenFilterFactory> filters = new ArrayList<TokenFilterFactory>();
    final AbstractPluginLoader<TokenFilterFactory> filterLoader = new AbstractPluginLoader<TokenFilterFactory>(
            "[analyzerConfig] analyzer/filter", TokenFilterFactory.class, false, false) {

        @Override
        protected TokenFilterFactory create(final SolrResourceLoader loader, final String name,
                final String className, final Node node) throws Exception {
            final Map<String, String> params = DOMUtil.toMap(node.getAttributes());
            // Same version-resolution logic as for char filters above.
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(LUCENE_MATCH_VERSION_PARAM, parseConfiguredVersion(configuredVersion,
                    TokenFilterFactory.class.getSimpleName(), luceneMatchVersion).toString());
            TokenFilterFactory factory = loader.newInstance(className, TokenFilterFactory.class,
                    getDefaultPackages(), new Class[] { Map.class }, new Object[] { params });
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
        }

        @Override
        protected void init(final TokenFilterFactory plugin, final Node node) throws Exception {
            if (plugin != null) {
                filters.add(plugin);
            }
        }

        @Override
        protected TokenFilterFactory register(final String name, final TokenFilterFactory plugin)
                throws Exception {
            return null; // no registry needed; factories are collected in init() instead
        }
    };
    filterLoader.load(loader, tokenFilterNodes);

    // Assemble the pipeline: char filters -> single tokenizer -> token filters.
    return new TokenizerChain(charFilters.toArray(new CharFilterFactory[charFilters.size()]), tokenizers.get(0),
            filters.toArray(new TokenFilterFactory[filters.size()]));
}