Example usage for org.apache.lucene.analysis.util TokenFilterFactory forName

List of usage examples for org.apache.lucene.analysis.util TokenFilterFactory forName

Introduction

In this page you can find the example usage for org.apache.lucene.analysis.util TokenFilterFactory forName.

Prototype

public static TokenFilterFactory forName(String name, Map<String, String> args) 

Source Link

Document

Looks up a TokenFilterFactory by name from the context classpath, using the given arguments map.

Usage

From source file:com.github.healthonnet.search.SynonymExpandingExtendedDismaxQParserPlugin.java

License:Apache License

/**
 * Reads analyzer definitions from the plugin's init args and registers one
 * {@link TokenizerChain} per named analyzer entry into {@code analyzers}.
 *
 * <p>Each analyzer entry is a NamedList containing a "tokenizer" element and one
 * or more "filter" elements; every element carries a "class" param that is first
 * resolved via the Lucene SPI (short name) and, failing that, loaded by fully
 * qualified class name through the resource loader.</p>
 *
 * @param analyzers destination map, keyed by analyzer name
 * @param argName   key under which the analyzer config lives in {@code args}
 * @throws SolrException if a config is missing its tokenizer or filters, or if
 *                       a ResourceLoaderAware factory fails to initialize
 */
private void parseConfig(Map<String, Analyzer> analyzers, String argName) {
    try {
        Object xmlAnalyzers = args.get(argName);

        // instanceof is null-safe, so no separate null check is needed
        if (xmlAnalyzers instanceof NamedList) {
            NamedList<?> analyzersList = (NamedList<?>) xmlAnalyzers;
            for (Entry<String, ?> entry : analyzersList) {
                String analyzerName = entry.getKey();
                if (!(entry.getValue() instanceof NamedList)) {
                    continue;
                }
                NamedList<?> analyzerAsNamedList = (NamedList<?>) entry.getValue();

                TokenizerFactory tokenizerFactory = null;
                TokenFilterFactory filterFactory;
                List<TokenFilterFactory> filterFactories = new LinkedList<>();

                for (Entry<String, ?> analyzerEntry : analyzerAsNamedList) {
                    String key = analyzerEntry.getKey();
                    // BUG FIX: previously this tested entry.getValue() — the OUTER
                    // entry, already known to be a NamedList — so the guard never
                    // skipped anything and the cast below could throw
                    // ClassCastException on malformed config. Test the inner entry.
                    if (!(analyzerEntry.getValue() instanceof NamedList)) {
                        continue;
                    }
                    Map<String, String> params = convertNamedListToMap((NamedList<?>) analyzerEntry.getValue());

                    String className = params.get("class");
                    if (className == null) {
                        continue;
                    }

                    params.put("luceneMatchVersion", luceneMatchVersion.toString());

                    if (key.equals("tokenizer")) {
                        try {
                            // First attempt: SPI lookup by short keyword
                            tokenizerFactory = TokenizerFactory.forName(className, params);
                        } catch (IllegalArgumentException iae) {
                            // A dotted name suggests the user supplied a class name,
                            // so only surface the SPI failure for short names.
                            // NOTE(review): no logger is in scope here; printStackTrace
                            // retained from original — consider routing to SLF4J.
                            if (!className.contains(".")) {
                                iae.printStackTrace();
                            }
                            // Now try by classname instead of SPI keyword
                            tokenizerFactory = loader.newInstance(className, TokenizerFactory.class,
                                    new String[] {}, new Class[] { Map.class }, new Object[] { params });
                        }
                        if (tokenizerFactory instanceof ResourceLoaderAware) {
                            ((ResourceLoaderAware) tokenizerFactory).inform(loader);
                        }
                    } else if (key.equals("filter")) {
                        try {
                            filterFactory = TokenFilterFactory.forName(className, params);
                        } catch (IllegalArgumentException iae) {
                            if (!className.contains(".")) {
                                iae.printStackTrace();
                            }
                            // Now try by classname instead of SPI keyword
                            filterFactory = loader.newInstance(className, TokenFilterFactory.class,
                                    new String[] {}, new Class[] { Map.class }, new Object[] { params });
                        }
                        if (filterFactory instanceof ResourceLoaderAware) {
                            ((ResourceLoaderAware) filterFactory).inform(loader);
                        }
                        filterFactories.add(filterFactory);
                    }
                }
                // A usable analyzer needs both a tokenizer and at least one filter
                if (tokenizerFactory == null) {
                    throw new SolrException(ErrorCode.SERVER_ERROR,
                            "tokenizer must not be null for analyzer: " + analyzerName);
                } else if (filterFactories.isEmpty()) {
                    throw new SolrException(ErrorCode.SERVER_ERROR,
                            "filter factories must be defined for analyzer: " + analyzerName);
                }

                TokenizerChain analyzer = new TokenizerChain(tokenizerFactory,
                        filterFactories.toArray(new TokenFilterFactory[filterFactories.size()]));

                analyzers.put(analyzerName, analyzer);
            }
        }
    } catch (IOException e) {
        throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to create parser. Check your config.", e);
    }
}

From source file:org.apache.jackrabbit.oak.plugins.index.lucene.NodeStateAnalyzerFactory.java

License:Apache License

/**
 * Builds one {@link TokenFilterFactory} per child node of the given node state.
 *
 * <p>Each child's name resolves to an SPI factory type; the child's properties
 * become the factory's argument map. Every factory is passed through
 * {@code init(...)} before being returned.</p>
 *
 * @param tokenFiltersState parent node state whose children define the filters
 * @return the constructed factories, in child-iteration order
 */
private TokenFilterFactory[] loadTokenFilterFactories(NodeState tokenFiltersState) {
    List<TokenFilterFactory> factories = newArrayList();

    Tree filtersTree = TreeFactory.createReadOnlyTree(tokenFiltersState);
    for (Tree child : filtersTree.getChildren()) {
        String childName = child.getName();
        NodeState childState = tokenFiltersState.getChildNode(childName);

        String factoryType = getFactoryType(childState, childName);
        Map<String, String> factoryArgs = convertNodeState(childState);

        TokenFilterFactory factory = TokenFilterFactory.forName(factoryType, factoryArgs);
        init(factory, childState);
        factories.add(factory);
    }

    return factories.toArray(new TokenFilterFactory[factories.size()]);
}

From source file:org.apache.tika.eval.tokens.AnalyzerDeserializer.java

License:Apache License

/**
 * Deserializes an array of token-filter descriptors into TokenFilterFactory
 * instances.
 *
 * <p>Each element must be a JSON object with a "factory" string (a class name;
 * the "oala." prefix is expanded to "org.apache.lucene.analysis.") and an
 * optional "params" map. The class name is reverse-mapped to its Lucene SPI
 * keyword before instantiation; ResourceLoaderAware factories are informed with
 * a classpath resource loader.</p>
 *
 * @param el           JSON array of filter descriptors; may be null/JsonNull
 * @param analyzerName analyzer name, used only for error messages
 * @return the factories in array order, an empty array for an empty JSON
 *         array, or {@code null} when {@code el} is null/JsonNull
 * @throws IOException              if informing a ResourceLoaderAware factory fails
 * @throws IllegalArgumentException on malformed JSON or an unknown factory
 */
private static TokenFilterFactory[] buildTokenFilterFactories(JsonElement el, String analyzerName)
        throws IOException {
    if (el == null || el.isJsonNull()) {
        return null;
    }
    if (!el.isJsonArray()) {
        throw new IllegalArgumentException(
                "Expecting array for tokenfilters, but got:" + el.toString() + " in " + analyzerName);
    }
    JsonArray jsonArray = (JsonArray) el;
    List<TokenFilterFactory> ret = new LinkedList<>();
    for (JsonElement filterMap : jsonArray) {
        if (!(filterMap instanceof JsonObject)) {
            throw new IllegalArgumentException(
                    "Expecting a map with \"factory\" string and \"params\" map in token filter factory;"
                            + " not: " + filterMap.toString() + " in " + analyzerName);
        }
        JsonElement factoryEl = ((JsonObject) filterMap).get(FACTORY);
        if (factoryEl == null || !factoryEl.isJsonPrimitive()) {
            throw new IllegalArgumentException(
                    "Expecting value for factory in token filter factory builder in " + analyzerName);
        }
        String factoryName = factoryEl.getAsString();
        // Expand the "oala." shorthand to the full Lucene package prefix
        factoryName = factoryName.startsWith("oala.")
                ? factoryName.replaceFirst("oala.", "org.apache.lucene.analysis.")
                : factoryName;

        JsonElement paramsEl = ((JsonObject) filterMap).get(PARAMS);
        Map<String, String> params = mapify(paramsEl);
        // forName() takes an SPI keyword, not a class name, so reverse-map the
        // class name through the SPI registry.
        String spiName = "";
        for (String s : TokenFilterFactory.availableTokenFilters()) {
            Class<?> clazz = TokenFilterFactory.lookupClass(s); // was raw Class
            if (clazz.getName().equals(factoryName)) {
                spiName = s;
                break;
            }
        }
        if (spiName.isEmpty()) {
            // Message fix: original concatenated "with name" + "'" producing
            // "...with name'Foo'..." — a space was missing.
            throw new IllegalArgumentException(
                    "A SPI class of type org.apache.lucene.analysis.util.TokenFilterFactory with name '"
                            + factoryName + "' does not exist.");
        }

        try {
            TokenFilterFactory tokenFilterFactory = TokenFilterFactory.forName(spiName, params);
            if (tokenFilterFactory instanceof ResourceLoaderAware) {
                ((ResourceLoaderAware) tokenFilterFactory)
                        .inform(new ClasspathResourceLoader(AnalyzerDeserializer.class));
            }
            ret.add(tokenFilterFactory);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException("While loading " + analyzerName, e);
        }
    }
    // toArray handles the empty case; the explicit size()==0 branch was redundant
    return ret.toArray(new TokenFilterFactory[0]);
}

From source file:org.tallison.gramreaper.ingest.schema.AnalyzerDeserializer.java

License:Apache License

/**
 * Deserializes an array of token-filter descriptors into TokenFilterFactory
 * instances.
 *
 * <p>Each element must be a JSON object with a "factory" string (a class name;
 * the "oala." prefix is expanded to "org.apache.lucene.analysis.") and an
 * optional "params" map. The class name is reverse-mapped to its Lucene SPI
 * keyword before instantiation; ResourceLoaderAware factories are informed with
 * a classpath resource loader.</p>
 *
 * @param el           JSON array of filter descriptors; may be null/JsonNull
 * @param analyzerName analyzer name, used only for error messages
 * @return the factories in array order, an empty array for an empty JSON
 *         array, or {@code null} when {@code el} is null/JsonNull
 * @throws IOException              if informing a ResourceLoaderAware factory fails
 * @throws IllegalArgumentException on malformed JSON or an unknown factory
 */
private static TokenFilterFactory[] buildTokenFilterFactories(JsonElement el, String analyzerName)
        throws IOException {
    if (el == null || el.isJsonNull()) {
        return null;
    }
    if (!el.isJsonArray()) {
        throw new IllegalArgumentException(
                "Expecting array for tokenfilters, but got:" + el.toString() + " in " + analyzerName);
    }
    JsonArray jsonArray = (JsonArray) el;
    List<TokenFilterFactory> ret = new LinkedList<>();
    for (JsonElement filterMap : jsonArray) {
        if (!(filterMap instanceof JsonObject)) {
            throw new IllegalArgumentException(
                    "Expecting a map with \"factory\" string and \"params\" map in token filter factory;"
                            + " not: " + filterMap.toString() + " in " + analyzerName);
        }
        JsonElement factoryEl = ((JsonObject) filterMap).get(FACTORY);
        if (factoryEl == null || !factoryEl.isJsonPrimitive()) {
            throw new IllegalArgumentException(
                    "Expecting value for factory in token filter factory builder in " + analyzerName);
        }
        String factoryName = factoryEl.getAsString();
        // Expand the "oala." shorthand to the full Lucene package prefix
        factoryName = factoryName.startsWith("oala.")
                ? factoryName.replaceFirst("oala.", "org.apache.lucene.analysis.")
                : factoryName;

        JsonElement paramsEl = ((JsonObject) filterMap).get(PARAMS);
        Map<String, String> params = mapify(paramsEl);
        // forName() takes an SPI keyword, not a class name, so reverse-map the
        // class name through the SPI registry.
        String spiName = "";
        for (String s : TokenFilterFactory.availableTokenFilters()) {
            Class<?> clazz = TokenFilterFactory.lookupClass(s); // was raw Class
            if (clazz.getName().equals(factoryName)) {
                spiName = s;
                break;
            }
        }
        // BUG FIX: the original never checked whether the SPI lookup succeeded.
        // When factoryName matched no registered filter, spiName stayed "" and
        // forName("") below threw a confusing IllegalArgumentException instead
        // of naming the missing class.
        if (spiName.isEmpty()) {
            throw new IllegalArgumentException(
                    "A SPI class of type org.apache.lucene.analysis.util.TokenFilterFactory with name '"
                            + factoryName + "' does not exist.");
        }

        try {
            TokenFilterFactory tokenFilterFactory = TokenFilterFactory.forName(spiName, params);
            if (tokenFilterFactory instanceof ResourceLoaderAware) {
                ((ResourceLoaderAware) tokenFilterFactory)
                        .inform(new ClasspathResourceLoader(AnalyzerDeserializer.class));
            }
            ret.add(tokenFilterFactory);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException("While loading " + analyzerName, e);
        }
    }
    // toArray handles the empty case; the explicit size()==0 branch was redundant
    return ret.toArray(new TokenFilterFactory[0]);
}