Java tutorial: Elasticsearch synonym analysis tests

The listing below is the SynonymsAnalysisTests class from Elasticsearch's analysis-common module. It checks that synonym token filters rewrite token streams as expected, both for inline rules and for file-based rules (plain and WordNet formats), and that building an analyzer fails cleanly when a stop filter deletes words that the synonym rules depend on.
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.hamcrest.MatcherAssert;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.startsWith;

public class SynonymsAnalysisTests extends ESTestCase {

    private IndexAnalyzers indexAnalyzers;

    public void testSynonymsAnalysis() throws IOException {
        // Copy the synonym files into the node's config directory so the
        // file-based synonym filters can load them by relative path.
        InputStream synonyms = getClass().getResourceAsStream("synonyms.txt");
        InputStream synonymsWordnet = getClass().getResourceAsStream("synonyms_wordnet.txt");
        Path home = createTempDir();
        Path config = home.resolve("config");
        Files.createDirectory(config);
        Files.copy(synonyms, config.resolve("synonyms.txt"));
        Files.copy(synonymsWordnet, config.resolve("synonyms_wordnet.txt"));

        // The analyzer and filter definitions come from the synonyms.json test resource.
        String json = "/org/elasticsearch/analysis/common/synonyms.json";
        Settings settings = Settings.builder()
            .loadFromStream(json, getClass().getResourceAsStream(json), false)
            .put(Environment.PATH_HOME_SETTING.getKey(), home)
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .build();

        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
        indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers;

        // Each call compares the named analyzer's token output against the expected string.
        match("synonymAnalyzer", "kimchy is the dude abides", "shay is the elasticsearch man!");
        match("synonymAnalyzer_file", "kimchy is the dude abides", "shay is the elasticsearch man!");
        match("synonymAnalyzerWordnet", "abstain", "abstain refrain desist");
        match("synonymAnalyzerWordnet_file", "abstain", "abstain refrain desist");
        match("synonymAnalyzerWithsettings", "kimchy", "sha hay");
        match("synonymAnalyzerWithStopAfterSynonym", "kimchy is the dude abides , stop", "shay is the elasticsearch man! ,");
        match("synonymAnalyzerWithStopBeforeSynonym", "kimchy is the dude abides , stop", "shay is the elasticsearch man! ,");
        match("synonymAnalyzerWithStopSynonymAfterSynonym", "kimchy is the dude abides", "shay is the man!");
        match("synonymAnalyzerExpand", "kimchy is the dude abides", "kimchy shay is the dude elasticsearch abides man!");
        match("synonymAnalyzerExpandWithStopAfterSynonym", "kimchy is the dude abides", "shay is the dude abides man!");
    }

    public void testSynonymWordDeleteByAnalyzer() throws IOException {
        // A stop filter placed before the synonym filter removes words that the
        // synonym rules need, so building the analyzer is expected to fail.
        Settings settings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put("path.home", createTempDir().toString())
            .put("index.analysis.filter.synonym.type", "synonym")
            .putList("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!")
            .put("index.analysis.filter.stop_within_synonym.type", "stop")
            .putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
            .put("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.tokenizer", "whitespace")
            .putList("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym", "synonym")
            .build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

        try {
            indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers;
            fail("fail! due to synonym word deleted by analyzer");
        } catch (Exception e) {
            assertThat(e, instanceOf(IllegalArgumentException.class));
            assertThat(e.getMessage(), startsWith("failed to build synonyms"));
        }
    }

    public void testExpandSynonymWordDeleteByAnalyzer() throws IOException {
        // Same failure scenario as above, but with expand-style (comma-separated)
        // synonym rules instead of explicit "=>" replacements.
        Settings settings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put("path.home", createTempDir().toString())
            .put("index.analysis.filter.synonym_expand.type", "synonym")
            .putList("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!")
            .put("index.analysis.filter.stop_within_synonym.type", "stop")
            .putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
            .put("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.tokenizer", "whitespace")
            .putList("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym", "synonym_expand")
            .build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

        try {
            indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers;
            fail("fail! due to synonym word deleted by analyzer");
        } catch (Exception e) {
            assertThat(e, instanceOf(IllegalArgumentException.class));
            assertThat(e.getMessage(), startsWith("failed to build synonyms"));
        }
    }

    // Runs the named analyzer over `source`, joins the emitted terms with single
    // spaces, and asserts that the result equals `target`.
    private void match(String analyzerName, String source, String target) throws IOException {
        Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer();

        TokenStream stream = analyzer.tokenStream("", source);
        stream.reset();

        CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
        StringBuilder sb = new StringBuilder();
        while (stream.incrementToken()) {
            sb.append(termAtt.toString()).append(" ");
        }

        MatcherAssert.assertThat(target, equalTo(sb.toString().trim()));
    }
}