List of usage examples for org.apache.lucene.store RAMDirectory createOutput
@Override public IndexOutput createOutput(String name, IOContext context) throws IOException
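Before the per-project examples, here is a minimal self-contained sketch of the createOutput/openInput round trip on a RAMDirectory. It is not taken from any of the source files below; the class name and file name are illustrative, and it assumes a Lucene version in which RAMDirectory is still available (e.g. the 4.x line used by these examples).

import java.io.IOException;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;

// Hypothetical example class, not part of Lucene or Elasticsearch.
public class RAMDirectoryCreateOutputExample {
    public static void main(String[] args) throws IOException {
        RAMDirectory dir = new RAMDirectory();

        // createOutput returns an IndexOutput for a new file inside the in-memory directory.
        IndexOutput output = dir.createOutput("example.bin", IOContext.DEFAULT);
        output.writeByte((byte) 42);
        output.writeInt(1234);
        output.close(); // the file must be closed before it can be read back

        // openInput reads the same file; readByte/readInt mirror the writes above.
        IndexInput input = dir.openInput("example.bin", IOContext.DEFAULT);
        byte b = input.readByte(); // 42
        int i = input.readInt();   // 1234
        System.out.println("read byte=" + b + " int=" + i);

        input.close();
        dir.close();
    }
}

The tests below follow the same pattern: write through createOutput, close the output, then reopen the file with openInput and verify its contents.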
From source file:org.elasticsearch.common.lucene.store.InputStreamIndexInputTests.java
License:Apache License
@Test
public void testReadMultiFourBytesLimit() throws IOException {
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
    for (int i = 0; i < 3; i++) {
        output.writeByte((byte) 1);
    }
    for (int i = 0; i < 3; i++) {
        output.writeByte((byte) 2);
    }
    output.close();

    IndexInput input = dir.openInput("test", IOContext.DEFAULT);
    byte[] read = new byte[4];

    assertThat(input.getFilePointer(), lessThan(input.length()));
    InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
    assertThat(is.actualSizeToRead(), equalTo(4L));
    assertThat(is.read(read), equalTo(4));
    assertThat(read[0], equalTo((byte) 1));
    assertThat(read[1], equalTo((byte) 1));
    assertThat(read[2], equalTo((byte) 1));
    assertThat(read[3], equalTo((byte) 2));

    assertThat(input.getFilePointer(), lessThan(input.length()));
    is = new InputStreamIndexInput(input, 4);
    assertThat(is.actualSizeToRead(), equalTo(2L));
    assertThat(is.read(read), equalTo(2));
    assertThat(read[0], equalTo((byte) 2));
    assertThat(read[1], equalTo((byte) 2));

    assertThat(input.getFilePointer(), equalTo(input.length()));
    is = new InputStreamIndexInput(input, 4);
    assertThat(is.actualSizeToRead(), equalTo(0L));
    assertThat(is.read(read), equalTo(-1));
}
From source file:org.elasticsearch.common.lucene.store.InputStreamIndexInputTests.java
License:Apache License
@Test
public void testMarkRest() throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
    for (int i = 0; i < 3; i++) {
        output.writeByte((byte) 1);
    }
    for (int i = 0; i < 3; i++) {
        output.writeByte((byte) 2);
    }
    output.close();

    IndexInput input = dir.openInput("test", IOContext.DEFAULT);
    InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
    assertThat(is.markSupported(), equalTo(true));
    assertThat(is.read(), equalTo(1));
    assertThat(is.read(), equalTo(1));
    is.mark(0);
    assertThat(is.read(), equalTo(1));
    assertThat(is.read(), equalTo(2));
    is.reset();
    assertThat(is.read(), equalTo(1));
    assertThat(is.read(), equalTo(2));
}
From source file:org.elasticsearch.search.suggest.completion.CompletionPostingsFormatTest.java
License:Apache License
@Test
public void testNoDocs() throws IOException {
    AnalyzingCompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, false, true, true);
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("foo.txt", IOContext.DEFAULT);
    FieldsConsumer consumer = provider.consumer(output);
    FieldInfo fieldInfo = new FieldInfo("foo", true, 1, false, true, true,
            IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, DocValuesType.SORTED, DocValuesType.BINARY,
            new HashMap<String, String>());
    TermsConsumer addField = consumer.addField(fieldInfo);
    addField.finish(0, 0, 0);
    consumer.close();
    output.close();

    IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT);
    LookupFactory load = provider.load(input);
    PostingsFormatProvider format = new PreBuiltPostingsFormatProvider(new Elasticsearch090PostingsFormat());
    NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer(TEST_VERSION_CURRENT));
    assertNull(load.getLookup(
            new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true,
                    Integer.MAX_VALUE, AbstractFieldMapper.MultiFields.empty(), null),
            new CompletionSuggestionContext(null)));
    dir.close();
}
From source file:org.elasticsearch.search.suggest.completion.CompletionPostingsFormatTests.java
License:Apache License
@Test
public void testNoDocs() throws IOException {
    AnalyzingCompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, false, true, true);
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("foo.txt", IOContext.DEFAULT);
    FieldsConsumer consumer = provider.consumer(output);
    consumer.write(new Fields() {
        @Override
        public Iterator<String> iterator() {
            return Arrays.asList("foo").iterator();
        }

        @Override
        public Terms terms(String field) throws IOException {
            return null;
        }

        @Override
        public int size() {
            return 1;
        }
    });
    consumer.close();
    output.close();

    IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT);
    LookupFactory load = provider.load(input);
    CompletionFieldMapper.CompletionFieldType fieldType = FIELD_TYPE.clone();
    fieldType.setProvider(provider);
    assertNull(load.getLookup(fieldType, new CompletionSuggestionContext(null)));
    dir.close();
}
From source file:org.elasticsearch.search.suggest.completion.old.CompletionPostingsFormatTest.java
License:Apache License
@Test
public void testNoDocs() throws IOException {
    AnalyzingCompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, false, true, true);
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("foo.txt", IOContext.DEFAULT);
    FieldsConsumer consumer = provider.consumer(output);
    consumer.write(new Fields() {
        @Override
        public Iterator<String> iterator() {
            return Arrays.asList("foo").iterator();
        }

        @Override
        public Terms terms(String field) throws IOException {
            return null;
        }

        @Override
        public int size() {
            return 1;
        }
    });
    consumer.close();
    output.close();

    IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT);
    LookupFactory load = provider.load(input);
    OldCompletionFieldMapper.CompletionFieldType fieldType = FIELD_TYPE.clone();
    fieldType.setProvider(provider);
    assertNull(load.getLookup(fieldType, new CompletionSuggestionContext(null)));
    dir.close();
}
From source file:org.elasticsearch.search.suggest.CompletionPostingsFormatTest.java
License:Apache License
@Test
public void testCompletionPostingsFormat() throws IOException {
    AnalyzingCompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, false, true, true);
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("foo.txt", IOContext.DEFAULT);
    FieldsConsumer consumer = provider.consumer(output);
    FieldInfo fieldInfo = new FieldInfo("foo", true, 1, false, true, true,
            IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, DocValuesType.SORTED, DocValuesType.BINARY,
            new HashMap<String, String>());
    TermsConsumer addField = consumer.addField(fieldInfo);
    PostingsConsumer postingsConsumer = addField.startTerm(new BytesRef("foofightersgenerator"));
    postingsConsumer.startDoc(0, 1);
    postingsConsumer.addPosition(256 - 2,
            provider.buildPayload(new BytesRef("Generator - Foo Fighters"), 9, new BytesRef("id:10")), 0, 1);
    postingsConsumer.finishDoc();
    addField.finishTerm(new BytesRef("foofightersgenerator"), new TermStats(1, 1));
    addField.startTerm(new BytesRef("generator"));
    postingsConsumer.startDoc(0, 1);
    postingsConsumer.addPosition(256 - 1,
            provider.buildPayload(new BytesRef("Generator - Foo Fighters"), 9, new BytesRef("id:10")), 0, 1);
    postingsConsumer.finishDoc();
    addField.finishTerm(new BytesRef("generator"), new TermStats(1, 1));
    addField.finish(1, 1, 1);
    consumer.close();
    output.close();

    IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT);
    LookupFactory load = provider.load(input);
    PostingsFormatProvider format = new PreBuiltPostingsFormatProvider(new ElasticSearch090PostingsFormat());
    NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer(TEST_VERSION_CURRENT));
    Lookup lookup = load.getLookup(
            new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true,
                    Integer.MAX_VALUE),
            new CompletionSuggestionContext(null));
    List<LookupResult> result = lookup.lookup("ge", false, 10);
    assertThat(result.get(0).key.toString(), equalTo("Generator - Foo Fighters"));
    assertThat(result.get(0).payload.utf8ToString(), equalTo("id:10"));
    dir.close();
}
From source file:org.elasticsearch.search.suggest.CompletionPostingsFormatTest.java
License:Apache License
@Test
public void testNoDocs() throws IOException {
    AnalyzingCompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, false, true, true);
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("foo.txt", IOContext.DEFAULT);
    FieldsConsumer consumer = provider.consumer(output);
    FieldInfo fieldInfo = new FieldInfo("foo", true, 1, false, true, true,
            IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, DocValuesType.SORTED, DocValuesType.BINARY,
            new HashMap<String, String>());
    TermsConsumer addField = consumer.addField(fieldInfo);
    addField.finish(0, 0, 0);
    consumer.close();
    output.close();

    IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT);
    LookupFactory load = provider.load(input);
    PostingsFormatProvider format = new PreBuiltPostingsFormatProvider(new ElasticSearch090PostingsFormat());
    NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer(TEST_VERSION_CURRENT));
    assertNull(load.getLookup(
            new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true,
                    Integer.MAX_VALUE),
            new CompletionSuggestionContext(null)));
    dir.close();
}
From source file:org.elasticsearch.test.integration.search.suggest.CompletionPostingsFormatTest.java
License:Apache License
@Test
public void testCompletionPostingsFormat() throws IOException {
    AnalyzingCompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, false, true, true);
    RAMDirectory dir = new RAMDirectory();
    IndexOutput output = dir.createOutput("foo.txt", IOContext.DEFAULT);
    FieldsConsumer consumer = provider.consumer(output);
    FieldInfo fieldInfo = new FieldInfo("foo", true, 1, false, true, true,
            IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, DocValuesType.SORTED, DocValuesType.BINARY,
            new HashMap<String, String>());
    TermsConsumer addField = consumer.addField(fieldInfo);
    PostingsConsumer postingsConsumer = addField.startTerm(new BytesRef("foofightersgenerator"));
    postingsConsumer.startDoc(0, 1);
    postingsConsumer.addPosition(256 - 2,
            provider.buildPayload(new BytesRef("Generator - Foo Fighters"), 9, new BytesRef("id:10")), 0, 1);
    postingsConsumer.finishDoc();
    addField.finishTerm(new BytesRef("foofightersgenerator"), new TermStats(1, 1));
    addField.startTerm(new BytesRef("generator"));
    postingsConsumer.startDoc(0, 1);
    postingsConsumer.addPosition(256 - 1,
            provider.buildPayload(new BytesRef("Generator - Foo Fighters"), 9, new BytesRef("id:10")), 0, 1);
    postingsConsumer.finishDoc();
    addField.finishTerm(new BytesRef("generator"), new TermStats(1, 1));
    addField.finish(1, 1, 1);
    consumer.close();
    output.close();

    IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT);
    LookupFactory load = provider.load(input);
    PostingsFormatProvider format = new PreBuiltPostingsFormatProvider(new ElasticSearch090PostingsFormat());
    NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer(TEST_VERSION_CURRENT));
    Lookup lookup = load.getLookup(
            new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true),
            false);
    List<LookupResult> result = lookup.lookup("ge", false, 10);
    assertThat(result.get(0).key.toString(), equalTo("Generator - Foo Fighters"));
    assertThat(result.get(0).payload.utf8ToString(), equalTo("id:10"));
    dir.close();
}