List of usage examples for org.apache.lucene.document.Document.getField
public final IndexableField getField(String name)
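getField returns the first IndexableField with the given name that was added to the document, or null if the document has no field of that name. Before the harvested examples below, here is a minimal, self-contained sketch of that pattern (assuming a Lucene 5.x/6.x-style API where RAMDirectory and IndexWriterConfig(Analyzer) are available; the GetFieldExample class and the field names "id", "title" and "missing" are illustrative, not taken from the projects below):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class GetFieldExample {
    public static void main(String[] args) throws Exception {
        // Index a single document with two stored fields.
        Directory dir = new RAMDirectory();
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new StringField("id", "1", Store.YES));
            doc.add(new TextField("title", "Lucene in Action", Store.YES));
            writer.addDocument(doc);
        }

        // Search for the document and read its fields back via getField().
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            TopDocs hits = searcher.search(new TermQuery(new Term("id", "1")), 10);
            Document hit = searcher.doc(hits.scoreDocs[0].doc);

            // getField returns the stored field with the given name...
            IndexableField title = hit.getField("title");
            System.out.println(title.stringValue());     // "Lucene in Action"

            // ...and null when no field with that name exists.
            System.out.println(hit.getField("missing")); // null
        }
    }
}

The real-world examples that follow use the same null-check-then-stringValue()/numericValue() pattern.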
From source file:org.apache.cxf.jaxrs.ext.search.lucene.AbstractLuceneQueryVisitorTest.java
License:Apache License
protected void doTestIntContentMatchWithQuery(Query query) throws Exception {
    ScoreDoc[] hits = isearcher.search(query, null, 1000).scoreDocs;
    assertEquals(1, hits.length);
    // Iterate through the results:
    for (int i = 0; i < hits.length; i++) {
        Document hitDoc = isearcher.doc(hits[i].doc);
        IndexableField field = hitDoc.getField("intfield");
        assertEquals(4, field.numericValue().intValue());
    }
}
From source file:org.apache.geode.cache.lucene.internal.repository.serializer.PdxFieldMapperJUnitTest.java
License:Apache License
@Test
public void testWriteFields() {
    String[] fields = new String[] { "s", "i" };
    PdxLuceneSerializer mapper = new PdxLuceneSerializer(fields);

    // Mock a PdxInstance that exposes a String field "s" and an int field "i".
    PdxInstance i = mock(PdxInstance.class);
    when(i.hasField("s")).thenReturn(true);
    when(i.hasField("i")).thenReturn(true);
    when(i.getField("s")).thenReturn("a");
    when(i.getField("i")).thenReturn(5);

    // Serialize it into a Lucene document and verify both fields were written.
    Document doc = new Document();
    mapper.toDocument(i, doc);
    assertEquals(2, doc.getFields().size());
    assertEquals("a", doc.getField("s").stringValue());
    assertEquals(5, doc.getField("i").numericValue());
}
From source file:org.apache.geode.cache.lucene.internal.repository.serializer.PdxFieldMapperJUnitTest.java
License:Apache License
@Test
public void testIgnoreMissing() {
    String[] fields = new String[] { "s", "i", "s2", "o" };
    PdxLuceneSerializer mapper = new PdxLuceneSerializer(fields);
    PdxInstance i = mock(PdxInstance.class);
    when(i.hasField("s")).thenReturn(true);
    when(i.hasField("i")).thenReturn(true);
    when(i.hasField("o")).thenReturn(true);
    when(i.hasField("o2")).thenReturn(true);
    when(i.getField("s")).thenReturn("a");
    when(i.getField("i")).thenReturn(5);
    when(i.getField("o")).thenReturn(new Object());
    when(i.getField("o2")).thenReturn(new Object());
    Document doc = new Document();
    mapper.toDocument(i, doc);
    assertEquals(2, doc.getFields().size());
    assertEquals("a", doc.getField("s").stringValue());
    assertEquals(5, doc.getField("i").numericValue());
}
From source file:org.apache.geode.cache.lucene.internal.repository.serializer.PdxFieldMapperJUnitTest.java
License:Apache License
@Test
public void testNullField() {
    String[] fields = new String[] { "s", "i" };
    PdxLuceneSerializer mapper = new PdxLuceneSerializer(fields);
    PdxInstance i = mock(PdxInstance.class);
    when(i.hasField("s")).thenReturn(true);
    when(i.hasField("i")).thenReturn(true);
    when(i.getField("s")).thenReturn("a");
    when(i.getField("i")).thenReturn(null);
    Document doc = new Document();
    mapper.toDocument(i, doc);
    // The null-valued field "i" is not added, so getField returns null for it.
    assertEquals(1, doc.getFields().size());
    assertEquals("a", doc.getField("s").stringValue());
    assertNull(doc.getField("i"));
}
From source file:org.apache.geode.cache.lucene.internal.repository.serializer.ReflectionFieldMapperJUnitTest.java
License:Apache License
@Test
public void testNullField() {
    String[] fields = new String[] { "s", "o", "s2" };
    ReflectionLuceneSerializer mapper = new ReflectionLuceneSerializer(Type2.class, fields);
    Type2 t = new Type2("a", 1, 2L, 3.0, 4.0f, null);
    Document doc = new Document();
    mapper.toDocument(t, doc);
    assertEquals(1, doc.getFields().size());
    assertEquals("a", doc.getField("s").stringValue());
    assertNull(doc.getField("s2"));
}
From source file:org.apache.gora.lucene.store.LuceneStore.java
License:Apache License
@Override
public long deleteByQuery(Query<K, T> query) {
    try {
        // Figure out how many were there before
        LuceneQuery<K, T> q = (LuceneQuery<K, T>) query;
        LuceneResult<K, T> r = (LuceneResult<K, T>) q.execute();
        int before = r.getScoreDocs().length;
        if (query.getFields() == null || (query.getFields().length == getFields().length)
                || isPrimaryKeyIncluded(query.getFields())) {
            // Delete them
            writer.deleteDocuments(q.toLuceneQuery());
            searcherManager.maybeRefresh();
        } else {
            Query<K, T> selectQuery = this.newQuery();
            selectQuery.setStartKey(q.getStartKey());
            selectQuery.setEndKey(q.getEndKey());
            LuceneResult<K, T> selectResult = (LuceneResult<K, T>) selectQuery.execute();
            ScoreDoc[] scoreDocs = selectResult.getScoreDocs();
            HashSet<String> fields = new HashSet<>();
            fields.addAll(mapping.getLuceneFields());
            IndexSearcher searcher = selectResult.getSearcher();
            if (scoreDocs.length > 0) {
                for (ScoreDoc scoreDoc : scoreDocs) {
                    Document doc = searcher.doc(scoreDoc.doc, fields);
                    for (String avroField : query.getFields()) {
                        String docField = mapping.getLuceneField(avroField);
                        if (doc.getField(docField) != null) {
                            doc.removeField(docField);
                        }
                    }
                    String key = doc.get(getMapping().getPrimaryKey());
                    doc.add(new StringField(mapping.getPrimaryKey(), key, Store.YES));
                    writer.updateDocument(new Term(mapping.getPrimaryKey(), key), doc);
                    searcherManager.maybeRefresh();
                }
            }
            selectResult.close();
        }
        // Figure out how many there are after
        r = (LuceneResult<K, T>) q.execute();
        int after = r.getScoreDocs().length;
        return before - after;
    } catch (IOException e) {
        LOG.error("Unable to deleteByQuery: {}", query.toString(), e);
    }
    return 0;
}
From source file:org.apache.jackrabbit.core.query.lucene.constraint.NoDuplicatesConstraint.java
License:Open Source License
public boolean evaluate(ScoreNode[] row, Name[] selectorNames, EvaluationContext context) throws IOException {
    StringBuilder idBuilder = new StringBuilder(1024);
    for (ScoreNode sn : row) {
        if (sn == null) {
            idBuilder.append("null");
        } else {
            int docNb = sn.getDoc(context.getIndexReader());
            Document doc = context.getIndexReader().document(docNb);
            if (doc.getField(JahiaNodeIndexer.TRANSLATED_NODE_PARENT) != null) {
                idBuilder.append(doc.getField(FieldNames.PARENT).stringValue());
            } else {
                idBuilder.append(sn.getNodeId().toString());
            }
        }
    }
    final String id = idBuilder.toString();
    return ids.add(id);
}
From source file:org.apache.jackrabbit.core.query.lucene.JahiaLuceneQueryFactoryImpl.java
License:Open Source License
/**
 * Get a String array of indexed fields for running quick checks
 * [0] the uuid of the language independent node
 * [1] the acl-id
 * [2] "1" if visibility rule is set for node
 * [3] "true" node is published / "false" node is not published
 */
private IndexedNodeInfo getIndexedNodeInfo(ScoreNode sn, IndexReader reader, final boolean onlyMainNodeUuid)
        throws IOException {
    IndexedNodeInfo info = new IndexedNodeInfo(sn.getDoc(reader));
    Document doc = reader.document(info.getDocNumber(),
            onlyMainNodeUuid ? ONLY_MAIN_NODE_UUID : OPTIMIZATION_FIELDS);
    if (doc.getField(JahiaNodeIndexer.TRANSLATED_NODE_PARENT) != null) {
        info.setMainNodeUuid(doc.getField(FieldNames.PARENT).stringValue());
    } else {
        info.setMainNodeUuid(sn.getNodeId().toString());
    }
    if (!onlyMainNodeUuid) {
        if (isAclUuidInIndex()) {
            Field aclUuidField = doc.getField(JahiaNodeIndexer.ACL_UUID);
            if (aclUuidField != null) {
                info.setAclUuid(aclUuidField.stringValue());
            }
        }
        Field checkVisibilityField = doc.getField(JahiaNodeIndexer.CHECK_VISIBILITY);
        if (checkVisibilityField != null) {
            info.setCheckVisibility(checkVisibilityField.stringValue());
        }
        Field publishedField = doc.getField(JahiaNodeIndexer.PUBLISHED);
        if (publishedField != null) {
            info.setPublished(publishedField.stringValue());
        }
        Field[] checkInvalidLanguagesField = doc.getFields(JahiaNodeIndexer.INVALID_LANGUAGES);
        if (checkInvalidLanguagesField != null && checkInvalidLanguagesField.length > 0) {
            for (Field field : checkInvalidLanguagesField) {
                info.addInvalidLanguages(field.stringValue());
            }
        }
    }
    return info;
}
From source file:org.apache.jackrabbit.core.query.lucene.LanguageCustomizingAnalyzerRegistry.java
License:Open Source License
@Override
public String getKeyFor(Document document) {
    if (document != null) {
        final Field field = document.getField(JahiaNodeIndexer.TRANSLATION_LANGUAGE);
        if (field != null) {
            return field.stringValue();
        }
    }
    return null;
}
From source file:org.apache.jackrabbit.oak.plugins.index.lucene.util.FilteredSortedSetDocValuesFacetCounts.java
License:Apache License
private LabelAndValue[] filterFacet(int docId, String dimension, LabelAndValue[] labelAndValues)
        throws IOException {
    boolean filtered = false;
    Map<String, Long> newValues = new HashMap<String, Long>();
    Document document = reader.document(docId);
    SortedSetDocValues docValues = state.getDocValues();
    docValues.setDocument(docId);
    // filter using doc values (avoiding requiring stored values)
    if (!filter.isAccessible(document.getField(FieldNames.PATH).stringValue() + "/" + dimension)) {
        filtered = true;
        for (LabelAndValue lv : labelAndValues) {
            long existingCount = lv.value.longValue();
            BytesRef key = new BytesRef(FacetsConfig.pathToString(dimension, new String[] { lv.label }));
            long l = docValues.lookupTerm(key);
            if (l >= 0) {
                if (existingCount > 0) {
                    newValues.put(lv.label, existingCount - 1);
                } else {
                    if (newValues.containsKey(lv.label)) {
                        newValues.remove(lv.label);
                    }
                }
            }
        }
    }
    LabelAndValue[] filteredLVs;
    if (filtered) {
        filteredLVs = new LabelAndValue[newValues.size()];
        int i = 0;
        for (Map.Entry<String, Long> entry : newValues.entrySet()) {
            filteredLVs[i] = new LabelAndValue(entry.getKey(), entry.getValue());
            i++;
        }
    } else {
        filteredLVs = labelAndValues;
    }
    return filteredLVs;
}