Example usage for org.apache.lucene.document StoredField StoredField

Introduction

On this page you can find example usage for the org.apache.lucene.document.StoredField constructor StoredField(String name, double value).

Prototype

public StoredField(String name, double value) 

Document

Create a stored-only field with the given double value.
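
A minimal sketch of the pattern that recurs in the examples below (the field name "price" and the helper class are illustrative, not taken from any of the listed sources): the double is added twice under the same field name, once as a DoublePoint so it can be queried, and once as a StoredField so the original value can be read back from search hits.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.StoredField;

public class StoredDoubleSketch {
    /** Builds a document whose double value is both searchable and retrievable. */
    public static Document buildDoc(double price) {
        Document doc = new Document();
        doc.add(new DoublePoint("price", price)); // indexed for exact/range queries, not stored
        doc.add(new StoredField("price", price)); // stored only; read back via IndexSearcher.doc(docId)
        return doc;
    }
}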

Usage

From source file:org.apache.solr.uninverting.TestFieldCacheSort.java

License:Apache License

/** Tests sorting on type double in reverse */
public void testDoubleReverse() throws IOException {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(new DoublePoint("value", 30.1));
    doc.add(new StoredField("value", 30.1));
    writer.addDocument(doc);
    doc = new Document();
    doc.add(new DoublePoint("value", -1.3));
    doc.add(new StoredField("value", -1.3));
    writer.addDocument(doc);
    doc = new Document();
    doc.add(new DoublePoint("value", 4.2333333333333));
    doc.add(new StoredField("value", 4.2333333333333));
    writer.addDocument(doc);
    doc = new Document();
    doc.add(new DoublePoint("value", 4.2333333333332));
    doc.add(new StoredField("value", 4.2333333333332));
    writer.addDocument(doc);
    IndexReader ir = UninvertingReader.wrap(writer.getReader(),
            Collections.singletonMap("value", Type.DOUBLE_POINT));
    writer.close();

    IndexSearcher searcher = newSearcher(ir, false);
    Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true));

    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
    assertEquals(4, td.totalHits);
    // numeric order
    assertEquals("30.1", searcher.doc(td.scoreDocs[0].doc).get("value"));
    assertEquals("4.2333333333333", searcher.doc(td.scoreDocs[1].doc).get("value"));
    assertEquals("4.2333333333332", searcher.doc(td.scoreDocs[2].doc).get("value"));
    assertEquals("-1.3", searcher.doc(td.scoreDocs[3].doc).get("value"));
    TestUtil.checkReader(ir);
    ir.close();
    dir.close();
}

From source file:org.apache.solr.uninverting.TestFieldCacheSortRandom.java

License:Apache License

private void testRandomStringSort(SortField.Type type) throws Exception {
    Random random = new Random(random().nextLong());

    final int NUM_DOCS = atLeast(100);
    final Directory dir = newDirectory();
    final RandomIndexWriter writer = new RandomIndexWriter(random, dir);
    final boolean allowDups = random.nextBoolean();
    final Set<String> seen = new HashSet<>();
    final int maxLength = TestUtil.nextInt(random, 5, 100);
    if (VERBOSE) {
        System.out.println("TEST: NUM_DOCS=" + NUM_DOCS + " maxLength=" + maxLength + " allowDups=" + allowDups);
    }

    int numDocs = 0;
    final List<BytesRef> docValues = new ArrayList<>();
    // TODO: deletions
    while (numDocs < NUM_DOCS) {
        final Document doc = new Document();

        // 10% of the time, the document is missing the value:
        final BytesRef br;
        if (random().nextInt(10) != 7) {
            final String s;
            if (random.nextBoolean()) {
                s = TestUtil.randomSimpleString(random, maxLength);
            } else {
                s = TestUtil.randomUnicodeString(random, maxLength);
            }

            if (!allowDups) {
                if (seen.contains(s)) {
                    continue;
                }
                seen.add(s);
            }

            if (VERBOSE) {
                System.out.println("  " + numDocs + ": s=" + s);
            }

            doc.add(new StringField("stringdv", s, Field.Store.NO));
            docValues.add(new BytesRef(s));

        } else {
            br = null;
            if (VERBOSE) {
                System.out.println("  " + numDocs + ": <missing>");
            }
            docValues.add(null);
        }

        doc.add(new IntPoint("id", numDocs));
        doc.add(new StoredField("id", numDocs));
        writer.addDocument(doc);
        numDocs++;

        if (random.nextInt(40) == 17) {
            // force flush
            writer.getReader().close();
        }
    }

    Map<String, UninvertingReader.Type> mapping = new HashMap<>();
    mapping.put("stringdv", Type.SORTED);
    mapping.put("id", Type.INTEGER_POINT);
    final IndexReader r = UninvertingReader.wrap(writer.getReader(), mapping);
    writer.close();
    if (VERBOSE) {
        System.out.println("  reader=" + r);
    }

    final IndexSearcher s = newSearcher(r, false);
    final int ITERS = atLeast(100);
    for (int iter = 0; iter < ITERS; iter++) {
        final boolean reverse = random.nextBoolean();

        final TopFieldDocs hits;
        final SortField sf;
        final boolean sortMissingLast;
        final boolean missingIsNull;
        sf = new SortField("stringdv", type, reverse);
        sortMissingLast = random().nextBoolean();
        missingIsNull = true;

        if (sortMissingLast) {
            sf.setMissingValue(SortField.STRING_LAST);
        }

        final Sort sort;
        if (random.nextBoolean()) {
            sort = new Sort(sf);
        } else {
            sort = new Sort(sf, SortField.FIELD_DOC);
        }
        final int hitCount = TestUtil.nextInt(random, 1, r.maxDoc() + 20);
        final RandomQuery f = new RandomQuery(random.nextLong(), random.nextFloat(), docValues);
        int queryType = random.nextInt(2);
        if (queryType == 0) {
            hits = s.search(new ConstantScoreQuery(f), hitCount, sort, random.nextBoolean(),
                    random.nextBoolean());
        } else {
            hits = s.search(f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
        }

        if (VERBOSE) {
            System.out.println("\nTEST: iter=" + iter + " " + hits.totalHits + " hits; topN=" + hitCount
                    + "; reverse=" + reverse + "; sortMissingLast=" + sortMissingLast + " sort=" + sort);
        }

        // Compute expected results:
        Collections.sort(f.matchValues, new Comparator<BytesRef>() {
            @Override
            public int compare(BytesRef a, BytesRef b) {
                if (a == null) {
                    if (b == null) {
                        return 0;
                    }
                    if (sortMissingLast) {
                        return 1;
                    } else {
                        return -1;
                    }
                } else if (b == null) {
                    if (sortMissingLast) {
                        return -1;
                    } else {
                        return 1;
                    }
                } else {
                    return a.compareTo(b);
                }
            }
        });

        if (reverse) {
            Collections.reverse(f.matchValues);
        }
        final List<BytesRef> expected = f.matchValues;
        if (VERBOSE) {
            System.out.println("  expected:");
            for (int idx = 0; idx < expected.size(); idx++) {
                BytesRef br = expected.get(idx);
                if (br == null && missingIsNull == false) {
                    br = new BytesRef();
                }
                System.out.println("    " + idx + ": " + (br == null ? "<missing>" : br.utf8ToString()));
                if (idx == hitCount - 1) {
                    break;
                }
            }
        }

        if (VERBOSE) {
            System.out.println("  actual:");
            for (int hitIDX = 0; hitIDX < hits.scoreDocs.length; hitIDX++) {
                final FieldDoc fd = (FieldDoc) hits.scoreDocs[hitIDX];
                BytesRef br = (BytesRef) fd.fields[0];

                System.out.println("    " + hitIDX + ": " + (br == null ? "<missing>" : br.utf8ToString())
                        + " id=" + s.doc(fd.doc).get("id"));
            }
        }
        for (int hitIDX = 0; hitIDX < hits.scoreDocs.length; hitIDX++) {
            final FieldDoc fd = (FieldDoc) hits.scoreDocs[hitIDX];
            BytesRef br = expected.get(hitIDX);
            if (br == null && missingIsNull == false) {
                br = new BytesRef();
            }

            // Normally, the old codecs (that don't support
            // docsWithField via doc values) will always return
            // an empty BytesRef for the missing case; however,
            // if all docs in a given segment were missing, in
            // that case it will return null!  So we must map
            // null here, too:
            BytesRef br2 = (BytesRef) fd.fields[0];
            if (br2 == null && missingIsNull == false) {
                br2 = new BytesRef();
            }

            assertEquals(br, br2);
        }
    }

    r.close();
    dir.close();
}

From source file:org.apache.solr.uninverting.TestLegacyFieldCache.java

License:Apache License

public void testNonIndexedFields() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(new StoredField("bogusbytes", "bogus"));
    doc.add(new StoredField("bogusshorts", "bogus"));
    doc.add(new StoredField("bogusints", "bogus"));
    doc.add(new StoredField("boguslongs", "bogus"));
    doc.add(new StoredField("bogusfloats", "bogus"));
    doc.add(new StoredField("bogusdoubles", "bogus"));
    doc.add(new StoredField("bogusbits", "bogus"));
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    iw.close();

    LeafReader ar = getOnlyLeafReader(ir);

    final FieldCache cache = FieldCache.DEFAULT;
    cache.purgeAllCaches();
    assertEquals(0, cache.getCacheEntries().length);

    NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER);
    assertEquals(NO_MORE_DOCS, ints.nextDoc());

    NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER);
    assertEquals(NO_MORE_DOCS, longs.nextDoc());

    NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER);
    assertEquals(NO_MORE_DOCS, floats.nextDoc());

    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER);
    assertEquals(NO_MORE_DOCS, doubles.nextDoc());

    // check that we cached nothing
    assertEquals(0, cache.getCacheEntries().length);
    ir.close();
    dir.close();
}

From source file:org.apache.solr.uninverting.TestUninvertingReader.java

License:Apache License

public void testFieldInfos() throws IOException {
    Directory dir = newDirectory();
    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));

    Document doc = new Document();
    BytesRef idBytes = new BytesRef("id");
    doc.add(new StringField("id", idBytes, Store.YES));
    doc.add(new LegacyIntField("int", 5, Store.YES));
    doc.add(new NumericDocValuesField("dv", 5));
    doc.add(new IntPoint("dint", 5));
    doc.add(new StoredField("stored", 5)); // not indexed
    iw.addDocument(doc);

    iw.forceMerge(1);
    iw.close();

    Map<String, Type> uninvertingMap = new HashMap<>();
    uninvertingMap.put("int", Type.LEGACY_INTEGER);
    uninvertingMap.put("dv", Type.LEGACY_INTEGER);
    uninvertingMap.put("dint", Type.INTEGER_POINT);

    DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), uninvertingMap);
    LeafReader leafReader = ir.leaves().get(0).reader();

    FieldInfo intFInfo = leafReader.getFieldInfos().fieldInfo("int");
    assertEquals(DocValuesType.NUMERIC, intFInfo.getDocValuesType());
    assertEquals(0, intFInfo.getPointDimensionCount());
    assertEquals(0, intFInfo.getPointNumBytes());

    FieldInfo dintFInfo = leafReader.getFieldInfos().fieldInfo("dint");
    assertEquals(DocValuesType.NUMERIC, dintFInfo.getDocValuesType());
    assertEquals(1, dintFInfo.getPointDimensionCount());
    assertEquals(4, dintFInfo.getPointNumBytes());

    FieldInfo dvFInfo = leafReader.getFieldInfos().fieldInfo("dv");
    assertEquals(DocValuesType.NUMERIC, dvFInfo.getDocValuesType());

    FieldInfo storedFInfo = leafReader.getFieldInfos().fieldInfo("stored");
    assertEquals(DocValuesType.NONE, storedFInfo.getDocValuesType());

    TestUtil.checkReader(ir);
    ir.close();
    dir.close();
}

From source file:org.archive.index.IndexFiles.java

License:Apache License

/**
 * index files: ..._check.xml
 * **/
public static void indexFiles_check(String dirStr) {
    String indexPath = TDirectory.LPFileIndexPath;

    try {
        logOddFile = new PrintStream(
                new FileOutputStream(new File(TDirectory.ROOT_OUTPUT + "logOddCheckIndexFiles.txt")));

        System.out.println("Indexing to directory '" + indexPath + "'...");

        Directory dir = FSDirectory.open(new File(indexPath));
        Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_48);
        IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_48, analyzer);
        boolean create = true;
        if (create) {
            // Create a new index in the directory, removing any previously indexed documents:
            iwc.setOpenMode(OpenMode.CREATE);
        } else {
            // Add new documents to an existing index:
            iwc.setOpenMode(OpenMode.CREATE_OR_APPEND);
        }
        IndexWriter indexWriter = new IndexWriter(dir, iwc);

        Date start = new Date();

        File dirFile = new File(dirStr);
        File[] files = dirFile.listFiles();
        System.out.println(files.length);

        int count = 1;
        int badCount = 0;
        for (File f : files) {
            System.out.print("file-" + count + "\t");
            count++;

            List<org.apache.lucene.document.Document> docs = new ArrayList<org.apache.lucene.document.Document>();

            List<TreeMap<String, String>> checkdocList = TemLoader.parseCheckFile(logOddFile,
                    f.getAbsolutePath());

            if (null == checkdocList) {
                System.out.print("null");
                System.out.println();

                badCount++;
                continue;
            }
            System.out.print(f);
            System.out.println();

            for (TreeMap<String, String> checkdoc : checkdocList) {
                // make a new, empty document
                org.apache.lucene.document.Document doc = new org.apache.lucene.document.Document();

                Field idField = new StringField("id", checkdoc.get("id"), Field.Store.YES);
                doc.add(idField);
                for (Entry<String, String> entry : checkdoc.entrySet()) {
                    if (!entry.getKey().equals("id")) {
                        StoredField storeField = new StoredField(entry.getKey(), entry.getValue());
                        doc.add(storeField);
                    }
                }

                docs.add(doc);
            }

            for (org.apache.lucene.document.Document doc : docs) {
                indexWriter.addDocument(doc);
            }
        }

        indexWriter.commit();
        indexWriter.close();

        logOddFile.flush();
        logOddFile.close();

        Date end = new Date();
        System.out.println(end.getTime() - start.getTime() + " total milliseconds");

        System.out.println("BadCount:\t" + badCount);
    } catch (Exception e) {
        // TODO: handle exception
        e.printStackTrace();
    }
}

From source file:org.cee.store.lucene.DocumentBuilder.java

License:Apache License

public DocumentBuilder addStoredField(String name, String value) {
    if (value != null) {
        document.add(new StoredField(name, value));
    }
    return this;
}

From source file:org.codice.ddf.spatial.geocoding.index.GeoNamesLuceneIndexer.java

License:Open Source License

private void addDocument(final IndexWriter indexWriter, final GeoEntry geoEntry, final SpatialStrategy strategy)
        throws IOException {
    final Document document = new Document();
    document.add(new TextField(GeoNamesLuceneConstants.NAME_FIELD, geoEntry.getName(), Field.Store.YES));

    document.add(new StoredField(GeoNamesLuceneConstants.LATITUDE_FIELD, geoEntry.getLatitude()));

    document.add(new StoredField(GeoNamesLuceneConstants.LONGITUDE_FIELD, geoEntry.getLongitude()));

    document.add(new StringField(GeoNamesLuceneConstants.FEATURE_CODE_FIELD, geoEntry.getFeatureCode(),
            Field.Store.YES));

    document.add(new TextField(GeoNamesLuceneConstants.COUNTRY_CODE_FIELD, geoEntry.getCountryCode(),
            Field.Store.YES));

    document.add(new StoredField(GeoNamesLuceneConstants.POPULATION_FIELD, geoEntry.getPopulation()));
    // This DocValues field is used for sorting by population.
    document.add(new NumericDocValuesField(GeoNamesLuceneConstants.POPULATION_DOCVALUES_FIELD,
            geoEntry.getPopulation()));

    document.add(new TextField(GeoNamesLuceneConstants.ALTERNATE_NAMES_FIELD, geoEntry.getAlternateNames(),
            Field.Store.NO));

    // Add each entry's spatial information for fast spatial filtering.
    final Shape point = SPATIAL_CONTEXT.getShapeFactory().pointXY(geoEntry.getLongitude(),
            geoEntry.getLatitude());
    for (IndexableField field : strategy.createIndexableFields(point)) {
        document.add(field);
    }

    final float boost = calculateBoost(geoEntry);
    document.add(new FloatDocValuesField(GeoNamesLuceneConstants.BOOST_FIELD, boost));

    indexWriter.addDocument(document);
}

From source file:org.codice.ddf.spatial.geocoding.query.GeoNamesQueryLuceneIndexTest.java

License:Open Source License

private Document createDocumentFromGeoEntry(final GeoEntry geoEntry) {
    final Document document = new Document();

    document.add(new TextField(GeoNamesLuceneConstants.NAME_FIELD, geoEntry.getName(), Field.Store.YES));
    document.add(new StoredField(GeoNamesLuceneConstants.LATITUDE_FIELD, geoEntry.getLatitude()));
    document.add(new StoredField(GeoNamesLuceneConstants.LONGITUDE_FIELD, geoEntry.getLongitude()));
    document.add(new StringField(GeoNamesLuceneConstants.FEATURE_CODE_FIELD, geoEntry.getFeatureCode(),
            Field.Store.YES));
    document.add(new StoredField(GeoNamesLuceneConstants.POPULATION_FIELD, geoEntry.getPopulation()));
    document.add(new NumericDocValuesField(GeoNamesLuceneConstants.POPULATION_DOCVALUES_FIELD,
            geoEntry.getPopulation()));
    document.add(new StringField(GeoNamesLuceneConstants.COUNTRY_CODE_FIELD, geoEntry.getCountryCode(),
            Field.Store.YES));

    document.add(new TextField(GeoNamesLuceneConstants.ALTERNATE_NAMES_FIELD, geoEntry.getAlternateNames(),
            Field.Store.NO));

    final Shape point = SPATIAL_CONTEXT.getShapeFactory().pointXY(geoEntry.getLongitude(),
            geoEntry.getLatitude());
    for (IndexableField field : strategy.createIndexableFields(point)) {
        document.add(field);
    }

    return document;
}

From source file:org.eclipse.rdf4j.sail.lucene.LuceneIndex.java

License:Open Source License

public static void addStoredOnlyPredicateField(String predicate, String text, Document document) {
    // store this predicate
    document.add(new StoredField(predicate, text));
}

From source file:org.efaps.admin.index.Indexer.java

License:Apache License

/**
 * Index or reindex a given list of instances. The given instances must be
 * all of the same type!
 *
 * @param _context the _context
 * @param _instances the instances
 * @throws EFapsException the e faps exception
 */
public static void index(final IndexContext _context, final List<Instance> _instances) throws EFapsException {
    if (CollectionUtils.isNotEmpty(_instances)) {
        final Company currentCompany = Context.getThreadContext().getCompany();
        final String currentLanguage = Context.getThreadContext().getLanguage();

        Context.getThreadContext().setCompany(Company.get(_context.getCompanyId()));
        Context.getThreadContext().setLanguage(_context.getLanguage());
        final IndexWriterConfig config = new IndexWriterConfig(_context.getAnalyzer());
        try (IndexWriter writer = new IndexWriter(_context.getDirectory(), config);
                TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(_context.getTaxonomyDirectory());) {

            final IndexDefinition def = IndexDefinition.get(_instances.get(0).getType().getUUID());
            final MultiPrintQuery multi = new MultiPrintQuery(_instances);
            for (final IndexField field : def.getFields()) {
                multi.addSelect(field.getSelect());
            }
            Attribute createdAttr = null;
            if (!_instances.get(0).getType().getAttributes(CreatedType.class).isEmpty()) {
                createdAttr = _instances.get(0).getType().getAttributes(CreatedType.class).iterator().next();
                multi.addAttribute(createdAttr);
            }
            multi.addMsgPhrase(def.getMsgPhrase());
            multi.executeWithoutAccessCheck();
            while (multi.next()) {
                final String oid = multi.getCurrentInstance().getOid();
                final String type = multi.getCurrentInstance().getType().getLabel();
                final DateTime created;
                if (createdAttr == null) {
                    created = new DateTime();
                } else {
                    created = multi.getAttribute(createdAttr);
                }

                final Document doc = new Document();
                doc.add(new FacetField(Dimension.DIMTYPE.name(), type));
                doc.add(new FacetField(Dimension.DIMCREATED.name(), String.valueOf(created.getYear()),
                        String.format("%02d", created.getMonthOfYear())));
                doc.add(new StringField(Key.OID.name(), oid, Store.YES));
                doc.add(new TextField(DBProperties.getProperty("index.Type"), type, Store.YES));
                doc.add(new NumericDocValuesField(Key.CREATED.name(), created.getMillis()));
                doc.add(new StringField(Key.CREATEDSTR.name(),
                        DateTools.dateToString(created.toDate(), DateTools.Resolution.DAY), Store.NO));

                final StringBuilder allBldr = new StringBuilder().append(type).append(" ");

                for (final IndexField field : def.getFields()) {
                    final String name = DBProperties.getProperty(field.getKey());
                    Object value = multi.getSelect(field.getSelect());
                    if (value != null) {
                        if (StringUtils.isNoneEmpty(field.getTransform())) {
                            final Class<?> clazz = Class.forName(field.getTransform(), false,
                                    EFapsClassLoader.getInstance());
                            final ITransformer transformer = (ITransformer) clazz.newInstance();
                            value = transformer.transform(value);
                        }
                        switch (field.getFieldType()) {
                        case LONG:
                            long val = 0;
                            if (value instanceof String) {
                                val = NumberUtils.toLong((String) value);
                            } else if (value instanceof Number) {
                                val = ((Number) value).longValue();
                            }
                            doc.add(new LongField(name, val, Store.YES));
                            allBldr.append(value).append(" ");
                            break;
                        case SEARCHLONG:
                            long val2 = 0;
                            if (value instanceof String) {
                                val2 = NumberUtils.toLong((String) value);
                            } else if (value instanceof Number) {
                                val2 = ((Number) value).longValue();
                            }
                            doc.add(new LongField(name, val2, Store.NO));
                            allBldr.append(value).append(" ");
                            break;
                        case STRING:
                            doc.add(new StringField(name, String.valueOf(value), Store.YES));
                            allBldr.append(value).append(" ");
                            break;
                        case SEARCHSTRING:
                            doc.add(new StringField(name, String.valueOf(value), Store.NO));
                            allBldr.append(value).append(" ");
                            break;
                        case TEXT:
                            doc.add(new TextField(name, String.valueOf(value), Store.YES));
                            allBldr.append(value).append(" ");
                            break;
                        case SEARCHTEXT:
                            doc.add(new TextField(name, String.valueOf(value), Store.NO));
                            allBldr.append(value).append(" ");
                            break;
                        case STORED:
                            doc.add(new StoredField(name, String.valueOf(value)));
                            allBldr.append(value).append(" ");
                            break;
                        default:
                            break;
                        }
                    }
                }
                doc.add(new StoredField(Key.MSGPHRASE.name(), multi.getMsgPhrase(def.getMsgPhrase())));
                doc.add(new TextField(Key.ALL.name(), allBldr.toString(), Store.NO));
                writer.updateDocument(new Term(Key.OID.name(), oid),
                        Index.getFacetsConfig().build(taxonomyWriter, doc));
                LOG.debug("Add Document: {}", doc);
            }
            writer.close();
            taxonomyWriter.close();
        } catch (final IOException | ClassNotFoundException | InstantiationException
                | IllegalAccessException e) {
            throw new EFapsException(Indexer.class, "IOException", e);
        } finally {
            Context.getThreadContext().setCompany(currentCompany);
            Context.getThreadContext().setLanguage(currentLanguage);
        }
    }
}