List of usage examples for org.apache.lucene.util.RamUsageEstimator#humanReadableUnits
public static String humanReadableUnits(long bytes)
Returns the size in human-readable units (GB, MB, KB or bytes). From source file: org.apache.solr.search.LRUCache.java
License:Apache License
@Override public V put(K key, V value) { synchronized (map) { if (getState() == State.LIVE) { stats.inserts.increment();//w ww. ja v a2 s . com } // increment local inserts regardless of state??? // it does make it more consistent with the current size... inserts++; // important to calc and add new ram bytes first so that removeEldestEntry can compare correctly long keySize = DEFAULT_RAM_BYTES_USED; if (maxRamBytes != Long.MAX_VALUE) { if (key != null && key instanceof Accountable) { keySize = ((Accountable) key).ramBytesUsed(); } long valueSize = 0; if (value != null) { if (value instanceof Accountable) { Accountable accountable = (Accountable) value; valueSize = accountable.ramBytesUsed(); } else { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Cache: " + getName() + " is configured with maxRamBytes=" + RamUsageEstimator.humanReadableUnits(maxRamBytes) + " but its values do not implement org.apache.lucene.util.Accountable"); } } ramBytesUsed += keySize + valueSize + LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY; } V old = map.put(key, value); if (maxRamBytes != Long.MAX_VALUE && old != null) { long bytesToDecrement = ((Accountable) old).ramBytesUsed(); // the key existed in the map but we added its size before the put, so let's back out bytesToDecrement += LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY; if (key != null) { if (key instanceof Accountable) { Accountable aKey = (Accountable) key; bytesToDecrement += aKey.ramBytesUsed(); } else { bytesToDecrement += DEFAULT_RAM_BYTES_USED; } } ramBytesUsed -= bytesToDecrement; } return old; } }
From source file: org.apache.solr.uninverting.UninvertingReader.java
License:Apache License
/** * Return information about the backing cache * @lucene.internal /*w w w .j a v a 2 s . c o m*/ */ public static FieldCacheStats getUninvertedStats() { CacheEntry[] entries = FieldCache.DEFAULT.getCacheEntries(); long totalBytesUsed = 0; String[] info = new String[entries.length]; for (int i = 0; i < entries.length; i++) { info[i] = entries[i].toString(); totalBytesUsed += entries[i].getValue().ramBytesUsed(); } String totalSize = RamUsageEstimator.humanReadableUnits(totalBytesUsed); return new FieldCacheStats(totalSize, info); }
From source file: org.elasticsearch.benchmark.fielddata.LongFieldDataBenchmark.java
License:Apache License
/**
 * Micro-benchmark entry point: for each synthetic {@code Data} distribution, builds a
 * 1,000,000-document RAM-resident index with long values in field "f", loads the field
 * data for that field, and prints one tab-separated row per distribution:
 * data set, load time (ms), field-data implementation class, actual size, expected size.
 */
public static void main(String[] args) throws Exception {
    // NOTE(review): this single IndexWriterConfig instance is reused for every
    // iteration's IndexWriter; newer Lucene versions forbid reusing an
    // IndexWriterConfig across writers — confirm against the Lucene version in use.
    final IndexWriterConfig iwc = new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer());
    final String fieldName = "f";
    final int numDocs = 1000000;
    System.out.println("Data\tLoading time\tImplementation\tActual size\tExpected size");
    for (Data data : Data.values()) {
        final RAMDirectory dir = new RAMDirectory();
        final IndexWriter indexWriter = new IndexWriter(dir, iwc);
        for (int i = 0; i < numDocs; ++i) {
            final Document doc = new Document();
            // each Data value controls how many long values a document carries
            final int numFields = data.numValues();
            for (int j = 0; j < numFields; ++j) {
                doc.add(new LongField(fieldName, data.nextValue(), Store.NO));
            }
            indexWriter.addDocument(doc);
        }
        // collapse to a single segment before measuring load cost
        indexWriter.forceMerge(1);
        indexWriter.close();
        final DirectoryReader dr = DirectoryReader.open(dir);
        final IndexFieldDataService fds = new IndexFieldDataService(new Index("dummy"),
                new DummyCircuitBreakerService());
        final LongFieldMapper mapper = new LongFieldMapper.Builder(fieldName)
                .build(new BuilderContext(null, new ContentPath(1)));
        final IndexNumericFieldData<AtomicNumericFieldData> fd = fds.getForField(mapper);
        // time only the field-data load, not index construction
        final long start = System.nanoTime();
        final AtomicNumericFieldData afd = fd.loadDirect(SlowCompositeReaderWrapper.wrap(dr).getContext());
        final long loadingTimeMs = (System.nanoTime() - start) / 1000 / 1000;
        System.out.println(data + "\t" + loadingTimeMs + "\t" + afd.getClass().getSimpleName() + "\t"
                + RamUsageEstimator.humanSizeOf(afd.getLongValues()) + "\t"
                + RamUsageEstimator.humanReadableUnits(afd.getMemorySizeInBytes()));
        dr.close();
    }
}