List of usage examples for org.apache.commons.lang.mutable.MutableLong.longValue()
public long longValue()
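For reference, here is a minimal standalone sketch (not taken from any of the projects below; the class and method names are the Commons Lang MutableLong API, everything else is illustrative) showing how longValue() reads back the current value of a MutableLong used as a counter:

import org.apache.commons.lang.mutable.MutableLong;

public class MutableLongExample {
    public static void main(String[] args) {
        MutableLong counter = new MutableLong(0); // start the counter at zero
        counter.increment();                      // counter is now 1
        counter.add(41L);                         // counter is now 42
        long snapshot = counter.longValue();      // read the current value as a primitive long
        System.out.println(snapshot);             // prints 42
    }
}

The examples that follow use the same pattern: a MutableLong is threaded through loops or lambdas as a mutable counter or threshold, and longValue() extracts the primitive result.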
From source file: org.apache.accumulo.core.clientImpl.ConditionalWriterImpl.java

private void convertMutations(TabletServerMutations<QCMutation> mutations, Map<Long, CMK> cmidToCm,
        MutableLong cmid, Map<TKeyExtent, List<TConditionalMutation>> tmutations,
        CompressedIterators compressedIters) {

    for (Entry<KeyExtent, List<QCMutation>> entry : mutations.getMutations().entrySet()) {
        TKeyExtent tke = entry.getKey().toThrift();
        ArrayList<TConditionalMutation> tcondMutaions = new ArrayList<>();

        List<QCMutation> condMutations = entry.getValue();

        for (QCMutation cm : condMutations) {
            TMutation tm = cm.toThrift();

            List<TCondition> conditions = convertConditions(cm, compressedIters);

            cmidToCm.put(cmid.longValue(), new CMK(entry.getKey(), cm));
            TConditionalMutation tcm = new TConditionalMutation(conditions, tm, cmid.longValue());
            cmid.increment();
            tcondMutaions.add(tcm);
        }

        tmutations.put(tke, tcondMutaions);
    }
}
From source file: org.apache.bookkeeper.bookie.InterleavedLedgerStorage.java

@Override
public List<DetectedInconsistency> localConsistencyCheck(Optional<RateLimiter> rateLimiter) throws IOException {
    long checkStart = MathUtils.nowInNano();
    LOG.info("Starting localConsistencyCheck");
    long checkedLedgers = 0;
    long checkedPages = 0;
    final MutableLong checkedEntries = new MutableLong(0);
    final MutableLong pageRetries = new MutableLong(0);
    NavigableMap<Long, Boolean> bkActiveLedgersSnapshot = activeLedgers.snapshot();
    final List<DetectedInconsistency> errors = new ArrayList<>();
    for (Long ledger : bkActiveLedgersSnapshot.keySet()) {
        try (LedgerCache.PageEntriesIterable pages = ledgerCache.listEntries(ledger)) {
            for (LedgerCache.PageEntries page : pages) {
                @Cleanup
                LedgerEntryPage lep = page.getLEP();
                MutableBoolean retry = new MutableBoolean(false);
                do {
                    retry.setValue(false);
                    int version = lep.getVersion();
                    MutableBoolean success = new MutableBoolean(true);
                    long start = MathUtils.nowInNano();
                    lep.getEntries((entry, offset) -> {
                        rateLimiter.ifPresent(RateLimiter::acquire);
                        try {
                            entryLogger.checkEntry(ledger, entry, offset);
                            checkedEntries.increment();
                        } catch (EntryLogger.EntryLookupException e) {
                            if (version != lep.getVersion()) {
                                pageRetries.increment();
                                if (lep.isDeleted()) {
                                    LOG.debug("localConsistencyCheck: ledger {} deleted", ledger);
                                } else {
                                    LOG.debug("localConsistencyCheck: concurrent modification, retrying");
                                    retry.setValue(true);
                                    retryCounter.inc();
                                }
                                return false;
                            } else {
                                errors.add(new DetectedInconsistency(ledger, entry, e));
                                LOG.error("Got error: ", e);
                            }
                            success.setValue(false);
                        }
                        return true;
                    });
                    if (success.booleanValue()) {
                        pageScanStats.registerSuccessfulEvent(MathUtils.elapsedNanos(start), TimeUnit.NANOSECONDS);
                    } else {
                        pageScanStats.registerFailedEvent(MathUtils.elapsedNanos(start), TimeUnit.NANOSECONDS);
                    }
                } while (retry.booleanValue());
                checkedPages++;
            }
        } catch (NoLedgerException | FileInfo.FileInfoDeletedException e) {
            if (activeLedgers.containsKey(ledger)) {
                LOG.error("Cannot find ledger {}, should exist, exception is ", ledger, e);
                errors.add(new DetectedInconsistency(ledger, -1, e));
            } else {
                LOG.debug("ledger {} deleted since snapshot taken", ledger);
            }
        } catch (Exception e) {
            throw new IOException("Got other exception in localConsistencyCheck", e);
        }
        checkedLedgers++;
    }
    LOG.info(
            "Finished localConsistencyCheck, took {}s to scan {} ledgers, {} pages, "
                    + "{} entries with {} retries, {} errors",
            TimeUnit.NANOSECONDS.toSeconds(MathUtils.elapsedNanos(checkStart)), checkedLedgers, checkedPages,
            checkedEntries.longValue(), pageRetries.longValue(), errors.size());
    return errors;
}
From source file: org.apache.bookkeeper.bookie.InterleavedLedgerStorageTest.java

@Test
public void testIndexEntryIterator() throws Exception {
    try (LedgerCache.PageEntriesIterable pages = interleavedStorage.getIndexEntries(0)) {
        MutableLong curEntry = new MutableLong(0);
        for (LedgerCache.PageEntries page : pages) {
            try (LedgerEntryPage lep = page.getLEP()) {
                lep.getEntries((entry, offset) -> {
                    Assert.assertEquals(curEntry.longValue(), entry);
                    Assert.assertNotEquals(0, offset);
                    curEntry.setValue(entriesPerWrite + entry);
                    return true;
                });
            }
        }
        Assert.assertEquals(entriesPerWrite * numWrites, curEntry.longValue());
    }
}
From source file: org.apache.bookkeeper.bookie.storage.ldb.SingleDirectoryDbLedgerStorage.java

/**
 * Add an already existing ledger to the index.
 *
 * <p>This method is only used as a tool to help the migration from InterleavedLedgerStorage to DbLedgerStorage.
 *
 * @param ledgerId
 *            the ledger id
 * @param pages
 *            Iterator over index pages from Indexed
 * @return the number of entries added to the index
 */
public long addLedgerToIndex(long ledgerId, boolean isFenced, byte[] masterKey,
        LedgerCache.PageEntriesIterable pages) throws Exception {
    LedgerData ledgerData = LedgerData.newBuilder().setExists(true).setFenced(isFenced)
            .setMasterKey(ByteString.copyFrom(masterKey)).build();
    ledgerIndex.set(ledgerId, ledgerData);
    MutableLong numberOfEntries = new MutableLong();

    // Iterate over all the entries pages
    Batch batch = entryLocationIndex.newBatch();
    for (LedgerCache.PageEntries page : pages) {
        try (LedgerEntryPage lep = page.getLEP()) {
            lep.getEntries((entryId, location) -> {
                entryLocationIndex.addLocation(batch, ledgerId, entryId, location);
                numberOfEntries.increment();
                return true;
            });
        }
    }

    batch.flush();
    batch.close();

    return numberOfEntries.longValue();
}
From source file: org.apache.bookkeeper.statelib.impl.mvcc.MVCCStoreImpl.java

long deleteUsingIter(WriteBatch batch, K key, byte[] rawKey, @Nullable byte[] rawEndKey,
        List<byte[]> resultKeys, List<MVCCRecord> resultValues, boolean countOnly) {
    MutableLong numKvs = new MutableLong(0L);
    if (null == rawEndKey) {
        MVCCRecord record = getKeyRecord(key, rawKey);
        if (null != record) {
            if (!countOnly) {
                resultKeys.add(rawKey);
                resultValues.add(record);
            } else {
                record.recycle();
            }
            numKvs.add(1L);
            try {
                batch.delete(rawKey);
            } catch (RocksDBException e) {
                throw new StateStoreRuntimeException(e);
            }
        }
    } else {
        Pair<byte[], byte[]> realRange = getRealRange(rawKey, rawEndKey);
        rawKey = realRange.getLeft();
        rawEndKey = realRange.getRight();
        getKeyRecords(rawKey, rawEndKey, resultKeys, resultValues, numKvs, null, -1, countOnly);
        deleteBlind(batch, rawKey, rawEndKey);
    }
    return numKvs.longValue();
}
From source file: org.apache.mahout.common.cache.LFUCache.java

@Override
public V get(K key) {
    Pair<V, MutableLong> data = dataMap.get(key);
    if (data == null) {
        return null;
    } else {
        V value = data.getFirst();
        MutableLong count = data.getSecond();
        long oldCount = count.longValue();
        count.increment();
        incrementHit(key, oldCount);
        return value;
    }
}
From source file: org.apache.mahout.fpm.pfpgrowth.fpgrowth.FPGrowth.java

/**
 * Top K FpGrowth Algorithm
 *
 * @param tree
 *          to be mined
 * @param minSupportValue
 *          minimum support of the pattern to keep
 * @param k
 *          Number of top frequent patterns to keep
 * @param requiredFeatures
 *          Set of integer id's of features to mine
 * @param outputCollector
 *          the Collector class which converts the given frequent pattern in
 *          integer to A
 * @return Top K Frequent Patterns for each feature and their support
 */
private Map<Integer, FrequentPatternMaxHeap> fpGrowth(FPTree tree, long minSupportValue, int k,
    Collection<Integer> requiredFeatures, TopKPatternsOutputConverter<A> outputCollector,
    StatusUpdater updater) throws IOException {

  Map<Integer, FrequentPatternMaxHeap> patterns = Maps.newHashMap();
  FPTreeDepthCache treeCache = new FPTreeDepthCache();
  for (int i = tree.getHeaderTableCount() - 1; i >= 0; i--) {
    int attribute = tree.getAttributeAtIndex(i);
    if (requiredFeatures.contains(attribute)) {
      log.info("Mining FTree Tree for all patterns with {}", attribute);
      MutableLong minSupport = new MutableLong(minSupportValue);
      FrequentPatternMaxHeap frequentPatterns = growth(tree, minSupport, k, treeCache, 0, attribute, updater);
      patterns.put(attribute, frequentPatterns);
      outputCollector.collect(attribute, frequentPatterns);

      minSupportValue = Math.max(minSupportValue, minSupport.longValue() / 2);
      log.info("Found {} Patterns with Least Support {}", patterns.get(attribute).count(),
          patterns.get(attribute).leastSupport());
    }
  }
  log.info("Tree Cache: First Level: Cache hits={} Cache Misses={}", treeCache.getHits(),
      treeCache.getMisses());
  return patterns;
}
From source file: org.apache.mahout.fpm.pfpgrowth.fpgrowth.FPGrowth.java

private static FrequentPatternMaxHeap growth(FPTree tree, MutableLong minSupportMutable, int k,
    FPTreeDepthCache treeCache, int level, int currentAttribute, StatusUpdater updater) {

  FrequentPatternMaxHeap frequentPatterns = new FrequentPatternMaxHeap(k, true);

  int i = Arrays.binarySearch(tree.getHeaderTableAttributes(), currentAttribute);
  if (i < 0) {
    return frequentPatterns;
  }

  int headerTableCount = tree.getHeaderTableCount();

  while (i < headerTableCount) {
    int attribute = tree.getAttributeAtIndex(i);
    long count = tree.getHeaderSupportCount(attribute);
    if (count < minSupportMutable.longValue()) {
      i++;
      continue;
    }
    updater.update("FPGrowth Algorithm for a given feature: " + attribute);
    FPTree conditionalTree = treeCache.getFirstLevelTree(attribute);
    if (conditionalTree.isEmpty()) {
      traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
          conditionalTree, tree);
      // printTree(conditionalTree);
    }

    FrequentPatternMaxHeap returnedPatterns;
    if (attribute == currentAttribute) {
      returnedPatterns = growthTopDown(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
          currentAttribute, updater);
      frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
    } else {
      returnedPatterns = growthTopDown(conditionalTree, minSupportMutable, k, treeCache, level + 1, false,
          currentAttribute, updater);
      frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, false);
    }
    if (frequentPatterns.isFull() && minSupportMutable.longValue() < frequentPatterns.leastSupport()) {
      minSupportMutable.setValue(frequentPatterns.leastSupport());
    }
    i++;
  }

  return frequentPatterns;
}
From source file: org.apache.mahout.fpm.pfpgrowth.fpgrowth.FPGrowth.java

private static FrequentPatternMaxHeap growthBottomUp(FPTree tree, MutableLong minSupportMutable, int k,
    FPTreeDepthCache treeCache, int level, boolean conditionalOfCurrentAttribute, int currentAttribute,
    StatusUpdater updater) {

  FrequentPatternMaxHeap frequentPatterns = new FrequentPatternMaxHeap(k, false);

  if (!conditionalOfCurrentAttribute) {
    int index = Arrays.binarySearch(tree.getHeaderTableAttributes(), currentAttribute);
    if (index < 0) {
      return frequentPatterns;
    } else {
      int attribute = tree.getAttributeAtIndex(index);
      long count = tree.getHeaderSupportCount(attribute);
      if (count < minSupportMutable.longValue()) {
        return frequentPatterns;
      }
    }
  }

  if (tree.singlePath()) {
    return generateSinglePathPatterns(tree, k, minSupportMutable.longValue());
  }

  updater.update("Bottom Up FP Growth");
  for (int i = tree.getHeaderTableCount() - 1; i >= 0; i--) {
    int attribute = tree.getAttributeAtIndex(i);
    long count = tree.getHeaderSupportCount(attribute);
    if (count < minSupportMutable.longValue()) {
      continue;
    }
    FPTree conditionalTree = treeCache.getTree(level);

    FrequentPatternMaxHeap returnedPatterns;
    if (conditionalOfCurrentAttribute) {
      traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
          conditionalTree, tree);
      returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
          currentAttribute, updater);
      frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
    } else {
      if (attribute == currentAttribute) {
        traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
            conditionalTree, tree);
        returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
            currentAttribute, updater);
        frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
      } else if (attribute > currentAttribute) {
        traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
            conditionalTree, tree);
        returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1, false,
            currentAttribute, updater);
        frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, false);
      }
    }

    if (frequentPatterns.isFull() && minSupportMutable.longValue() < frequentPatterns.leastSupport()) {
      minSupportMutable.setValue(frequentPatterns.leastSupport());
    }
  }

  return frequentPatterns;
}
From source file: org.apache.mahout.fpm.pfpgrowth.fpgrowth.FPGrowth.java

private static FrequentPatternMaxHeap growthTopDown(FPTree tree, MutableLong minSupportMutable, int k,
    FPTreeDepthCache treeCache, int level, boolean conditionalOfCurrentAttribute, int currentAttribute,
    StatusUpdater updater) {

  FrequentPatternMaxHeap frequentPatterns = new FrequentPatternMaxHeap(k, true);

  if (!conditionalOfCurrentAttribute) {
    int index = Arrays.binarySearch(tree.getHeaderTableAttributes(), currentAttribute);
    if (index < 0) {
      return frequentPatterns;
    } else {
      int attribute = tree.getAttributeAtIndex(index);
      long count = tree.getHeaderSupportCount(attribute);
      if (count < minSupportMutable.longValue()) {
        return frequentPatterns;
      }
    }
  }

  if (tree.singlePath()) {
    return generateSinglePathPatterns(tree, k, minSupportMutable.longValue());
  }

  updater.update("Top Down Growth:");
  for (int i = 0; i < tree.getHeaderTableCount(); i++) {
    int attribute = tree.getAttributeAtIndex(i);
    long count = tree.getHeaderSupportCount(attribute);
    if (count < minSupportMutable.longValue()) {
      continue;
    }
    FPTree conditionalTree = treeCache.getTree(level);

    FrequentPatternMaxHeap returnedPatterns;
    if (conditionalOfCurrentAttribute) {
      traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
          conditionalTree, tree);
      returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
          currentAttribute, updater);
      frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
    } else {
      if (attribute == currentAttribute) {
        traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
            conditionalTree, tree);
        returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
            currentAttribute, updater);
        frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
      } else if (attribute > currentAttribute) {
        traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
            conditionalTree, tree);
        returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1, false,
            currentAttribute, updater);
        frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, false);
      }
    }

    if (frequentPatterns.isFull() && minSupportMutable.longValue() < frequentPatterns.leastSupport()) {
      minSupportMutable.setValue(frequentPatterns.leastSupport());
    }
  }

  return frequentPatterns;
}