List of usage examples for org.apache.commons.collections MultiMap remove
Object remove(Object key);
From source file: edu.uci.ics.jung.algorithms.blockmodel.GraphCollapser.java
/** * INTERNAL (undocumented) method/*from ww w. j a v a2 s .co m*/ * @param m * @param dest * @param superV */ protected void replaceWith(MultiMap m, Vertex dest, CollapsedVertex superV) { Collection c = (Collection) m.get(dest); for (Iterator iter = c.iterator(); iter.hasNext();) { m.put(superV, iter.next()); } m.remove(dest); }
From source file: org.apache.flume.channel.file.ReplayHandler.java
/**
 * Replays the given file-channel logs, rebuilding in-flight transactions and
 * re-applying committed operations to the channel queue.
 *
 * <p>Each log is read sequentially from the last checkpoint position. PUT and
 * TAKE records accumulate {@link FlumeEventPointer}s in {@code transactionMap}
 * keyed by transaction id; ROLLBACK discards a transaction's pointers; COMMIT
 * drains them into {@code processCommit}. Records at or before the last
 * checkpoint timestamp are skipped.
 *
 * @param logs the log files to replay, in order
 * @throws IOException if a log file cannot be read
 */
void replayLog(List<File> logs) throws IOException {
    int total = 0;
    int count = 0;
    // Pending pointers per transaction id until COMMIT/ROLLBACK is seen.
    MultiMap transactionMap = new MultiValueMap();
    LOG.info("Starting replay of " + logs);
    for (File log : logs) {
        LOG.info("Replaying " + log);
        LogFile.SequentialReader reader = null;
        try {
            reader = new LogFile.SequentialReader(log);
            reader.skipToLastCheckpointPosition(queue.getTimestamp());
            Pair<Integer, TransactionEventRecord> entry;
            FlumeEventPointer ptr;
            // for puts the fileId is the fileID of the file they exist in
            // for takes the fileId and offset are pointers to a put
            int fileId = reader.getLogFileID();
            int readCount = 0;
            int putCount = 0;
            int takeCount = 0;
            int rollbackCount = 0;
            int commitCount = 0;
            int skipCount = 0;
            while ((entry = reader.next()) != null) {
                int offset = entry.getLeft();
                TransactionEventRecord record = entry.getRight();
                short type = record.getRecordType();
                long trans = record.getTransactionID();
                readCount++;
                if (record.getTimestamp() > lastCheckpoint) {
                    if (type == TransactionEventRecord.Type.PUT.get()) {
                        putCount++;
                        ptr = new FlumeEventPointer(fileId, offset);
                        transactionMap.put(trans, ptr);
                    } else if (type == TransactionEventRecord.Type.TAKE.get()) {
                        takeCount++;
                        Take take = (Take) record;
                        ptr = new FlumeEventPointer(take.getFileID(), take.getOffset());
                        transactionMap.put(trans, ptr);
                    } else if (type == TransactionEventRecord.Type.ROLLBACK.get()) {
                        rollbackCount++;
                        transactionMap.remove(trans);
                    } else if (type == TransactionEventRecord.Type.COMMIT.get()) {
                        commitCount++;
                        @SuppressWarnings("unchecked")
                        Collection<FlumeEventPointer> pointers = (Collection<FlumeEventPointer>) transactionMap
                                .remove(trans);
                        if (pointers != null && pointers.size() > 0) {
                            processCommit(((Commit) record).getType(), pointers);
                            count += pointers.size();
                        }
                    } else {
                        Preconditions.checkArgument(false, "Unknown record type: " + Integer.toHexString(type));
                    }
                } else {
                    skipCount++;
                }
            }
            LOG.info("Replayed " + count + " from " + log);
            if (LOG.isDebugEnabled()) {
                // Typo fix: was "skipp:".
                LOG.debug("read: " + readCount + ", put: " + putCount + ", take: " + takeCount + ", rollback: "
                        + rollbackCount + ", commit: " + commitCount + ", skip: " + skipCount);
            }
        } catch (EOFException e) {
            // Truncated tail is expected for the last, partially-written log.
            LOG.warn("Hit EOF on " + log);
        } finally {
            total += count;
            count = 0;
            if (reader != null) {
                reader.close();
            }
        }
    }
    int pendingTakesSize = pendingTakes.size();
    if (pendingTakesSize > 0) {
        String msg = "Pending takes " + pendingTakesSize + " exist after the end of replay";
        // BUG FIX: the error used to sit in the else-branch of
        // isDebugEnabled(), so with debug logging ON the duplicate-message
        // warning was never emitted at all. Log it unconditionally and add
        // per-pointer detail at debug level.
        LOG.error(msg + ". Duplicate messages will exist in destination.");
        if (LOG.isDebugEnabled()) {
            for (Long pointer : pendingTakes) {
                LOG.debug("Pending take " + FlumeEventPointer.fromLong(pointer));
            }
        }
    }
    LOG.info("Replayed " + total);
}
From source file: org.talend.dataprofiler.core.ui.events.EventManager.java
/** * clear all eventReceivers for the current event, for the current context * /*w w w .j a va2 s.c o m*/ * @param context * @param event * @return */ public boolean clearEvent(Object context, EventEnum event) { MultiMap receverQueryMap = ctxToReceiverQueueMap.get(context); if (receverQueryMap == null) { return false; } receverQueryMap.remove(event); return true; }
From source file: salomon.engine.platform.data.tree.TreeManager.java
/**
 * Recursively wires child nodes onto {@code parentNode}, consuming entries
 * from {@code nodesMap} (keyed by parent node id) as they are attached.
 *
 * @param nodesMap multimap of parent-node id to the child TreeNodes awaiting
 *        attachment; entries are removed as they are consumed
 * @param parentNode the node whose children are looked up and attached
 */
@SuppressWarnings("unchecked")
private void connectNodes(MultiMap nodesMap, TreeNode parentNode) {
    LOGGER.info("TreeManager.connectNodes()");
    if (nodesMap.isEmpty()) {
        return;
    }
    int parentNodeID = parentNode.getInfo().getId();
    LOGGER.debug("parentNodeID: " + parentNodeID);
    // remove() hands back (and drops) every child queued under this parent.
    Collection<TreeNode> children = (Collection<TreeNode>) nodesMap.remove(parentNodeID);
    if (children == null) {
        return;
    }
    for (TreeNode child : children) {
        LOGGER.debug("childNodeID: " + child.getInfo().getId());
        parentNode.addChildNode(child, child.getInfo().getParentEdgeValue());
        connectNodes(nodesMap, child);
    }
}