List of usage examples for com.google.common.collect TreeMultiset create
public static <E extends Comparable> TreeMultiset<E> create(Iterable<? extends E> elements)
From source file:org.apache.awf.io.timeout.JMXDebuggableTimeoutManager.java
private long executeKeepAliveTimeouts() { // makes a defensive copy to avoid (1) CME (new timeouts are added this iteration) and (2) IO starvation. TreeMultiset<DecoratedTimeout> defensive = TreeMultiset.create(keepAliveTimeouts); Iterator<DecoratedTimeout> iter = defensive.iterator(); final long now = System.currentTimeMillis(); while (iter.hasNext()) { DecoratedTimeout candidate = iter.next(); if (candidate.timeout.getTimeout() > now) { break; }//from ww w . j av a2s. co m candidate.timeout.getCallback().onCallback(); index.remove(candidate.channel); iter.remove(); keepAliveTimeouts.remove(candidate); logger.debug("Keep-alive timeout triggered: {}", candidate.timeout); } return keepAliveTimeouts.isEmpty() ? Long.MAX_VALUE : Math.max(1, keepAliveTimeouts.iterator().next().timeout.getTimeout() - now); }
From source file:bots.mctsbot.ai.bots.bot.gametree.search.expander.SamplingExpander.java
public List<Pair<ActionWrapper, WeightedNode>> getWeightedChildren(boolean uniformTokens) { List<ProbabilityAction> probActions = new ArrayList<ProbabilityAction>(getProbabilityActions()); double[] cumulProb = new double[probActions.size()]; for (int i = 0; i < probActions.size(); i++) { cumulProb[i] = (i > 0 ? cumulProb[i - 1] : 0) + probActions.get(i).getProbability(); }// w w w .j a v a 2 s. com if (logger.isTraceEnabled()) { for (int i = 0; i < probActions.size(); i++) { logger.trace("cumulProb[" + i + "]=" + cumulProb[i] + " for action " + probActions.get(i)); } } // ordening for sexy debugging output Multiset<ProbabilityAction> samples = TreeMultiset.create(new Comparator<ProbabilityAction>() { @Override public int compare(ProbabilityAction o1, ProbabilityAction o2) { if (o2.getProbability() < o1.getProbability()) { return -1; } if (o2.getProbability() > o1.getProbability()) { return 1; } if (o1.getAction() instanceof RaiseAction && o2.getAction() instanceof RaiseAction) { return ((RaiseAction) o2.getAction()).amount - ((RaiseAction) o1.getAction()).amount; } if (o1.getAction() instanceof BetAction && o2.getAction() instanceof BetAction) { return ((BetAction) o2.getAction()).amount - ((BetAction) o1.getAction()).amount; } // if probabilities are equal for different classes, // objects are NOT equal per se // go alphabetically? return o1.toString().compareTo(o2.toString()); } }); // Multiset<ProbabilityAction> samples = new // HashMultiset<ProbabilityAction>(); int nbSamples = Math.min(Max_Granularity, tokens); for (int i = 0; i < nbSamples; i++) { ProbabilityAction sampledAction = sampleAction(probActions, cumulProb); samples.add(sampledAction); } Set<Entry<ProbabilityAction>> entrySet = samples.entrySet(); ImmutableList.Builder<Pair<ActionWrapper, WeightedNode>> childrenBuilder = ImmutableList.builder(); for (Entry<ProbabilityAction> entry : entrySet) { int tokensShare = uniformTokens ? 
tokens / entrySet.size() : tokens * entry.getCount() / nbSamples; // childrenBuilder.add(new Pair<ActionWrapper, WeightedNode>(entry.getElement(), new WeightedNode( node.getChildAfter(entry.getElement(), tokensShare), entry.getCount() / (double) nbSamples))); } return childrenBuilder.build(); }
From source file:org.sonar.api.ce.measure.RangeDistributionBuilder.java
private void init(Number[] bottomLimits) { this.bottomLimits = new Number[bottomLimits.length]; System.arraycopy(bottomLimits, 0, this.bottomLimits, 0, this.bottomLimits.length); Arrays.sort(this.bottomLimits); changeDoublesToInts();// w w w. j a va 2 s. c o m distributionSet = TreeMultiset.create(NumberComparator.INSTANCE); }
From source file:org.jon.ivmark.graphit.core.graph.traversal.Traversable.java
/**
 * Collects every traversed element into a collection sorted by the supplied
 * comparator. Duplicates are retained (the backing structure is a multiset).
 *
 * @param comparator the ordering for the returned collection
 * @return all traversed elements, sorted
 */
public Collection<E> asSortedCollection(Comparator<E> comparator) {
    final TreeMultiset<E> sorted = TreeMultiset.create(comparator);
    for (E item : iterable) {
        sorted.add(item);
    }
    return sorted;
}
From source file:cpw.mods.inventorysorter.InventoryHandler.java
/**
 * Tallies the item stacks in the slot range described by the action context.
 * Stacks the player cannot take, and empty/itemless stacks, are skipped.
 *
 * @param context the action context naming the player and slot range
 * @return a multiset of stack holders with their total counts, built from the
 *         sorted tally's descending view
 */
public Multiset<ItemStackHolder> getInventoryContent(Action.ActionContext context) {
    final int firstSlot = context.slotMapping.begin;
    final int lastSlotExclusive = context.slotMapping.end + 1;
    // Accumulate counts in comparator order first so the result can be
    // emitted largest-to-smallest via the descending view below.
    SortedMultiset<ItemStackHolder> tally = TreeMultiset
            .create(new InventoryHandler.ItemStackComparator());
    for (int slotIndex = firstSlot; slotIndex < lastSlotExclusive; slotIndex++) {
        final Slot slot = context.player.openContainer.getSlot(slotIndex);
        if (!slot.canTakeStack(context.player)) {
            continue;
        }
        ItemStack stack = slot.getStack();
        if (stack != null && stack.getItem() != null) {
            // Copy the stack so the holder is decoupled from the live inventory.
            ItemStackHolder holder = new ItemStackHolder(stack.copy());
            tally.add(holder, stack.stackSize);
        }
    }
    final HashMultiset<ItemStackHolder> result = HashMultiset.create();
    for (Multiset.Entry<ItemStackHolder> entry : tally.descendingMultiset().entrySet()) {
        result.add(entry.getElement(), entry.getCount());
    }
    return result;
}
From source file:fabric.worker.transaction.DeadlockDetectorThread.java
/**
 * Resolves deadlocks by aborting transactions.
 *
 * Strategy: repeatedly pick the top-level transaction involved in the most
 * waits-for cycles, abort its newest log, and prune every cycle that abort
 * breaks, until no cycles remain.
 *
 * @param cycles
 *          the set of deadlocks, represented by the logs of transactions
 *          involved in waits-for cycles.
 */
private void resolveDeadlocks(Set<Set<Log>> cycles) {
    // Turn the set of cycles into a map from top-level TIDs to sorted multisets
    // of transaction logs. The multisets are sorted by transaction depth, outer
    // transactions first.
    LongKeyMap<Multiset<Log>> logsByTopLevelTid = new LongKeyHashMap<Multiset<Log>>();
    for (Set<Log> cycle : cycles) {
        for (Log log : cycle) {
            long topLevelTid = log.getTid().topTid;
            Multiset<Log> logs = logsByTopLevelTid.get(topLevelTid);
            if (logs == null) {
                // First log seen for this top-level TID: create its sorted bucket.
                logs = TreeMultiset.create(LOG_COMPARATOR);
                logsByTopLevelTid.put(topLevelTid, logs);
            }
            logs.add(log);
        }
    }

    // Abort transactions to break up cycles. Transactions involved in more
    // cycles are aborted first.
    while (!cycles.isEmpty()) {
        // Figure out which top-level transaction(s) is involved in the most number
        // of deadlocks. (Multiset size counts one occurrence per cycle membership.)
        int curMax = 0;
        LongSet abortCandidates = new LongHashSet();
        for (LongKeyMap.Entry<Multiset<Log>> entry : logsByTopLevelTid.entrySet()) {
            int curSize = entry.getValue().size();
            if (curMax > curSize)
                continue;
            if (curMax < curSize) {
                // Strictly larger: previous candidates are no longer maximal.
                curMax = curSize;
                abortCandidates.clear();
            }
            abortCandidates.add(entry.getKey());
        }

        // Figure out which transaction to abort. (Pick the newest one.)
        Log toAbort = null;
        Multiset<Log> abortSet = null;
        for (LongIterator it = abortCandidates.iterator(); it.hasNext();) {
            long curTopLevelTid = it.next();
            Multiset<Log> curCandidateSet = logsByTopLevelTid.get(curTopLevelTid);
            // First element of the sorted multiset = outermost log for this TID.
            Log curCandidate = curCandidateSet.iterator().next();
            if (toAbort == null || toAbort.startTime < curCandidate.startTime) {
                toAbort = curCandidate;
                abortSet = curCandidateSet;
            }
        }

        // Abort the transaction.
        WORKER_DEADLOCK_LOGGER.log(Level.FINE, "Aborting {0}", toAbort);
        toAbort.flagRetry();

        // Fix up our data structures to reflect the aborted transaction.
        for (Iterator<Set<Log>> cycleIt = cycles.iterator(); cycleIt.hasNext();) {
            Set<Log> cycle = cycleIt.next();

            // Check if the cycle has a transaction that was aborted.
            if (!haveCommonElements(cycle, abortSet.elementSet()))
                continue;

            // Cycle was broken, so remove from the set of cycles.
            cycleIt.remove();

            // Fix up logsByTopLevelTid: drop this cycle's contribution to each
            // involved transaction's count, removing empty buckets entirely.
            for (Log log : cycle) {
                long topLevelTid = log.getTid().topTid;
                Multiset<Log> logs = logsByTopLevelTid.get(topLevelTid);
                logs.remove(log);
                if (logs.isEmpty()) {
                    logsByTopLevelTid.remove(topLevelTid);
                }
            }
        }
    }
}
From source file:org.cinchapi.concourse.server.storage.db.Block.java
/** * Construct a new instance./* w ww . j a va 2s.co m*/ * * @param id * @param directory * @param diskLoad - set to {@code true} to deserialize the block {@code id} * from {@code directory} on disk */ protected Block(String id, String directory, boolean diskLoad) { FileSystem.mkdirs(directory); this.id = id; this.file = directory + File.separator + id + BLOCK_NAME_EXTENSION; if (diskLoad) { this.mutable = false; this.size = (int) FileSystem.getFileSize(this.file); this.filter = BloomFilter.open(directory + File.separator + id + FILTER_NAME_EXTENSION); this.index = BlockIndex.open(directory + File.separator + id + INDEX_NAME_EXTENSION); this.revisions = null; } else { this.mutable = true; this.size = 0; this.revisions = TreeMultiset.create(Sorter.INSTANCE); this.filter = BloomFilter.create((directory + File.separator + id + FILTER_NAME_EXTENSION), EXPECTED_INSERTIONS); this.index = BlockIndex.create(directory + File.separator + id + INDEX_NAME_EXTENSION, EXPECTED_INSERTIONS); } }
From source file:org.onebusaway.nyc.vehicle_tracking.impl.simulator.SimulatorTask.java
/**
 * Looks up a particle by id and packages it, together with its ancestor chain
 * (up to {@code _particleParentSize} distinct ancestors), into location details.
 *
 * @param particleId the particle to search for
 * @param recordIndex record to inspect; negative means "use the live inference state"
 * @return the populated details; particle history is set only when the id is found
 */
public VehicleLocationDetails getParticleDetails(int particleId, int recordIndex) {
    final VehicleLocationDetails details = new VehicleLocationDetails();
    details.setId(_id);
    final Collection<Multiset.Entry<Particle>> particles;
    if (recordIndex < 0) {
        // Live query: use the most recent observation and the current particle set.
        details.setLastObservation(
                RecordLibrary.getNycTestInferredLocationRecordAsNycRawLocationRecord(_mostRecentRecord));
        particles = _vehicleLocationInferenceService.getCurrentParticlesForVehicleId(_vehicleId).entrySet();
    } else {
        details.setLastObservation(getDetails(recordIndex).getLastObservation());
        particles = getDetails(recordIndex).getParticles();
    }
    if (particles == null) {
        return details;
    }
    for (final Multiset.Entry<Particle> entry : particles) {
        Particle particle = entry.getElement();
        if (particle.getIndex() != particleId) {
            continue;
        }
        // Walk the parent chain, collecting ancestors in natural order until
        // the configured ancestry depth is reached.
        final Multiset<Particle> ancestry = TreeMultiset.create(Ordering.natural());
        while (particle != null && ancestry.elementSet().size() <= _particleParentSize) {
            ancestry.add(particle, entry.getCount());
            particle = particle.getParent();
        }
        details.setParticles(ancestry);
        details.setHistory(true);
        break;
    }
    return details;
}
From source file:org.joda.beans.ser.GuavaSerIteratorFactory.java
/** * Gets an iterable wrapper for {@code SortedMultiset}. * /* ww w . ja v a 2 s. c o m*/ * @param valueType the value type, not null * @param valueTypeTypes the generic parameters of the value type * @return the iterable, not null */ @SuppressWarnings({ "rawtypes", "unchecked" }) public static final SerIterable sortedMultiset(final Class<?> valueType, final List<Class<?>> valueTypeTypes) { Ordering natural = Ordering.natural(); final SortedMultiset<Object> coll = TreeMultiset.create(natural); return multiset(valueType, valueTypeTypes, coll); }
From source file:gov.nij.bundles.intermediaries.ers.EntityResolutionMessageHandler.java
/** * This method takes the ER response and converts the Java objects to the Merge Response XML. * //www. j a va 2s. c o m * @param entityContainerNode * @param results * @param recordLimit * @param attributeParametersNode * @return * @throws ParserConfigurationException * @throws XPathExpressionException * @throws TransformerException */ private Document createResponseMessage(Node entityContainerNode, EntityResolutionResults results, Node attributeParametersNode, int recordLimit) throws Exception { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); Document resultDocument = dbf.newDocumentBuilder().newDocument(); Element entityMergeResultMessageElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_NAMESPACE, "EntityMergeResultMessage"); resultDocument.appendChild(entityMergeResultMessageElement); Element entityContainerElement = resultDocument .createElementNS(EntityResolutionNamespaceContext.MERGE_RESULT_NAMESPACE, "EntityContainer"); entityMergeResultMessageElement.appendChild(entityContainerElement); NodeList inputEntityNodes = (NodeList) xpath.evaluate("er-ext:Entity", entityContainerNode, XPathConstants.NODESET); Collection<Element> inputEntityElements = null; if (attributeParametersNode == null) { inputEntityElements = new ArrayList<Element>(); } else { inputEntityElements = TreeMultiset .create(new EntityElementComparator((Element) attributeParametersNode)); //inputEntityElements = new ArrayList<Element>(); } for (int i = 0; i < inputEntityNodes.getLength(); i++) { inputEntityElements.add((Element) inputEntityNodes.item(i)); } if (attributeParametersNode == null) { LOG.warn("Attribute Parameters element was null, so records will not be sorted"); } //Collections.sort((List<Element>) inputEntityElements, new EntityElementComparator((Element) attributeParametersNode)); if (inputEntityElements.size() != inputEntityNodes.getLength()) { LOG.error("Lost elements in ER output sorting. 
Input count=" + inputEntityNodes.getLength() + ", output count=" + inputEntityElements.size()); } for (Element e : inputEntityElements) { Node clone = resultDocument.adoptNode(e.cloneNode(true)); resultDocument.renameNode(clone, EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, e.getLocalName()); entityContainerElement.appendChild(clone); } Element mergedRecordsElement = resultDocument .createElementNS(EntityResolutionNamespaceContext.MERGE_RESULT_NAMESPACE, "MergedRecords"); entityMergeResultMessageElement.appendChild(mergedRecordsElement); if (results != null) { List<RecordWrapper> records = results.getRecords(); // Loop through RecordWrappers to extract info to create merged records for (RecordWrapper record : records) { LOG.debug(" !#!#!#!# Record 1, id=" + record.getExternalId() + ", externals=" + record.getRelatedIds()); // Create Merged Record Container Element mergedRecordElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "MergedRecord"); mergedRecordsElement.appendChild(mergedRecordElement); // Create Original Record Reference for 'first record' Element originalRecordRefElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "OriginalRecordReference"); originalRecordRefElement.setAttributeNS(EntityResolutionNamespaceContext.STRUCTURES_NAMESPACE, "ref", record.getExternalId()); mergedRecordElement.appendChild(originalRecordRefElement); // Loop through and add any related records for (String relatedRecordId : record.getRelatedIds()) { originalRecordRefElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "OriginalRecordReference"); originalRecordRefElement.setAttributeNS(EntityResolutionNamespaceContext.STRUCTURES_NAMESPACE, "ref", relatedRecordId); mergedRecordElement.appendChild(originalRecordRefElement); } // Create Merge Quality Element Element mergeQualityElement = resultDocument.createElementNS( 
EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "MergeQuality"); mergedRecordElement.appendChild(mergeQualityElement); Set<AttributeStatistics> stats = results.getStatisticsForRecord(record.getExternalId()); for (AttributeStatistics stat : stats) { Element stringDistanceStatsElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "StringDistanceStatistics"); mergeQualityElement.appendChild(stringDistanceStatsElement); Element xpathElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "AttributeXPath"); stringDistanceStatsElement.appendChild(xpathElement); Node contentNode = resultDocument.createTextNode(stat.getAttributeName()); xpathElement.appendChild(contentNode); Element meanElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "StringDistanceMeanInRecord"); stringDistanceStatsElement.appendChild(meanElement); contentNode = resultDocument.createTextNode(String.valueOf(stat.getAverageStringDistance())); meanElement.appendChild(contentNode); Element sdElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "StringDistanceStandardDeviationInRecord"); stringDistanceStatsElement.appendChild(sdElement); contentNode = resultDocument .createTextNode(String.valueOf(stat.getStandardDeviationStringDistance())); sdElement.appendChild(contentNode); } } } else { for (Element e : inputEntityElements) { String id = e.getAttributeNS(EntityResolutionNamespaceContext.STRUCTURES_NAMESPACE, "id"); Element mergedRecordElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "MergedRecord"); mergedRecordsElement.appendChild(mergedRecordElement); Element originalRecordRefElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "OriginalRecordReference"); 
originalRecordRefElement.setAttributeNS(EntityResolutionNamespaceContext.STRUCTURES_NAMESPACE, "ref", id); mergedRecordElement.appendChild(originalRecordRefElement); Element mergeQualityElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "MergeQuality"); mergedRecordElement.appendChild(mergeQualityElement); XPath xp = XPathFactory.newInstance().newXPath(); xp.setNamespaceContext(new EntityResolutionNamespaceContext()); NodeList attributeParameterNodes = (NodeList) xp.evaluate("er-ext:AttributeParameter", attributeParametersNode, XPathConstants.NODESET); for (int i = 0; i < attributeParameterNodes.getLength(); i++) { String attributeName = xp.evaluate("er-ext:AttributeXPath", attributeParametersNode); Element stringDistanceStatsElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "StringDistanceStatistics"); mergeQualityElement.appendChild(stringDistanceStatsElement); Element xpathElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "AttributeXPath"); stringDistanceStatsElement.appendChild(xpathElement); Node contentNode = resultDocument.createTextNode(attributeName); xpathElement.appendChild(contentNode); Element meanElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "StringDistanceMeanInRecord"); stringDistanceStatsElement.appendChild(meanElement); contentNode = resultDocument.createTextNode("0.0"); meanElement.appendChild(contentNode); Element sdElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_EXT_NAMESPACE, "StringDistanceStandardDeviationInRecord"); stringDistanceStatsElement.appendChild(sdElement); contentNode = resultDocument.createTextNode("0.0"); sdElement.appendChild(contentNode); } } } Element recordLimitExceededElement = resultDocument.createElementNS( EntityResolutionNamespaceContext.MERGE_RESULT_NAMESPACE, 
"RecordLimitExceededIndicator"); recordLimitExceededElement.setTextContent(new Boolean(results == null).toString()); entityMergeResultMessageElement.appendChild(recordLimitExceededElement); return resultDocument; }