Usage examples for `com.google.common.collect.Lists#newLinkedList()`:
@GwtCompatible(serializable = true) public static <E> LinkedList<E> newLinkedList()
From source file: org.eclipse.elk.alg.layered.compaction.oned.algs.LongestPathCompaction.java
/**
 * {@inheritDoc}
 *
 * Longest-path compaction: treats the constraint graph as a DAG and assigns each
 * group the smallest start position that satisfies all separation constraints,
 * processing groups in reverse topological order (sinks first).
 */
@Override
public void compact(final OneDimensionalCompactor compactor) {
    // calculating the left-most position of any element
    // this will be our starting point for the compaction
    double minStartPos = Double.POSITIVE_INFINITY;
    for (CNode cNode : compactor.cGraph.cNodes) {
        minStartPos = Math.min(minStartPos, cNode.cGroup.reference.hitbox.x + cNode.cGroupOffset.x);
    }

    // finding the sinks of the constraint graph (groups with no outgoing constraints)
    Queue<CGroup> sinks = Lists.newLinkedList();
    for (CGroup group : compactor.cGraph.cGroups) {
        group.startPos = minStartPos;
        if (group.outDegree == 0) {
            sinks.add(group);
        }
    }

    // process sinks until every node in the constraint graph was handled
    while (!sinks.isEmpty()) {
        CGroup group = sinks.poll();

        // record the movement of this group during the current compaction
        // this has to be recorded _before_ the nodes' positions are updated
        // and care has to be taken about the compaction direction. In certain
        // scenarios nodes may move "back-and-forth". To detect this, we associate
        // a negative delta with two of the compaction directions.
        double diff = group.reference.hitbox.x;

        // ------------------------------------------
        // #1 final positions for this group's nodes
        // ------------------------------------------
        for (CNode node : group.cNodes) {
            // CNodes can be locked in place to avoid pulling clusters apart
            double suggestedX = group.startPos + node.cGroupOffset.x;
            if (node.cGroup.reposition
                    // does the "fixed" position violate the constraints?
                    || (node.getPosition() < suggestedX)) {
                node.startPos = suggestedX;
            } else {
                // leave the node where it was!
                node.startPos = node.hitbox.x;
            }
        }

        diff -= group.reference.startPos;
        group.delta += diff;
        // normalize the delta's sign so that "forward" movement is positive
        // regardless of the compaction direction
        if (compactor.direction == Direction.RIGHT || compactor.direction == Direction.DOWN) {
            group.deltaNormalized += diff;
        } else {
            group.deltaNormalized -= diff;
        }

        // ---------------------------------------------------
        // #2 propagate start positions to constrained groups
        // ---------------------------------------------------
        for (CNode node : group.cNodes) {
            for (CNode incNode : node.constraints) {
                // determine the required spacing between this node and the constrained one
                double spacing;
                if (compactor.direction.isHorizontal()) {
                    spacing = compactor.spacingsHandler.getHorizontalSpacing(node, incNode);
                } else {
                    spacing = compactor.spacingsHandler.getVerticalSpacing(node, incNode);
                }

                incNode.cGroup.startPos = Math.max(incNode.cGroup.startPos,
                        node.startPos + node.hitbox.width + spacing
                        // respect the other group's node's offset
                        - incNode.cGroupOffset.x);

                // whether the node's current position should be preserved
                if (!incNode.reposition) {
                    incNode.cGroup.startPos = Math.max(incNode.cGroup.startPos,
                            incNode.getPosition() - incNode.cGroupOffset.x);
                }

                // once all outgoing constraints of a group are satisfied it becomes a sink
                incNode.cGroup.outDegree--;
                if (incNode.cGroup.outDegree == 0) {
                    sinks.add(incNode.cGroup);
                }
            }
        }
    }

    // ------------------------------------------------------
    // #3 setting hitbox positions to new starting positions
    // ------------------------------------------------------
    for (CNode cNode : compactor.cGraph.cNodes) {
        cNode.applyPosition();
    }
}
From source file: cc.recommenders.io.Logger.java
/** Discards all buffered log entries by replacing the shared log with a fresh, empty list. */
public static void clearLog() { log = Lists.newLinkedList(); }
From source file: org.eclipse.emf.compare.tests.match.data.MatchInputData.java
/**
 * Loads the left-hand-side input resources for the "root ID" two-way A1 test case.
 *
 * @return a mutable list containing the two loaded resources, in declaration order
 * @throws IOException if a resource cannot be read from the class loader
 */
public List<Resource> getRootIDTwoWayA1Left() throws IOException {
    List<Resource> resources = Lists.newLinkedList();
    for (String path : new String[] { "rootid/twoway/a1/left.nodes", "rootid/twoway/a1/left2.nodes" }) {
        resources.add(loadFromClassLoader(path));
    }
    return resources;
}
From source file: co.cask.cdap.data.stream.service.upload.BufferedContentWriter.java
/**
 * Creates a writer for the given stream.
 *
 * @param streamId the stream to write to
 * @param streamWriter the underlying writer events are eventually handed to
 * @param headers headers attached to the written events; snapshotted via
 *                {@link ImmutableMap#copyOf} so later caller-side mutation has no effect
 */
BufferedContentWriter(Id.Stream streamId, ConcurrentStreamWriter streamWriter, Map<String, String> headers) { this.streamId = streamId; this.streamWriter = streamWriter; this.headers = ImmutableMap.copyOf(headers); this.bodies = Lists.newLinkedList(); }
From source file: org.terasology.navgraph.FloorFinder.java
/**
 * Partitions the chunk's walkable regions into floors.
 *
 * First recomputes regions via {@link #findRegions}, then grows one floor per
 * still-unassigned region by flood-filling neighbor regions that don't overlap the
 * floor. Finally propagates each region's floor to its walkable blocks.
 */
public void findFloors(NavGraphChunk map) {
    findRegions(map);
    map.floors.clear();
    for (Region region : regions) {
        // skip regions already claimed by a previously grown floor
        if (region.floor != null) {
            continue;
        }
        Floor floor = new Floor(map, map.floors.size());
        map.floors.add(floor);
        List<Region> stack = Lists.newLinkedList();
        stack.add(0, region);
        while (!stack.isEmpty()) {
            // always expand from the region with the smallest id next;
            // the work list is re-sorted because neighbors are appended unordered
            Collections.sort(stack, new Comparator<Region>() {
                @Override
                public int compare(Region o1, Region o2) {
                    return o1.id < o2.id ? -1 : o1.id > o2.id ? 1 : 0;
                }
            });
            Region current = stack.remove(0);
            // a region may be enqueued more than once; skip if claimed meanwhile
            if (current.floor != null) {
                continue;
            }
            if (!floor.overlap(current)) {
                floor.merge(current);
                Set<Region> neighborRegions = current.getNeighborRegions();
                for (Region neighborRegion : neighborRegions) {
                    if (neighborRegion.floor == null) {
                        stack.add(neighborRegion);
                    }
                }
            }
        }
    }
    // propagate the computed floor assignment down to the individual blocks
    for (Map.Entry<WalkableBlock, Region> entry : regionMap.entrySet()) {
        entry.getKey().floor = entry.getValue().floor;
    }
}
From source file: org.apache.streams.converter.HoconConverterProcessor.java
@Override public List<StreamsDatum> process(StreamsDatum entry) { List<StreamsDatum> result = Lists.newLinkedList(); Object document = entry.getDocument(); Object outDoc = HoconConverterUtil.convert(document, outClass, hocon, outPath); StreamsDatum datum = DatumUtils.cloneDatum(entry); datum.setDocument(outDoc);//from ww w. j a va2 s . co m result.add(datum); return result; }
From source file: org.apache.flume.client.avro.SimpleTextLineEventReader.java
/**
 * Reads up to {@code n} events, stopping early when the underlying reader is
 * exhausted ({@link #readEvent()} returns {@code null}).
 *
 * @param n maximum number of events to read; for {@code n <= 0} nothing is read
 * @return the events read, possibly fewer than {@code n}, possibly empty
 * @throws IOException if reading an event fails
 */
@Override
public List<Event> readEvents(int n) throws IOException {
    List<Event> collected = Lists.newLinkedList();
    while (collected.size() < n) {
        Event next = readEvent();
        if (next == null) {
            break;  // source exhausted
        }
        collected.add(next);
    }
    return collected;
}
From source file: com.google.litecoin.wallet.KeyTimeCoinSelector.java
/**
 * Selects outputs controlled by wallet keys created before {@code unixTimeSeconds},
 * up to {@code MAX_SIMULTANEOUS_INPUTS} inputs. The {@code target} amount is ignored;
 * selection is driven purely by key creation time.
 */
@Override
public CoinSelection select(BigInteger target, LinkedList<TransactionOutput> candidates) {
    try {
        LinkedList<TransactionOutput> gathered = Lists.newLinkedList();
        BigInteger valueGathered = BigInteger.ZERO;
        for (TransactionOutput output : candidates) {
            // optionally skip outputs from transactions that aren't confirmed yet
            if (ignorePending && !isConfirmed(output))
                continue;
            // Find the key that controls output, assuming it's a regular pay-to-pubkey or pay-to-address output.
            // We ignore any other kind of exotic output on the assumption we can't spend it ourselves.
            final Script scriptPubKey = output.getScriptPubKey();
            ECKey controllingKey;
            if (scriptPubKey.isSentToRawPubKey()) {
                controllingKey = wallet.findKeyFromPubKey(scriptPubKey.getPubKey());
            } else if (scriptPubKey.isSentToAddress()) {
                controllingKey = wallet.findKeyFromPubHash(scriptPubKey.getPubKeyHash());
            } else {
                log.info("Skipping tx output {} because it's not of simple form.", output);
                continue;
            }
            // NOTE(review): the wallet lookups above can return null for keys the wallet
            // doesn't hold, which would NPE on the next line — confirm candidates are
            // guaranteed to be wallet-owned outputs.
            if (controllingKey.getCreationTimeSeconds() >= unixTimeSeconds)
                continue;
            // It's older than the cutoff time so select.
            valueGathered = valueGathered.add(output.getValue());
            gathered.push(output);
            if (gathered.size() >= MAX_SIMULTANEOUS_INPUTS) {
                log.warn("Reached {} inputs, going further would yield a tx that is too large, stopping here.",
                        gathered.size());
                break;
            }
        }
        return new CoinSelection(valueGathered, gathered);
    } catch (ScriptException e) {
        throw new RuntimeException(e); // We should never have problems understanding scripts in our wallet.
    }
}
From source file: com.continuuity.http.InternalHttpResponder.java
/** Creates a responder with an empty content-chunk buffer and an unset (0) status code. */
public InternalHttpResponder() { contentChunks = Lists.newLinkedList(); statusCode = 0; }
From source file: natlab.CommentBuffer.java
/** Creates a buffer with an initially empty comment queue. */
public CommentBuffer() { this.commentQueue = Lists.newLinkedList(); }