Example usage for com.google.common.collect Lists newLinkedList

List of usage examples for com.google.common.collect Lists newLinkedList

Introduction

On this page you can find example usages of com.google.common.collect Lists newLinkedList.

Prototype

@GwtCompatible(serializable = true)
public static <E> LinkedList<E> newLinkedList() 

Document

Creates a mutable, empty LinkedList instance (for Java 6 and earlier).
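For reference, here is a minimal, self-contained sketch of the call itself (class and variable names below are illustrative): the factory method is equivalent to new LinkedList<E>() but lets the compiler infer the element type from the target.

import com.google.common.collect.Lists;

import java.util.LinkedList;

public class NewLinkedListExample {
    public static void main(String[] args) {
        // Mutable, empty LinkedList; element type is inferred from the declaration.
        LinkedList<String> names = Lists.newLinkedList();
        names.add("alpha");
        names.addFirst("omega");
        System.out.println(names); // prints [omega, alpha]
    }
}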

Usage

From source file:com.google.devtools.depan.maven.builder.MavenGraphResolver.java

public GraphModel resolveReferences(GraphModel analysisGraph) {
    Collection<GraphNode> nodes = analysisGraph.getNodes();

    Map<String, List<ArtifactElement>> baseMap = Maps.newHashMapWithExpectedSize(nodes.size());

    // Build map of known artifact base names to all matching
    // ArtifactElements.
    for (GraphNode node : nodes) {
        if (node instanceof ArtifactElement) {
            ArtifactElement artifact = (ArtifactElement) node;
            String baseLabel = artifact.getBaseLabel();
            List<ArtifactElement> known = baseMap.get(baseLabel);
            if (null == known) {
                known = Lists.newLinkedList();
                baseMap.put(baseLabel, known);
            }
            known.add(artifact);
        }
    }

    // Use the sets of ArtifactElements from the baseMap to build
    // the update map for resolvable ArtifactElements.
    for (List<ArtifactElement> bases : baseMap.values()) {
        if (bases.size() < 2) {
            continue;
        }
        buildUpdateMap(bases);
    }

    // Use the constructed update map to build a graph with all
    // resolvable ArtifactElement references mapped to their
    // definitions.
    return rewriteReferences(analysisGraph);
}

From source file:org.apache.streams.rss.serializer.SyndEntryActivitySerializer.java

@Override
public List<Activity> deserializeAll(List<ObjectNode> objectNodes) {
    List<Activity> result = Lists.newLinkedList();
    for (ObjectNode node : objectNodes) {
        result.add(deserialize(node));
    }
    return result;
}

From source file:com.puppycrawl.tools.checkstyle.checks.AbstractDeclarationCollector.java

@Override
public void beginTree(DetailAST rootAST) {
    final Deque<LexicalFrame> frameStack = Lists.newLinkedList();
    frameStack.add(new GlobalFrame());

    frames = Maps.newHashMap();

    DetailAST curNode = rootAST;
    while (curNode != null) {
        collectDeclarations(frameStack, curNode);
        DetailAST toVisit = curNode.getFirstChild();
        while (curNode != null && toVisit == null) {
            endCollectingDeclarations(frameStack, curNode);
            toVisit = curNode.getNextSibling();
            if (toVisit == null) {
                curNode = curNode.getParent();
            }
        }
        curNode = toVisit;
    }
}

From source file:com.qcadoo.model.api.utils.EntityTreeUtilsService.java

/**
 * Returns a list of entities sorted in the same order as they appear in the tree.
 *
 * @param tree
 *            entity tree containing entities to be listed
 * @return list of sorted entities
 * 
 * @since 1.1.5
 */
public List<Entity> getSortedEntities(final EntityTree tree) {
    List<Entity> nodesList = Lists.newLinkedList();
    if (tree.isEmpty()) {
        return nodesList;
    }
    return getSortedEntitiesFromNode(tree.getRoot());
}

From source file:bio.pih.genoogle.search.CollectionSearcher.java

@Override
public SearchResults call() {
    long begin = System.currentTimeMillis();

    int indexSearchers = databankCollection.size();

    ExecutorService subDatabanksExecutor = Executors.newFixedThreadPool(indexSearchers);
    CompletionService<IndexSearchResults> subDataBanksCS = new ExecutorCompletionService<IndexSearchResults>(
            subDatabanksExecutor);

    ExecutorService queryExecutor = Executors.newFixedThreadPool(sp.getMaxThreadsIndexSearch());

    List<Throwable> fails = Lists.newLinkedList();
    fails = Collections.synchronizedList(fails);
    Iterator<AbstractSequenceDataBank> it = databankCollection.databanksIterator();
    while (it.hasNext()) {
        AbstractSequenceDataBank innerBank = it.next();
        final IndexBothStrandSearcher indexSearcher = new IndexBothStrandSearcher(id, sp,
                (IndexedSequenceDataBank) innerBank, queryExecutor, fails);
        subDataBanksCS.submit(indexSearcher);
    }

    IndexSearchResults indexSearchResults = null;
    try {
        for (int i = 0; i < indexSearchers; i++) {
            IndexSearchResults subResults = subDataBanksCS.take().get();
            if (subResults == null) {
                logger.error("Results from searcher " + i + " was empty.");
            } else {
                if (indexSearchResults == null) {
                    indexSearchResults = subResults;
                } else {
                    indexSearchResults.merge(subResults);
                }
            }
        }
    } catch (InterruptedException e) {
        sr.addFail(e);
        return sr;
    } catch (ExecutionException e) {
        sr.addFail(e);
        return sr;
    }

    queryExecutor.shutdown();
    subDatabanksExecutor.shutdown();

    if (fails.size() > 0) {
        sr.addAllFails(fails);
        return sr;
    }

    logger.info("DNAIndexBothStrandSearcher total Time of " + this.toString() + " "
            + (System.currentTimeMillis() - begin));

    long alignmentBegin = System.currentTimeMillis();

    ExecutorService alignerExecutor = Executors.newFixedThreadPool(sp.getMaxThreadsExtendAlign());

    int maxHits = sp.getMaxHitsResults() > 0 ? sp.getMaxHitsResults() : indexSearchResults.size();
    maxHits = Math.min(maxHits, indexSearchResults.size());

    CountDownLatch alignmentsCountDown = new CountDownLatch(maxHits);

    try {
        for (int i = 0; i < maxHits; i++) {
            RetrievedSequenceAreas retrievedArea = indexSearchResults.get(i);
            SequenceAligner sequenceAligner = new SequenceAligner(alignmentsCountDown,
                    indexSearchResults.getIndexSearchers(), retrievedArea, sr, databankCollection);
            alignerExecutor.submit(sequenceAligner);
        }
    } catch (IOException e) {
        sr.addFail(e);
        return sr;
    }

    try {
        alignmentsCountDown.await();
    } catch (InterruptedException e) {
        sr.addFail(e);
        return sr;
    }

    alignerExecutor.shutdown();

    ListIterator<Hit> hitsIterator = sr.getHits().listIterator();
    while (hitsIterator.hasNext()) {
        Hit hit = hitsIterator.next();
        filterHSPs(hit.getHSPs());
        if (hit.getHSPs().isEmpty()) {
            hitsIterator.remove();
        } else {
            Collections.sort(hit.getHSPs(), HSP.COMPARATOR);
        }
    }

    Collections.sort(sr.getHits(), Hit.COMPARATOR);
    logger.info("Alignments total Time of " + this.toString() + " "
            + (System.currentTimeMillis() - alignmentBegin));
    logger.info("Total Time of " + this.toString() + " " + (System.currentTimeMillis() - begin));

    return sr;
}

From source file:edu.washington.cs.cupid.wizards.internal.DerivedCapability.java

public static List<DerivedCapability> derived(ICapability capability) {
    List<DerivedCapability> result = Lists.newLinkedList();
    for (IOutput<?> output : capability.getOutputs()) {
        result.add(new DerivedCapability(capability, output));
    }
    return result;
}

From source file:org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.builder.DefaultPendingDependenciesVisitor.java

private void markNoLongerPending(PendingDependencies pendingDependencies) {
    if (pendingDependencies.hasPendingComponents()) {
        if (noLongerPending == null) {
            noLongerPending = Lists.newLinkedList();
        }
        noLongerPending.add(pendingDependencies);
    }
    pendingDependencies.increaseHardEdgeCount();
}

From source file:org.sonar.plugins.technicaldebt.TechnicalDebtDecorator.java

@DependsUpon
public List<Metric> dependsOnMetrics() {
    List<Metric> list = Lists.newLinkedList();
    for (AxisDebtCalculator axis : axisList) {
        list.addAll(axis.dependsOn());
    }
    return list;
}

From source file:org.apache.shindig.gadgets.parse.caja.CajaHtmlParser.java

@Override
protected Document parseDomImpl(String source) throws GadgetException {
    DocumentFragment fragment = parseFragmentImpl(source);

    // TODO: remove parseDomImpl() altogether; only have subclasses
    // support parseFragmentImpl() with base class cleaning up.
    Document document = fragment.getOwnerDocument();
    Node html = null;
    LinkedList<Node> beforeHtml = Lists.newLinkedList();
    while (fragment.hasChildNodes()) {
        Node child = fragment.removeChild(fragment.getFirstChild());
        if (child.getNodeType() == Node.ELEMENT_NODE && "html".equalsIgnoreCase(child.getNodeName())) {
            if (html == null) {
                html = child;
            } else {
                // Ignore the current (duplicated) html node but add its children
                transferChildren(html, child);
            }
        } else if (html != null) {
            html.appendChild(child);
        } else {
            beforeHtml.add(child);
        }
    }

    if (html == null) {
        html = document.createElement("html");
    }

    prependToNode(html, beforeHtml);

    // Ensure document.getDocumentElement() is html node.
    document.appendChild(html);

    return document;
}

From source file:org.apache.hadoop.hive.ql.exec.spark.status.impl.JobMetricsListener.java

@Override
public synchronized void onTaskEnd(SparkListenerTaskEnd taskEnd) {
    int stageId = taskEnd.stageId();
    Integer jobId = stageIdToJobId.get(stageId);
    if (jobId == null) {
        LOG.warn("Can not find job id for stage[" + stageId + "].");
    } else {
        Map<Integer, List<TaskMetrics>> jobMetrics = allJobMetrics.get(jobId);
        if (jobMetrics == null) {
            jobMetrics = Maps.newHashMap();
            allJobMetrics.put(jobId, jobMetrics);
        }
        List<TaskMetrics> stageMetrics = jobMetrics.get(stageId);
        if (stageMetrics == null) {
            stageMetrics = Lists.newLinkedList();
            jobMetrics.put(stageId, stageMetrics);
        }
        stageMetrics.add(taskEnd.taskMetrics());
    }
}