Example usage for com.google.common.collect Lists newLinkedList

Introduction

This page collects example usages of com.google.common.collect.Lists#newLinkedList from open-source projects.

Prototype

@GwtCompatible(serializable = true)
public static <E> LinkedList<E> newLinkedList() 

Document

Creates a mutable, empty LinkedList instance (for Java 6 and earlier).
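
For orientation, here is a minimal, self-contained sketch of typical usage; the class and variable names are illustrative, not taken from the examples below.

import com.google.common.collect.Lists;

import java.util.LinkedList;

public class NewLinkedListExample {
    public static void main(String[] args) {
        // The type argument E is inferred from the declared type on the left.
        LinkedList<String> names = Lists.newLinkedList();
        names.add("alice");
        names.addFirst("bob"); // LinkedList also implements Deque
        System.out.println(names); // prints [bob, alice]
    }
}

On Java 7 and later, the diamond syntax (new LinkedList<>()) provides the same type inference, which is why the documentation scopes this factory to Java 6 and earlier.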

Usage

From source file:net.oneandone.maven.plugins.cycles.graph.FeedbackArcSet.java

private static <V, E> Ordering<V> vertexOrdering(DirectedGraph<V, E> graph,
        VertexEvaluator<V, DirectedGraph<V, E>> evaluator) {
    List<V> front = Lists.newLinkedList();
    List<V> back = Lists.newLinkedList();

    // FIXME: there must be an easier way to clone a graph
    DirectedGraph<V, E> g = FilterUtils.createInducedSubgraph(graph.getVertices(), graph);

    while (!g.getVertices().isEmpty()) {
        collectSinks(g, back);
        collectSources(g, front);
        collectMaxDelta(g, front, evaluator);
    }

    return Ordering.explicit(Lists.newArrayList(Iterables.concat(front, back)));
}

From source file:com.jayway.jaxrs.hateoas.support.ReflectionUtils.java

public static Collection<Field> getFieldsHierarchical(Class<?> clazz) {
    Collection<Field> fields = Lists.newLinkedList();

    for (Field field : clazz.getDeclaredFields()) {
        fields.add(field);
    }

    Class<?> superClass = clazz.getSuperclass();
    if (superClass != null && !superClass.equals(Object.class)) {
        fields.addAll(getFieldsHierarchical(superClass));
    }
    return fields;
}

From source file:org.splevo.ui.sourceconnection.jdt.VariantLinkQuickFixGenerator.java

/**
 * Build the link quick fixes to the alternatives of a variant.
 *
 * @param currentVariant
 *            The variant to get the alternatives for.
 * @return The array of quick links.
 */
private IMarkerResolution[] buildAlternativeVariantLinks(Variant currentVariant) {

    List<IMarkerResolution> resolutions = Lists.newLinkedList();
    for (Variant alternativeVariant : currentVariant.getVariationPoint().getVariants()) {
        if (alternativeVariant != currentVariant) {
            resolutions.add(new VartiantLinkQuickFix(alternativeVariant));
        }
    }

    return resolutions.toArray(new IMarkerResolution[resolutions.size()]);
}

From source file:com.facebook.buck.graph.AcyclicDepthFirstPostOrderTraversal.java

/**
 * Performs a depth-first, post-order traversal over a DAG.
 * @param initialNodes The nodes from which to perform the traversal. Not allowed to contain
 *     {@code null}.
 * @throws CycleException if a cycle is found while performing the traversal.
 */
@SuppressWarnings("PMD.PrematureDeclaration")
public Iterable<T> traverse(Iterable<? extends T> initialNodes) throws CycleException {
    // This corresponds to the current chain of nodes being explored. Enforcing this invariant makes
    // this data structure useful for debugging.
    Deque<Explorable> toExplore = Lists.newLinkedList();
    for (T node : initialNodes) {
        toExplore.add(new Explorable(node));
    }

    Set<T> inProgress = Sets.newHashSet();
    LinkedHashSet<T> explored = Sets.newLinkedHashSet();

    while (!toExplore.isEmpty()) {
        Explorable explorable = toExplore.peek();
        T node = explorable.node;

        // This could happen if one of the initial nodes is a dependency of the other, for example.
        if (explored.contains(node)) {
            toExplore.removeFirst();
            continue;
        }

        inProgress.add(node);

        // Find children that need to be explored to add to the stack.
        int stackSize = toExplore.size();
        for (Iterator<T> iter = explorable.children; iter.hasNext();) {
            T child = iter.next();
            if (inProgress.contains(child)) {
                throw createCycleException(child, toExplore);
            } else if (!explored.contains(child)) {
                toExplore.addFirst(new Explorable(child));

                // Without this break statement:
                // (1) Children will be explored in reverse order instead of the specified order.
                // (2) CycleException may contain extra nodes.
                // Comment out the break statement and run the unit test to verify this for yourself.
                break;
            }
        }

        if (stackSize == toExplore.size()) {
            // Nothing was added to toExplore, so the current node can be popped off the stack and
            // marked as explored.
            toExplore.removeFirst();
            inProgress.remove(node);
            explored.add(node);
        }
    }

    Preconditions.checkState(inProgress.isEmpty(), "No more nodes should be in progress.");

    return Iterables.unmodifiableIterable(explored);
}

From source file:org.sonar.plugins.gosu.cobertura.CoberturaReportParser.java

private static List<String> collectSourceDirs(SMInputCursor source) throws XMLStreamException {
    List<String> directories = Lists.newLinkedList();
    while (source.getNext() != null) {
        String sourceDir = cleanSourceDir(source.getElemStringValue());
        if (StringUtils.isNotBlank(sourceDir)) {
            directories.add(sourceDir);
        }
    }
    return directories;
}

From source file:exec.validate_evaluation.stats.UsageToMicroCommitRatioCalculator.java

public void run() throws IOException {

    System.out.println("\nreading all available MicroCommits...");
    for (String zip : io.findZips()) {
        for (MicroCommit mc : io.read(zip)) {
            List<MicroCommit> mcs = allMicroCommits.get(mc.getType());
            if (mcs == null) {
                mcs = Lists.newLinkedList();
                allMicroCommits.put(mc.getType(), mcs);
            }
            mcs.add(mc);
        }
    }

    Map<String, Double> usageToHistoryRatio = Maps.newLinkedHashMap();

    int numTypesTotal = 0;
    int numCommitsTotal = 0;
    int numUsagesTotal = 0;

    int numTypesDATEV = 0;
    int numCommitsDATEV = 0;
    int numUsagesDATEV = 0;

    int numTypesWith = 0;
    int numCommitsWith = 0;
    int numUsagesWith = 0;
    int numTypesWithout = 0;
    int numCommitsWithout = 0;
    int numUsagesWithout = 0;

    for (ICoReTypeName t : allMicroCommits.keySet()) {

        List<MicroCommit> commits = allMicroCommits.get(t);
        List<Usage> usages = dirUsages.readAllZips(t, Usage.class);

        int numCommits = commits.size();
        int numUsages = usages.size();
        System.out.printf("%s: %d commits, %d usages\n", t, numCommits, numUsages);

        // if (numUsages > 0 && !isDatev(t)) {
        if (!isDatev(t)) {
            double ratio = (0.000001 + numUsages) / (1.0 * numCommits);
            String key = String.format("%s (%d/%d)", t, numUsages, numCommits);
            usageToHistoryRatio.put(key, ratio);
        }

        numTypesTotal++;
        numCommitsTotal += numCommits;
        numUsagesTotal += numUsages;

        if (numCommits > 0 && numUsages > 0) {
            numTypesWith++;
            numCommitsWith += numCommits;
            numUsagesWith += numUsages;
        } else {
            numTypesWithout++;
            numCommitsWithout += numCommits;
            numUsagesWithout += numUsages;

            if (isDatev(t)) {
                numTypesDATEV++;
                numCommitsDATEV += numCommits;
                numUsagesDATEV += numUsages;
            }
        }
    }

    System.out.printf("\n\nsummary:\n");
    System.out.printf("we have a total of %d commits and %d usages for %d different types\n", numCommitsTotal,
            numUsagesTotal, numTypesTotal);
    System.out.printf("currently, we have both commits and usages for %d types (%d commits, %d usages)\n",
            numTypesWith, numCommitsWith, numUsagesWith);
    System.out.printf("we have commits, but no usages for %d types (%d commits, %d usages)\n", numTypesWithout,
            numCommitsWithout, numUsagesWithout);
    System.out.printf("out of these, %d types (%d commits, %d usages) are related to DATEV\n", numTypesDATEV,
            numCommitsDATEV, numUsagesDATEV);

    System.out.printf("\n\nratios (usages/histories):\n");
    Map<String, Double> sortedRatios = MapSorter.sort(usageToHistoryRatio);
    for (String key : sortedRatios.keySet()) {
        double ratio = sortedRatios.get(key);
        System.out.printf("%3.2f - %s\n", ratio, key);
    }

}

From source file:org.apache.shindig.gadgets.rewrite.ConcatLinkRewriter.java

public List<Uri> rewrite(String mimeType, LinkedHashSet<Uri> uris) {
    String concatBase = getConcatBase(gadgetUri, rewriterFeature, mimeType, container);
    List<Uri> concatUris = Lists.newLinkedList();
    int paramIndex = 1;
    StringBuilder builder = null;
    int maxUriLen = MAX_URL_LENGTH + concatBase.length();
    try {
        int uriIx = 0;
        for (Uri uri : uris) {
            String uriStr = uri.toString();
            if (builder != null && builder.length() + uriStr.length() > maxUriLen) {
                // The next one will go over limit
                concatUris.add(Uri.parse(builder.toString()));
                builder = null;
                paramIndex = 1;

                // If the current uri is too long, simply don't rewrite, since
                // concat is for developers' benefit
                if (uriStr.length() > MAX_URL_LENGTH) {
                    concatUris.add(uri);
                    continue;
                }
            }

            if (paramIndex == 1) {
                builder = new StringBuilder(concatBase);
                if (debug)
                    builder.append("debug=1&");
                if (ignoreCache)
                    builder.append("nocache=1&");
                if (rewriterFeature.getExpires() != null) {
                    builder.append(ProxyBase.REFRESH_PARAM).append('=')
                            .append(rewriterFeature.getExpires().toString()).append('&');
                }
            } else {
                builder.append('&');
            }
            builder.append(paramIndex).append('=').append(URLEncoder.encode(uriStr, "UTF-8"));
            ++paramIndex;
            ++uriIx;
        }
        if (builder != null)
            concatUris.add(Uri.parse(builder.toString()));
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException(e);
    }
    return concatUris;
}

From source file:org.locationtech.geogig.web.api.commands.BranchWebOp.java

/**
 * Runs the command and builds the appropriate response
 *
 * @param context - the context to use for this command
 */
@Override
public void run(CommandContext context) {
    if (list) {
        final Context geogig = this.getCommandLocator(context);
        final List<Ref> localBranches = geogig.command(BranchListOp.class).call();
        final List<Ref> remoteBranches;
        if (remotes) {
            remoteBranches = geogig.command(BranchListOp.class).setLocal(false).setRemotes(remotes).call();
        } else {
            remoteBranches = Lists.newLinkedList();
        }
        context.setResponseContent(new CommandResponse() {
            @Override
            public void write(ResponseWriter out) throws Exception {
                out.start();
                out.writeBranchListResponse(localBranches, remoteBranches);
                out.finish();
            }
        });
    }
}

From source file:org.apache.streams.jackson.TypeConverterProcessor.java

@Override
public List<StreamsDatum> process(StreamsDatum entry) {
    List<StreamsDatum> result = Lists.newLinkedList();
    Object inDoc = entry.getDocument();
    ObjectNode node = null;
    if (inClass == String.class || inDoc instanceof String) {
        try {
            node = this.mapper.readValue((String) entry.getDocument(), ObjectNode.class);
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        node = this.mapper.convertValue(inDoc, ObjectNode.class);
    }

    if (node != null) {
        Object outDoc;
        try {
            if (outClass == String.class)
                outDoc = this.mapper.writeValueAsString(node);
            else
                outDoc = this.mapper.convertValue(node, outClass);

            StreamsDatum outDatum = new StreamsDatum(outDoc, entry.getId(), entry.getTimestamp(),
                    entry.getSequenceid());
            outDatum.setMetadata(entry.getMetadata());
            result.add(outDatum);
        } catch (Throwable e) {
            LOGGER.warn(e.getMessage());
            LOGGER.warn(node.toString());
        }
    }

    return result;
}

From source file:com.cloudera.branchreduce.impl.local.MultiThreadedBranchReduceEngine.java

@Override
public <T extends Writable, G extends GlobalState<G>> BranchReduceContext<T, G> run(BranchReduceJob<T, G> job) {
    final Processor<T, G> processor = job.constructProcessor();
    final Context<T, G> ctxt = new Context<T, G>(job.getConfiguration(), job.constructGlobalState());
    processor.initialize(ctxt);

    // Get the initial set of tasks.
    processor.execute(job.constructInitialTask(), ctxt);

    List<Future<?>> inFlight = Lists.newLinkedList();
    do {
        // Start more tasks when there is work to do.
        while (!ctxt.isTaskQueueEmpty()) {
            inFlight.add(executor.submit(new Runnable() {
                @Override
                public void run() {
                    processor.execute(ctxt.take(), ctxt);
                }
            }));
        }

        // Wait a second.
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
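            // Interrupted while sleeping; fall through and re-check the in-flight tasks.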

        }

        // Check on everyone's status.
        Iterator<Future<?>> iter = inFlight.iterator();
        while (iter.hasNext()) {
            Future<?> f = iter.next();
            if (f.isDone()) {
                iter.remove();
            }
        }

    } while (!inFlight.isEmpty() && !ctxt.isTaskQueueEmpty());

    processor.cleanup(ctxt);
    return ctxt;
}