Example usage for com.google.common.collect.Lists.newLinkedList

List of usage examples for com.google.common.collect.Lists.newLinkedList

Introduction

On this page you can find example usages of com.google.common.collect.Lists.newLinkedList.

Prototype

@GwtCompatible(serializable = true)
public static <E> LinkedList<E> newLinkedList() 

Document

Creates a mutable, empty LinkedList instance (for Java 6 and earlier).
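A minimal standalone sketch of the call (variable names are arbitrary; on Java 7 and later, new LinkedList<>() is usually preferred, which is why the documentation scopes this factory to Java 6 and earlier):

import com.google.common.collect.Lists;

import java.util.LinkedList;

// Creates an empty, mutable LinkedList; the element type is inferred from the target.
LinkedList<String> names = Lists.newLinkedList();
names.add("alice");
names.addFirst("bob"); // LinkedList/Deque operations are available on the returned instance

// The Iterable-accepting overload copies existing elements into a new LinkedList.
LinkedList<String> copy = Lists.newLinkedList(names);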

Usage

From source file:com.intelligentsia.dowsers.entity.meta.provider.MetaEntityProviders.java

/**
 * Builds a {@link MetaEntityProvider} which aggregates, for each directory
 * under root, a {@link MetaEntityProviderFileSystem} that selects the last
 * version.
 * 
 * This permits defining meta entities from several extensions, with
 * version management.
 * 
 * @param reference
 *            {@link Reference}
 * @param root
 *            Root {@link File}
 * @param entityMapper
 * @param addAnalyzer
 *            if true add {@link MetaEntityProviderAnalyzer} in first
 *            {@link MetaEntityProvider}
 * @return {@link MetaEntityProvider} instance
 */
public static MetaEntityProvider newMetaEntityProvider(final Reference reference, final File root,
        final EntityMapper entityMapper, final boolean addAnalyzer) {
    // for each directory under root, build a file meta entity provider and
    // select last version under each
    final Collection<MetaEntityProvider> metaEntityProviders = Lists.newLinkedList();
    if (addAnalyzer) {
        metaEntityProviders.add(newMetaEntityProviderAnalyzer());
    }
    for (final File file : root.listFiles()) {
        if (file.isDirectory()) {
            metaEntityProviders.add(selectLastVersion(newMetaEntityProvider(file, entityMapper)));
        }
    }
    return newMetaEntityProvider(metaEntityProviders);
}

From source file:com.facebook.buck.util.hash.AppendingHasher.java

/**
 * Creates a new {@link AppendingHasher} backed by a sequence of {@code numHashers}
 * {@link Hasher}s created from the specified {@link HashFunction}.
 */
public AppendingHasher(HashFunction hashFunction, int numHashers) {
    Preconditions.checkNotNull(hashFunction);
    Preconditions.checkArgument(numHashers > 0);
    LinkedList<Hasher> hashers = Lists.newLinkedList();
    for (int i = 0; i < numHashers; ++i) {
        hashers.add(hashFunction.newHasher());
    }
    this.hashers = hashers;
}
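The constructor above only builds the backing sequence of hashers; as a rough, hedged sketch of how such a sequence might be exercised with public Guava APIs (placeholder code, not Buck's actual implementation):

import com.google.common.collect.Lists;
import com.google.common.hash.HashCode;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;

import java.util.LinkedList;

// Placeholder sketch: three independent hashers created from one HashFunction.
HashFunction hashFunction = Hashing.sha256();
LinkedList<Hasher> hashers = Lists.newLinkedList();
for (int i = 0; i < 3; i++) {
    hashers.add(hashFunction.newHasher());
}
// Feed the same data to every hasher, then take the digest of the first one.
for (Hasher hasher : hashers) {
    hasher.putInt(42);
}
HashCode first = hashers.removeFirst().hash();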

From source file:cc.recommenders.evaluation.queries.PartialUsageQueryBuilder.java

@Override
public List<Query> createQueries(Usage usage) {

    // TODO re-implement this! (ad-hoc implementation)

    int numCalls = calcCountFor(usage.getReceiverCallsites().size());
    int numParams = calcCountFor(usage.getParameterCallsites().size());

    int iteration = 0;
    Set<Set<CallSite>> paths = Sets.newLinkedHashSet();
    while (paths.size() < numOfQueries && iteration++ < 100) {

        Set<CallSite> calls = getRandom(numCalls, usage.getReceiverCallsites());
        Set<CallSite> params = getRandom(numParams, usage.getParameterCallsites());

        Set<CallSite> path = mergeAndShuffle(calls, params);

        if (!paths.contains(path)) {
            paths.add(path);
        }
    }

    List<Query> qs = Lists.newLinkedList();
    for (Set<CallSite> path : paths) {
        Query query = createAsCopyFrom(usage);
        query.setAllCallsites(path);
        qs.add(query);
    }

    return qs;
}

From source file:com.textocat.textokit.tokenizer.simple.PostTokenizer.java

/**
 * {@inheritDoc}
 */
@Override
public void process(JCas jCas) throws AnalysisEngineProcessException {
    CAS cas = jCas.getCas();
    mergedMap = Maps.newHashMap();
    wordType = jCas.getCasType(W.type);
    numType = jCas.getCasType(NUM.type);
    try {
        AnnotationIndex<Annotation> tokenBases = jCas.getAnnotationIndex(TokenBase.typeIndexID);
        // sequence of tokens that does not contain whitespace
        List<Token> curTokenSeq = Lists.newLinkedList();
        for (Annotation tokenBase : tokenBases) {
            if (tokenBase instanceof WhiteSpace) {
                handle(cas, ImmutableList.copyOf(curTokenSeq));
                curTokenSeq.clear();
            } else {
                // it's Token
                curTokenSeq.add((Token) tokenBase);
            }
        }
        // handle last seq
        handle(cas, ImmutableList.copyOf(curTokenSeq));
        curTokenSeq.clear();
        // index/unindex
        Set<String> mergedTokenStrings = Sets.newHashSet();
        for (Map.Entry<AnnotationFS, Collection<? extends AnnotationFS>> entry : mergedMap.entrySet()) {
            jCas.addFsToIndexes(entry.getKey());
            mergedTokenStrings.add(entry.getKey().getCoveredText());
            for (AnnotationFS anno : entry.getValue()) {
                jCas.removeFsFromIndexes(anno);
            }
        }
        getLogger().debug("Merged tokens: " + mergedTokenStrings);
    } finally {
        mergedMap.clear();
    }
}

From source file:com.google.wave.api.data.converter.v22.EventDataConverterV22.java

@Override
public WaveletData toWaveletData(Wavelet wavelet, Conversation conversation,
        EventMessageBundle eventMessageBundle) {
    WaveletData waveletData = super.toWaveletData(wavelet, conversation, eventMessageBundle);
    List<String> blipIds = Lists.newLinkedList();
    for (ConversationBlip conversationBlip : conversation.getRootThread().getBlips()) {
        blipIds.add(conversationBlip.getId());
    }
    waveletData.setRootThread(new BlipThread("", -1, blipIds, null));
    return waveletData;
}

From source file:org.sosy_lab.cpachecker.cpa.wp.segkro.ExtractNewPreds.java

public List<BooleanFormula> extractNewPreds(BooleanFormula pInputFormula) {
    List<BooleanFormula> result = Lists.newArrayList();

    List<BooleanFormula> l = Lists.newArrayList();
    LinkedList<BooleanFormula> lPrime = Lists.newLinkedList();

    // Start with the list of basic predicates
    // (extracted from the formula)
    Set<BooleanFormula> sb = extractAtoms(pInputFormula);

    lPrime.addAll(sb);

    // Keep applying the rules until no new predicates get produced
    do {
        l.clear();
        l.addAll(lPrime);

        for (Rule r : rules) {
            // We have to iterate over a tuple that is element of l^k.

            int k = 1; // r.getPremises().size();
            if (k == 3 - 2) {
                throw new UnsupportedOperationException("Fixme");
            }

            List<List<BooleanFormula>> dimensions = new ArrayList<>(k);

            for (int i = 0; i < k; i++) {
                dimensions.add(l);
            }

            for (List<BooleanFormula> tuple : Cartesian.product(dimensions)) {
                boolean existsTnotInSb = false;

                for (BooleanFormula t : tuple) {
                    if (!sb.contains(t)) {
                        existsTnotInSb = true;
                    }

                    boolean isElimOrEq = r instanceof EliminationRule || r instanceof EquivalenceRule;

                    if (!isElimOrEq || existsTnotInSb) {
                        // Store predicates according to their priority
                        // "in a position of the list that is beyond the positions of the associated antecedents"
                        Set<BooleanFormula> s = r.apply(t);
                        List<Integer> positions = Lists.newArrayList();

                        for (int j = 0; j < k; j++) {
                            if (equalFormula(l.get(j), tuple.get(j))) {
                                positions.add(j); // TODO: This might be wrong
                            }
                        }
                        // TODO
                        int pos = ordering.max(positions);
                        for (BooleanFormula p : s) {
                            if (!lPrime.contains(p)) {
                                // insert p after position pos in lPrime
                                lPrime.add(pos + 1, p);
                            }
                        }
                    }
                }
            }
        }

    } while (l.equals(lPrime)); // TODO: Does this compare what was intended?

    // Store new predicates according to their priority
    return result;
}
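A minimal standalone sketch of the positional-insertion idiom used above (lPrime.add(pos + 1, p)), with placeholder values:

import com.google.common.collect.Lists;

import java.util.LinkedList;

// Placeholder sketch: insert an element right after a chosen position.
LinkedList<String> predicates = Lists.newLinkedList();
predicates.add("a");
predicates.add("c");
int pos = 0;                  // position of the associated antecedent
predicates.add(pos + 1, "b"); // predicates is now [a, b, c]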

From source file:ru.frostman.web.plugin.Plugin.java

public List<MethodInterceptor> getMethodInterceptors() {
    return Lists.newLinkedList();
}

From source file:com.github.autermann.yaml.nodes.YamlPairsNode.java

/**
 * Creates a new {@link YamlPairsNode}.
 *
 * @param factory the factory to create children with
 */
public YamlPairsNode(YamlNodeFactory factory) {
    super(factory);
    LinkedHashMap<YamlNode, Collection<YamlNode>> map = Maps.newLinkedHashMap();
    Supplier<List<YamlNode>> supplier = LinkedListSupplier.instance();
    this.multiMap = Multimaps.newListMultimap(map, supplier);
    this.value = Lists.newLinkedList();
}
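The snippet above obtains its linked-list supplier from the library's LinkedListSupplier; a Guava-only alternative (a hedged sketch with placeholder String keys and values, using a method reference to Lists.newLinkedList instead) looks like this:

import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimaps;

import java.util.Collection;
import java.util.Map;

// Placeholder sketch: a list multimap whose value collections are LinkedLists.
Map<String, Collection<String>> backingMap = Maps.newLinkedHashMap();
ListMultimap<String, String> multiMap =
        Multimaps.newListMultimap(backingMap, Lists::newLinkedList);
multiMap.put("key", "first");
multiMap.put("key", "second"); // both values live in one LinkedList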

From source file:org.richfaces.cdk.templatecompiler.statements.RenderFragmentStatement.java

public List<String> getArguments() {
    if (arguments == null) {
        arguments = Lists.newLinkedList();

        final Fragment fragment = fragmentStore.getFragment(methodName);

        for (Argument argument : fragment.getAllArguments()) {
            String argumentValue = getAttributeValue(argument);
            arguments.add(argumentValue);
        }
    }

    return arguments;
}

From source file:org.apache.tez.log.analyzer.TezGroupAnalyzer.java

@Override
public Object getResult() {
    List<String> result = Lists.newLinkedList();
    result.addAll(interestedLines);
    result.addAll(cantDoAnythingList);
    return result;
}