Example usage for com.google.common.collect Lists newArrayListWithCapacity

List of usage examples for com.google.common.collect Lists newArrayListWithCapacity

Introduction

In this page you can find the example usage for com.google.common.collect Lists newArrayListWithCapacity.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithCapacity(int initialArraySize) 

Source Link

Document

Creates an ArrayList instance backed by an array with the specified initial size; it simply delegates to the ArrayList#ArrayList(int) constructor.

Usage

From source file:net.derquinse.bocas.ZipBocas.java

/**
 * Stores every entry of a loaded zip file in the repository.
 * @param data Loaded zip file; may be {@code null} or empty.
 * @return A map from zip entry names to the keys of their stored values;
 *         empty when {@code data} is {@code null} or has no entries.
 * @throws BocasException if an error occurs while storing.
 * @throws IllegalStateException if the repository returns an unexpected number of keys.
 */
public Map<String, ByteString> putZip(LoadedZipFile data) {
    if (data == null || data.isEmpty()) {
        return ImmutableMap.of();
    }
    final int entryCount = data.size();
    // Capture names and values in parallel lists so the bulk put keeps ordering.
    final List<String> entryNames = Lists.newArrayListWithCapacity(entryCount);
    final List<MemoryByteSource> entryValues = Lists.newArrayListWithCapacity(entryCount);
    for (Entry<String, MemoryByteSource> entry : data.entrySet()) {
        entryNames.add(entry.getKey());
        entryValues.add(entry.getValue());
    }
    final List<ByteString> entryKeys = putAll(entryValues);
    if (entryKeys.size() != entryCount) {
        throw new IllegalStateException("Invalid number of keys");
    }
    // Re-associate each entry name with the key returned at the same position.
    final ImmutableMap.Builder<String, ByteString> result = ImmutableMap.builder();
    for (int i = 0; i < entryCount; i++) {
        result.put(entryNames.get(i), entryKeys.get(i));
    }
    return result.build();
}

From source file:org.elasticsearch.search.fetch.matchedfilters.MatchedFiltersFetchSubPhase.java

@Override
public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
    // Collect the names of all named filters that match the current hit.
    List<String> matched = Lists.newArrayListWithCapacity(2);
    for (Map.Entry<String, Filter> namedFilter : context.parsedQuery().namedFilters().entrySet()) {
        try {
            // A null acceptDocs is fine here: only the single hit's doc id is tested below.
            DocIdSet docIdSet = namedFilter.getValue().getDocIdSet(hitContext.readerContext(), null);
            if (docIdSet == null) {
                continue;
            }
            Bits docSet = DocIdSets.toSafeBits(hitContext.reader(), docIdSet);
            if (docSet.get(hitContext.docId())) {
                matched.add(namedFilter.getKey());
            }
        } catch (IOException ignored) {
            // Best effort: a filter that fails to produce a doc id set simply does not match.
        }
    }
    hitContext.hit().matchedFilters(matched.toArray(new String[matched.size()]));
}

From source file:org.apache.phoenix.mapreduce.index.PhoenixIndexDBWritable.java

@Override
public void readFields(ResultSet resultSet) throws SQLException {
    // Resolve the column count lazily, once per mapper, from the first result set seen.
    if (columnCount == -1) {
        this.columnCount = resultSet.getMetaData().getColumnCount();
    }

    // Snapshot every column value of the current row; JDBC columns are 1-based.
    values = Lists.newArrayListWithCapacity(columnCount);
    for (int column = 1; column <= columnCount; column++) {
        values.add(resultSet.getObject(column));
    }
}

From source file:com.twitter.graphjet.algorithms.counting.tweet.TopSecondDegreeByCountTweetRecsGenerator.java

/**
 * Generates tweet recommendations from the aggregated second-degree node info.
 *
 * @param request       topSecondDegreeByCount request
 * @param nodeInfoList  a list of node info containing engagement social proof and weights
 * @return a list of tweet recommendations (reversed poll order of the bounded priority queue)
 */
public static List<RecommendationInfo> generateTweetRecs(TopSecondDegreeByCountRequestForTweet request,
        List<NodeInfo> nodeInfoList) {
    int maxNumResults = GeneratorHelper.getMaxNumResults(request, RecommendationType.TWEET);
    int minUserSocialProofSize = GeneratorHelper.getMinUserSocialProofSize(request, RecommendationType.TWEET);
    byte[] validSocialProofs = request.getSocialProofTypes();

    // Bounded candidate queue; ordering comes from NodeInfo's natural ordering
    // (presumably by weight — confirm against NodeInfo.compareTo).
    PriorityQueue<NodeInfo> topResults = new PriorityQueue<NodeInfo>(maxNumResults);

    // handling specific rules of tweet recommendations
    for (NodeInfo nodeInfo : nodeInfoList) {
        // Skip a candidate only when BOTH checks fail: every individual social proof
        // is below minUserSocialProofSize AND every configured union of social proof
        // types is also below the threshold. Passing either check keeps the candidate.
        if (isLessThanMinUserSocialProofSize(nodeInfo.getSocialProofs(), validSocialProofs,
                minUserSocialProofSize) &&
        // do not return if size of each social proof union is less than minUserSocialProofSize.
                isLessThanMinUserSocialProofSizeCombined(nodeInfo.getSocialProofs(), minUserSocialProofSize,
                        request.getSocialProofTypeUnions())) {
            continue;
        }
        GeneratorHelper.addResultToPriorityQueue(topResults, nodeInfo, maxNumResults);
    }

    // Drain the queue (poll yields the least element first) into the output list.
    List<RecommendationInfo> outputResults = Lists.newArrayListWithCapacity(topResults.size());
    while (!topResults.isEmpty()) {
        NodeInfo nodeInfo = topResults.poll();
        outputResults.add(new TweetRecommendationInfo(TWEET_ID_MASK.restore(nodeInfo.getValue()),
                nodeInfo.getWeight(), GeneratorHelper.pickTopSocialProofs(nodeInfo.getSocialProofs(),
                        request.getMaxUserSocialProofSize())));
    }
    // Reverse so the list runs from the greatest-priority candidate downwards.
    Collections.reverse(outputResults);

    return outputResults;
}

From source file:org.elasticsearch.search.facets.FacetsPhase.java

@Override
public void execute(SearchContext context) throws ElasticSearchException {
    // Nothing to do when the request asked for no facets.
    if (context.facets() == null) {
        return;
    }
    if (context.queryResult().facets() != null) {
        // no need to compute the facets twice, they should be computed on a per context basis
        return;
    }

    SearchContextFacets contextFacets = context.facets();

    List<Facet> facets = Lists.newArrayListWithCapacity(2);
    if (contextFacets.queryFacets() != null) {
        for (SearchContextFacets.QueryFacet queryFacet : contextFacets.queryFacets()) {
            if (queryFacet.global()) {
                // Global facet: count matches of the facet query over the whole index,
                // independent of the main query, via a cached constant-score wrapper.
                try {
                    Query globalQuery = new ConstantScoreQuery(
                            context.filterCache().cache(new QueryWrapperFilter(queryFacet.query())));
                    long count = Lucene.count(context.searcher(), globalQuery, -1.0f);
                    facets.add(new CountFacet(queryFacet.name(), count));
                } catch (Exception e) {
                    throw new FacetPhaseExecutionException(queryFacet.name(),
                            "Failed to execute global facet [" + queryFacet.query() + "]", e);
                }
            } else {
                // Non-global facet: count within the main query's result set, choosing
                // the counting strategy from the configured query execution type.
                Filter facetFilter = new QueryWrapperFilter(queryFacet.query());
                facetFilter = context.filterCache().cache(facetFilter);
                long count;
                // if we already have the doc id set, then use idset since its faster
                if (context.searcher().docIdSet() != null
                        || contextFacets.queryType() == SearchContextFacets.QueryExecutionType.IDSET) {
                    count = executeQueryIdSetCount(context, queryFacet, facetFilter);
                } else if (contextFacets.queryType() == SearchContextFacets.QueryExecutionType.COLLECT) {
                    count = executeQueryCollectorCount(context, queryFacet, facetFilter);
                } else {
                    throw new ElasticSearchIllegalStateException(
                            "No matching for type [" + contextFacets.queryType() + "]");
                }
                facets.add(new CountFacet(queryFacet.name(), count));
            }
        }
    }

    context.queryResult().facets(new Facets(facets));
}

From source file:com.cloudera.cdk.data.filesystem.PathIterator.java

private boolean advance() {
    // Walk remaining directories until one yields at least one visible file.
    while (directories.hasNext()) {
        final Path directory = directories.next();
        final FileStatus[] stats;
        try {
            stats = fs.listStatus(directory, PathFilters.notHidden());
        } catch (IOException ex) {
            throw new DatasetException("Cannot list files in " + directory, ex);
        }
        final List<Path> candidates = Lists.newArrayListWithCapacity(stats.length);
        for (FileStatus stat : stats) {
            if (stat.isFile()) {
                candidates.add(stat.getPath());
            }
        }
        if (!candidates.isEmpty()) {
            this.files = candidates.iterator();
            return true;
        }
        // Directory had no plain files; keep scanning.
    }
    return false;
}

From source file:org.jetbrains.jet.lang.resolve.java.resolver.JavaAnnotationResolver.java

@NotNull
public List<AnnotationDescriptor> resolveAnnotations(@NotNull PsiModifierListOwner owner,
        @NotNull PostponedTasks tasks) {
    // Resolve each PSI annotation on the owner; unresolvable ones are silently dropped.
    PsiAnnotation[] psiAnnotations = getAllAnnotations(owner);
    List<AnnotationDescriptor> resolved = Lists.newArrayListWithCapacity(psiAnnotations.length);
    for (PsiAnnotation psiAnnotation : psiAnnotations) {
        AnnotationDescriptor descriptor = resolveAnnotation(psiAnnotation, tasks);
        if (descriptor == null) {
            continue;
        }
        resolved.add(descriptor);
    }
    return resolved;
}

From source file:com.google.template.soy.soytree.PrintDirectiveNode.java

/**
 * Copy constructor: deep-copies the argument expressions of the original node.
 * @param orig The node to copy.
 * @param copyState State carried through the copy operation.
 */
private PrintDirectiveNode(PrintDirectiveNode orig, CopyState copyState) {
    super(orig, copyState);
    this.name = orig.name;
    this.argsText = orig.argsText;
    // Copy each argument expression individually so the new node owns its own trees.
    ImmutableList.Builder<ExprRootNode> copiedArgs = ImmutableList.builder();
    for (ExprRootNode arg : orig.args) {
        copiedArgs.add(arg.copy(copyState));
    }
    this.args = copiedArgs.build();
}

From source file:at.molindo.notify.dao.dummy.DummyNotificationDAO.java

@Override
public List<Notification> getRecent(String userId, Set<Type> types, int first, int count) {
    // Only the dummy user, asking for PRIVATE notifications from offset 0 with a
    // positive count, gets a (single-element) result; everything else is empty.
    boolean matches = DummyUtils.USER_ID.equals(userId) && types.contains(Type.PRIVATE)
            && first == 0 && count > 0;
    if (!matches) {
        return Lists.newArrayListWithCapacity(0);
    }
    return Lists.newArrayList(getNext());
}

From source file:org.terasology.entitySystem.metadata.extension.EntityRefTypeHandler.java

public List<EntityRef> deserializeList(EntityData.Value value) {
    // Turn each serialized integer id back into an entity reference, preserving order.
    List<EntityRef> refs = Lists.newArrayListWithCapacity(value.getIntegerCount());
    for (Integer id : value.getIntegerList()) {
        refs.add(entityManager.createEntityRefWithId(id));
    }
    return refs;
}