Example usage for com.google.common.collect Lists newArrayListWithExpectedSize

Introduction

This page collects example usages of com.google.common.collect.Lists#newArrayListWithExpectedSize drawn from open-source projects.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithExpectedSize(int estimatedSize) 

Document

Creates an ArrayList instance to hold estimatedSize elements, plus an unspecified amount of padding; you almost certainly mean to call Lists#newArrayListWithCapacity(int) (see that method's documentation for further advice on usage).
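
To ground the signature above, here is a minimal, self-contained sketch (the class name, the estimate of 100 elements, and the printed output are illustrative assumptions, not taken from the examples below) contrasting the expected-size factory with newArrayListWithCapacity:

import com.google.common.collect.Lists;

import java.util.ArrayList;
import java.util.List;

public class ExpectedSizeSketch {
    public static void main(String[] args) {
        int estimatedSize = 100; // a rough estimate, not a hard upper bound

        // Backing array is sized for ~100 elements plus unspecified padding;
        // the list still grows normally if more elements are added.
        List<String> names = Lists.newArrayListWithExpectedSize(estimatedSize);
        for (int i = 0; i < estimatedSize; i++) {
            names.add("name-" + i);
        }

        // When the element count is known exactly, newArrayListWithCapacity
        // allocates precisely that capacity with no extra padding.
        ArrayList<String> exact = Lists.newArrayListWithCapacity(names.size());
        exact.addAll(names);

        System.out.println(names.size() + " / " + exact.size()); // 100 / 100
    }
}

The real-world usages below follow the same pattern: the expected size comes from something already known, such as children.size(), batch.length(), or buffer.length / 8, so the list can usually be filled without intermediate resizing.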

Usage

From source file:org.apache.shindig.social.opensocial.service.JsonRpcServlet.java

protected void dispatchBatch(JSONArray batch, HttpServletRequest servletRequest,
        HttpServletResponse servletResponse, SecurityToken token) throws JSONException, IOException {
    // Collect one future per batch entry, preserving request order
    List<Future<?>> responses = Lists.newArrayListWithExpectedSize(batch.length());

    // Gather all Futures.  We do this up front so that
    // the first call to get() comes after all futures are created,
    // which allows for implementations that batch multiple Futures
    // into single requests.
    for (int i = 0; i < batch.length(); i++) {
        JSONObject batchObj = batch.getJSONObject(i);
        RpcRequestItem requestItem = new RpcRequestItem(batchObj, token, jsonConverter);
        responses.add(handleRequestItem(requestItem, servletRequest));
    }

    // Resolve each Future into a response.
    // TODO: should use shared deadline across each request
    JSONArray result = new JSONArray();
    for (int i = 0; i < batch.length(); i++) {
        JSONObject batchObj = batch.getJSONObject(i);
        String key = null;
        if (batchObj.has("id")) {
            key = batchObj.getString("id");
        }
        result.put(getJSONResponse(key, getResponseItem(responses.get(i))));
    }
    servletResponse.getWriter().write(result.toString());
}

From source file:org.apache.distributedlog.impl.ZKLogMetadataStore.java

@Override
public CompletableFuture<Iterator<String>> getLogs(String logNamePrefix) {
    final CompletableFuture<Iterator<String>> promise = new CompletableFuture<Iterator<String>>();
    final String nsRootPath;
    if (StringUtils.isEmpty(logNamePrefix)) {
        nsRootPath = namespace.getPath();
    } else {
        nsRootPath = namespace.getPath() + "/" + logNamePrefix;
    }
    try {
        final ZooKeeper zk = zkc.get();
        zk.sync(nsRootPath, new AsyncCallback.VoidCallback() {
            @Override
            public void processResult(int syncRc, String syncPath, Object ctx) {
                if (KeeperException.Code.OK.intValue() == syncRc) {
                    zk.getChildren(nsRootPath, false, new AsyncCallback.Children2Callback() {
                        @Override
                        public void processResult(int rc, String path, Object ctx, List<String> children,
                                Stat stat) {
                            if (KeeperException.Code.OK.intValue() == rc) {
                                List<String> results = Lists.newArrayListWithExpectedSize(children.size());
                                for (String child : children) {
                                    if (!isReservedStreamName(child)) {
                                        results.add(child);
                                    }
                                }
                                promise.complete(results.iterator());
                            } else if (KeeperException.Code.NONODE.intValue() == rc) {
                                List<String> streams = Lists.newLinkedList();
                                promise.complete(streams.iterator());
                            } else {
                                promise.completeExceptionally(new ZKException(
                                        "Error reading namespace " + nsRootPath, KeeperException.Code.get(rc)));
                            }
                        }
                    }, null);
                } else if (KeeperException.Code.NONODE.intValue() == syncRc) {
                    List<String> streams = Lists.newLinkedList();
                    promise.complete(streams.iterator());
                } else {
                    promise.completeExceptionally(new ZKException("Error reading namespace " + nsRootPath,
                            KeeperException.Code.get(syncRc)));
                }
            }
        }, null);
        zkc.get();
    } catch (ZooKeeperClient.ZooKeeperConnectionException e) {
        promise.completeExceptionally(e);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        promise.completeExceptionally(e);
    }
    return promise;
}

From source file:com.android.build.gradle.internal.LintGradleClient.java

/**
 * Given a list of results from separate variants, merge them into a single
 * list of warnings, and mark each warning with the variants in which it appears.
 * @param warningMap a map from variant to corresponding warnings
 * @param project the project model
 * @return a merged list of issues
 */
@NonNull
public static List<Warning> merge(@NonNull Map<Variant, List<Warning>> warningMap,
        @NonNull AndroidProject project) {
    // Easy merge?
    if (warningMap.size() == 1) {
        return warningMap.values().iterator().next();
    }
    int maxCount = 0;
    for (List<Warning> warnings : warningMap.values()) {
        int size = warnings.size();
        maxCount = Math.max(size, maxCount);
    }
    if (maxCount == 0) {
        return Collections.emptyList();
    }

    int totalVariantCount = project.getVariants().size();

    List<Warning> merged = Lists.newArrayListWithExpectedSize(2 * maxCount);

    // Map from issue to message to line number to file name to canonical warning
    Map<Issue, Map<String, Map<Integer, Map<String, Warning>>>> map = Maps
            .newHashMapWithExpectedSize(2 * maxCount);

    for (Map.Entry<Variant, List<Warning>> entry : warningMap.entrySet()) {
        Variant variant = entry.getKey();
        List<Warning> warnings = entry.getValue();
        for (Warning warning : warnings) {
            Map<String, Map<Integer, Map<String, Warning>>> messageMap = map.get(warning.issue);
            if (messageMap == null) {
                messageMap = Maps.newHashMap();
                map.put(warning.issue, messageMap);
            }
            Map<Integer, Map<String, Warning>> lineMap = messageMap.get(warning.message);
            if (lineMap == null) {
                lineMap = Maps.newHashMap();
                messageMap.put(warning.message, lineMap);
            }
            Map<String, Warning> fileMap = lineMap.get(warning.line);
            if (fileMap == null) {
                fileMap = Maps.newHashMap();
                lineMap.put(warning.line, fileMap);
            }
            String fileName = warning.file != null ? warning.file.getName() : "<unknown>";
            Warning canonical = fileMap.get(fileName);
            if (canonical == null) {
                canonical = warning;
                fileMap.put(fileName, canonical);
                canonical.variants = Sets.newHashSet();
                canonical.gradleProject = project;
                merged.add(canonical);
            }
            canonical.variants.add(variant);
        }
    }

    // Clear out variants on any nodes that define all
    for (Warning warning : merged) {
        if (warning.variants != null && warning.variants.size() == totalVariantCount) {
            // If this error is present in all variants, just clear it out
            warning.variants = null;
        }

    }

    Collections.sort(merged);
    return merged;
}

From source file:com.cloudera.science.ml.kmeans.core.KMeansEvaluation.java

private void init() {
    predictionStrengths = Lists.newArrayListWithExpectedSize(testCenters.size());
    trainCosts = Lists.newArrayListWithExpectedSize(testCenters.size());
    testCosts = Lists.newArrayListWithExpectedSize(testCenters.size());
    stableClusters = Lists.newArrayListWithExpectedSize(testCenters.size());
    stablePoints = Lists.newArrayListWithExpectedSize(testCenters.size());

    for (int i = 0; i < testCenters.size(); i++) {
        Centers test = testCenters.get(i);
        Centers train = trainCenters.get(i);
        double trainCost = 0.0;
        double testCost = 0.0;
        double[][] assignments = new double[test.size()][train.size()];
        double totalPoints = 0.0;
        for (Weighted<Vector> wv : testPoints) {
            double wt = wv.weight();
            totalPoints += wt;
            Vector v = wv.thing();
            int testId = test.indexOfClosest(v);
            testCost += wt * v.getDistanceSquared(test.get(testId));
            int trainId = train.indexOfClosest(wv.thing());
            trainCost += wt * v.getDistanceSquared(train.get(trainId));
            assignments[testId][trainId] += wt;
        }
        trainCosts.add(trainCost);
        testCosts.add(testCost);

        double minScore = Double.POSITIVE_INFINITY;
        double points = 0;
        double clusters = 0;
        List<String> details = Lists.newArrayList();
        for (int j = 0; j < assignments.length; j++) {
            double[] assignment = assignments[j];
            double total = 0.0;
            double same = 0.0;
            for (double a : assignment) {
                total += a;
                same += a * (a - 1);
            }
            double score = total > 1 ? same / (total * (total - 1)) : 1.0;
            // Only consider clusters that contain a non-trivial number of obs
            if (total > assignment.length && score < minScore) {
                minScore = score;
            }
            if (score > 0.8) { // stability threshold
                clusters++;
                points += total;
            }
            if (detailsFile != null) {
                details.add(String.format("%d,%d,%d,%.4f", i, j, (int) total, score));
            }
        }
        predictionStrengths.add(minScore);
        stableClusters.add(clusters / assignments.length);
        stablePoints.add(points / totalPoints);
        if (detailsFile != null) {
            try {
                if (i == 0) {
                    Files.write("ClusteringId,CenterId,NumPoints,PredictionStrength\n", detailsFile,
                            Charsets.UTF_8);
                }
                Files.append(NEWLINE_JOINER.join(details) + '\n', detailsFile, Charsets.UTF_8);
            } catch (IOException e) {
                LOG.warn("Exception writing evaluation details file: {}", detailsFile, e);
            }
        }
    }
}

From source file:org.gradoop.flink.algorithms.fsm.canonicalization.CAMLabeler.java

@Override
public String label(Embedding embedding) {

    Map<Integer, String> vertices = embedding.getVertices();
    Map<Integer, FSMEdge> edges = embedding.getEdges();

    Map<Integer, Map<Integer, Set<Integer>>> adjacencyMatrix = createAdjacencyMatrix(vertices, edges);

    List<String> adjacencyListLabels = Lists.newArrayListWithCapacity(vertices.size());

    // for each vertex
    for (Map.Entry<Integer, String> vertex : vertices.entrySet()) {

        int vertexId = vertex.getKey();
        String adjacencyListLabel = vertex.getValue() + LIST_START;

        Map<Integer, Set<Integer>> adjacencyList = adjacencyMatrix.get(vertexId);

        List<String> entryLabels = Lists.newArrayListWithCapacity(adjacencyList.size());

        // for each adjacent vertex
        for (Map.Entry<Integer, Set<Integer>> entry : adjacencyList.entrySet()) {

            int adjacentVertexId = entry.getKey();
            String entryLabel = vertices.get(adjacentVertexId);

            // for each edge
            Set<Integer> incidentEdgeIds = entry.getValue();

            if (incidentEdgeIds.size() == 1) {
                FSMEdge incidentEdge = edges.get(incidentEdgeIds.iterator().next());

                entryLabel += format(incidentEdge, vertexId);

            } else {

                List<String> incidentEdges = Lists.newArrayListWithExpectedSize(incidentEdgeIds.size());

                for (int incidentEdgeId : incidentEdgeIds) {
                    incidentEdges.add(format(edges.get(incidentEdgeId), vertexId));
                }

                Collections.sort(incidentEdges);
                entryLabel += StringUtils.join(incidentEdges, "");
            }

            entryLabels.add(entryLabel);
        }

        Collections.sort(entryLabels);
        adjacencyListLabel += StringUtils.join(entryLabels, NEW_ENTRY);
        adjacencyListLabels.add(adjacencyListLabel);
    }

    Collections.sort(adjacencyListLabels);
    return StringUtils.join(adjacencyListLabels, NEW_LIST);
}

From source file:org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.builder.RejectedModuleMessageBuilder.java

private static void renderReason(StringBuilder sb, SelectorState selector) {
    ComponentSelectionReasonInternal selectionReason = selector.getSelectionReason();
    if (selectionReason.hasCustomDescriptions()) {
        sb.append(" because of the following reason");
        List<String> reasons = Lists.newArrayListWithExpectedSize(1);
        for (ComponentSelectionDescriptor componentSelectionDescriptor : selectionReason.getDescriptions()) {
            ComponentSelectionDescriptorInternal next = (ComponentSelectionDescriptorInternal) componentSelectionDescriptor;
            if (next.hasCustomDescription()) {
                reasons.add(next.getDescription());
            }
        }
        if (reasons.size() == 1) {
            sb.append(": ").append(reasons.get(0));
        } else {
            sb.append("s: ");
            Joiner.on(", ").appendTo(sb, reasons);
        }
    }
}

From source file:com.google.idea.blaze.base.lang.buildfile.lexer.BuildLexerBase.java

/**
 * Constructs a lexer which tokenizes the contents of the specified InputBuffer. Any errors during
 * lexing are reported on "handler".
 */
public BuildLexerBase(CharSequence input, int initialStackDepth, LexerMode mode) {
    this.buffer = input.toString().toCharArray();
    // Empirical measurements show roughly 1 token per 8 characters in buffer.
    this.tokens = Lists.newArrayListWithExpectedSize(buffer.length / 8);
    this.pos = 0;
    this.openParenStackDepth = initialStackDepth;
    this.mode = mode;

    indentStack.push(0);
    tokenize();
}

From source file:com.google.gitiles.LogServlet.java

@Override
protected void doGet(HttpServletRequest req, HttpServletResponse res) throws IOException {
    GitilesView view = ViewFilter.getView(req);
    Repository repo = ServletUtils.getRepository(req);
    RevWalk walk = null;
    try {
        try {
            walk = newWalk(repo, view);
        } catch (IncorrectObjectTypeException e) {
            res.setStatus(SC_NOT_FOUND);
            return;
        }

        Optional<ObjectId> start = getStart(view.getParameters(), walk.getObjectReader());
        if (start == null) {
            res.setStatus(SC_NOT_FOUND);
            return;
        }

        Map<String, Object> data = Maps.newHashMapWithExpectedSize(5);

        if (!view.getRevision().nameIsId()) {
            List<Map<String, Object>> tags = Lists.newArrayListWithExpectedSize(1);
            for (RevObject o : RevisionServlet.listObjects(walk, view.getRevision().getId())) {
                if (o instanceof RevTag) {
                    tags.add(new TagSoyData(linkifier, req).toSoyData((RevTag) o));
                }
            }
            if (!tags.isEmpty()) {
                data.put("tags", tags);
            }
        }

        Paginator paginator = new Paginator(walk, limit, start.orNull());
        Map<AnyObjectId, Set<Ref>> refsById = repo.getAllRefsByPeeledObjectId();
        List<Map<String, Object>> entries = Lists.newArrayListWithCapacity(limit);
        for (RevCommit c : paginator) {
            entries.add(new CommitSoyData(null, req, repo, walk, view, refsById).toSoyData(c, KeySet.SHORTLOG));
        }

        String title = "Log - ";
        if (view.getOldRevision() != Revision.NULL) {
            title += view.getRevisionRange();
        } else {
            title += view.getRevision().getName();
        }

        data.put("title", title);
        data.put("entries", entries);
        ObjectId next = paginator.getNextStart();
        if (next != null) {
            data.put("nextUrl", copyAndCanonicalize(view).replaceParam(START_PARAM, next.name()).toUrl());
        }
        ObjectId prev = paginator.getPreviousStart();
        if (prev != null) {
            GitilesView.Builder prevView = copyAndCanonicalize(view);
            if (!prevView.getRevision().getId().equals(prev)) {
                prevView.replaceParam(START_PARAM, prev.name());
            }
            data.put("previousUrl", prevView.toUrl());
        }

        render(req, res, "gitiles.logDetail", data);
    } catch (RevWalkException e) {
        log.warn("Error in rev walk", e);
        res.setStatus(SC_INTERNAL_SERVER_ERROR);
        return;
    } finally {
        if (walk != null) {
            walk.release();
        }
    }
}

From source file:org.artifactory.webapp.wicket.page.importexport.repos.ExportRepoPanel.java

public ExportRepoPanel(String string) {
    super(string);
    Form exportForm = new SecureForm("exportForm");
    add(exportForm);

    IModel<String> sourceRepoModel = new PropertyModel<>(this, "sourceRepoKey");
    List<LocalRepoDescriptor> localRepos = repositoryService.getLocalAndCachedRepoDescriptors();
    Collections.sort(localRepos, new LocalRepoAlphaComparator());
    List<String> repoKeys = Lists.newArrayListWithExpectedSize(localRepos.size() + 1);
    //Add the "All" pseudo repository
    repoKeys.add(ImportExportReposPage.ALL_REPOS);
    for (LocalRepoDescriptor localRepo : localRepos) {
        String key = localRepo.getKey();
        repoKeys.add(key);
    }
    DropDownChoice sourceRepoDdc = new DropDownChoice<>("sourceRepo", sourceRepoModel, repoKeys);
    //Needed because getDefaultChoice does not update the actual selection object
    sourceRepoDdc.setDefaultModelObject(ImportExportReposPage.ALL_REPOS);
    exportForm.add(sourceRepoDdc);

    PropertyModel<File> pathModel = new PropertyModel<>(this, "exportToPath");
    final PathAutoCompleteTextField exportToPathTf = new PathAutoCompleteTextField("exportToPath", pathModel);
    exportToPathTf.setMask(PathMask.FOLDERS);
    exportToPathTf.setRequired(true);
    exportForm.add(exportToPathTf);

    FileBrowserButton browserButton = new FileBrowserButton("browseButton", pathModel) {
        @Override
        protected void onOkClicked(AjaxRequestTarget target) {
            super.onOkClicked(target);
            target.add(exportToPathTf);
        }
    };
    browserButton.setMask(PathMask.FOLDERS);
    exportForm.add(browserButton);

    exportForm.add(new StyledCheckbox("m2Compatible", new PropertyModel<Boolean>(this, "m2Compatible")));
    exportForm.add(new HelpBubble("m2CompatibleHelp",
            "Includes Maven 2 repository metadata and checksum files as part of the export"));

    exportForm.add(new StyledCheckbox("excludeMetadata", new PropertyModel<Boolean>(this, "excludeMetadata")));
    exportForm.add(new HelpBubble("excludeMetadataHelp", "Excludes repositories metadata from the export.\n"
            + "(Maven 2 metadata is unaffected by this setting)"));

    StyledCheckbox verboseCheckbox = new StyledCheckbox("verbose", new PropertyModel<Boolean>(this, "verbose"));
    verboseCheckbox.setRequired(false);
    exportForm.add(verboseCheckbox);
    String systemLogsPage = WicketUtils.absoluteMountPathForPage(SystemLogsPage.class);
    exportForm.add(new HelpBubble("verboseHelp",
            "Lowers the log level to debug and redirects the output from the "
                    + "standard log to the import-export log."
                    + "\nHint: You can monitor the log in the <a href=\"" + systemLogsPage
                    + "\">'System Logs'</a> page."));

    TitledAjaxSubmitLink exportButton = new TitledAjaxSubmitLink("export", "Export", exportForm) {
        @Override
        protected void onSubmit(AjaxRequestTarget target, Form form) {
            try {
                Session.get().cleanupFeedbackMessages();
                //If we chose "All" run manual backup to dest dir, else export a single repo
                ImportExportStatusHolder status = new ImportExportStatusHolder();
                ExportSettingsImpl exportSettings = new ExportSettingsImpl(exportToPath, status);
                exportSettings.setIncludeMetadata(!excludeMetadata);
                exportSettings.setM2Compatible(m2Compatible);
                exportSettings.setVerbose(verbose);
                if (ImportExportReposPage.ALL_REPOS.equals(sourceRepoKey)) {
                    backupService.backupRepos(exportToPath, exportSettings);
                } else {
                    repositoryService.exportRepo(sourceRepoKey, exportSettings);
                }
                List<StatusEntry> warnings = status.getWarnings();
                if (!warnings.isEmpty()) {
                    String systemLogsPage = WicketUtils.absoluteMountPathForPage(SystemLogsPage.class);
                    warn(new UnescapedFeedbackMessage(warnings.size()
                            + " Warnings have been produces during the export. Please review the "
                            + "<a href=\"" + systemLogsPage + "\">log</a> for further information."));
                }
                if (status.isError()) {
                    String message = status.getStatusMsg();
                    Throwable exception = status.getException();
                    if (exception != null) {
                        message = exception.getMessage();
                    }
                    error("Failed to export from: " + sourceRepoKey + "' to '" + exportToPath + "'. Cause: "
                            + message);
                } else {
                    info("Successfully exported '" + sourceRepoKey + "' to '" + exportToPath + "'.");
                }
            } catch (Exception e) {
                String message = "Exception occurred during export: ";
                error(message + e.getMessage());
                log.error(message, e);
            }
            AjaxUtils.refreshFeedback(target);
            target.add(form);
        }
    };
    exportForm.add(exportButton);
    exportForm.add(new DefaultButtonBehavior(exportButton));
}

From source file:com.google.gerrit.lucene.OnlineReindexer.java

void activateIndex() {
    indexes.setSearchIndex(index);
    log.info("Using schema version {}", version(index));
    try {
        index.markReady(true);
    } catch (IOException e) {
        log.warn("Error activating new schema version {}", version(index));
    }

    List<ChangeIndex> toRemove = Lists.newArrayListWithExpectedSize(1);
    for (ChangeIndex i : indexes.getWriteIndexes()) {
        if (version(i) != version(index)) {
            toRemove.add(i);
        }
    }
    for (ChangeIndex i : toRemove) {
        try {
            i.markReady(false);
            indexes.removeWriteIndex(version(i));
        } catch (IOException e) {
            log.warn("Error deactivating old schema version {}", version(i));
        }
    }
}