Example usage for com.google.common.collect Lists partition

Introduction

This page collects example usages of com.google.common.collect.Lists#partition, drawn from the source files listed below.

Prototype

public static <T> List<List<T>> partition(List<T> list, int size) 

Document

Returns consecutive sublists of a list, obtained via List#subList(int, int), each of the same size (the final sublist may be smaller).
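
Before the project examples, here is a minimal, self-contained sketch of the behavior described above (the class name PartitionSketch and the sample data are illustrative only, not taken from any of the projects below). Per the Guava documentation, the inner lists are subList views of the source list rather than copies.

import com.google.common.collect.Lists;

import java.util.Arrays;
import java.util.List;

public class PartitionSketch {
    public static void main(String[] args) {
        List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7);

        // Split into consecutive chunks of at most 3 elements each.
        List<List<Integer>> chunks = Lists.partition(numbers, 3);

        // Prints: [[1, 2, 3], [4, 5, 6], [7]]
        System.out.println(chunks);
    }
}

As the examples below show, a common use of the method is to break a large collection into fixed-size batches before issuing database, network, or rendering work per chunk.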

Usage

From source file:org.sonar.css.checks.RuleDescriptionsGenerator.java

private String generateHtmlLessFunctionTable(List<StandardFunction> standardFunctions) {
    StringBuilder html = new StringBuilder("<table style=\"border: 0;\">\n");
    List<List<StandardFunction>> subLists = Lists.partition(standardFunctions, 3);
    for (List<StandardFunction> subList : subLists) {
        html.append("<tr>");
        for (StandardFunction standardCssFunction : subList) {
            List<String> links = standardCssFunction.getLinks().stream().filter(f -> f.contains("lesscss.org"))
                    .collect(Collectors.toList());
            html.append("<td style=\"border: 0; \">");
            if (!links.isEmpty()) {
                html.append("<a target=\"_blank\" href=\"").append(links.get(0)).append("\">");
            }
            html.append("<code>").append(standardCssFunction.getName()).append("</code>");
            if (!links.isEmpty()) {
                html.append("</a>");
            }
            html.append("</code>");
            for (int i = 1; i < links.size(); i++) {
                html.append("&nbsp;&nbsp;<a target=\"_blank\" href=\"").append(links.get(i)).append("\">#")
                        .append(i + 1).append("</a>");
            }
            html.append("</td>\n");
        }
        html.append("</tr>");
    }
    html.append("</table>\n");
    return html.toString();
}

From source file:org.openscada.ae.server.net.ServerConnectionHandler.java

public void dataChangedEvents(final String poolId, final List<Event> addedEvents) {
    for (final List<Event> chunk : Lists.partition(addedEvents, getChunkSize())) {
        final Message message = new Message(Messages.CC_EVENT_POOL_DATA);

        message.getValues().put(MESSAGE_QUERY_ID, new StringValue(poolId));
        message.getValues().put("events", EventMessageHelper.toValue(chunk));

        this.messenger.sendMessage(message);
    }
}

From source file:com.palantir.atlasdb.keyvalue.cassandra.CassandraKeyValueService.java

private Map<Cell, Value> getRowsForSingleHost(final InetAddress host, final String tableName,
        final List<byte[]> rows, final long startTs) {
    try {
        int rowCount = 0;
        final Map<Cell, Value> result = Maps.newHashMap();
        int fetchBatchCount = configManager.getConfig().fetchBatchCount();
        for (final List<byte[]> batch : Lists.partition(rows, fetchBatchCount)) {
            rowCount += batch.size();
            result.putAll(clientPool.runWithPooledResourceOnHost(host,
                    new FunctionCheckedException<Client, Map<Cell, Value>, Exception>() {
                        @Override
                        public Map<Cell, Value> apply(Client client) throws Exception {
                            // We want to get all the columns in the row so set start and end to empty.
                            SliceRange slice = new SliceRange(ByteBuffer.wrap(PtBytes.EMPTY_BYTE_ARRAY),
                                    ByteBuffer.wrap(PtBytes.EMPTY_BYTE_ARRAY), false, Integer.MAX_VALUE);
                            SlicePredicate pred = new SlicePredicate();
                            pred.setSlice_range(slice);

                            List<ByteBuffer> rowNames = Lists.newArrayListWithCapacity(batch.size());
                            for (byte[] r : batch) {
                                rowNames.add(ByteBuffer.wrap(r));
                            }

                            ColumnParent colFam = new ColumnParent(internalTableName(tableName));
                            Map<ByteBuffer, List<ColumnOrSuperColumn>> results = multigetInternal(client,
                                    tableName, rowNames, colFam, pred, readConsistency);
                            Map<Cell, Value> ret = Maps.newHashMap();
                            new ValueExtractor(ret).extractResults(results, startTs, ColumnSelection.all());
                            return ret;
                        }

                        @Override
                        public String toString() {
                            return "multiget_slice(" + tableName + ", " + batch.size() + " rows" + ")";
                        }
                    }));
        }
        if (rowCount > fetchBatchCount) {
            log.warn("Rebatched in getRows a call to " + tableName + " that attempted to multiget " + rowCount
                    + " rows; this may indicate overly-large batching on a higher level.\n"
                    + CassandraKeyValueServices.getFilteredStackTrace("com.palantir"));
        }
        return ImmutableMap.copyOf(result);
    } catch (Exception e) {
        throw Throwables.throwUncheckedException(e);
    }
}

From source file:org.wso2.carbon.analytics.datasource.rdbms.RDBMSAnalyticsRecordStore.java

@Override
public void delete(int tenantId, String tableName, List<String> ids)
        throws AnalyticsException, AnalyticsTableNotAvailableException {
    if (ids.size() == 0) {
        return;
    }
    Connection conn = null;
    List<List<String>> idsSubLists = Lists.partition(ids,
            this.rdbmsQueryConfigurationEntry.getRecordBatchSize());
    try {
        conn = this.getConnection();
        for (List<String> idSubList : idsSubLists) {
            this.delete(conn, tenantId, tableName, idSubList);
        }
    } catch (SQLException e) {
        throw new AnalyticsException("Error in deleting records: " + e.getMessage(), e);
    } finally {
        RDBMSUtils.cleanupConnection(null, null, conn);
    }
}

From source file:org.sonar.css.checks.RuleDescriptionsGenerator.java

private String generateHtmlTableFromListOfStrings(List<String> elements) {
    StringBuilder html = new StringBuilder("<table style=\"border: 0;\">\n");
    List<List<String>> subLists = Lists.partition(elements, 3);
    for (List<String> subList : subLists) {
        html.append("<tr>");
        for (String element : subList) {
            html.append("<td style=\"border: 0; \">");
            html.append("<code>").append(element).append("</code>");
            html.append("</td>\n");
        }
        html.append("</tr>");
    }
    html.append("</table>\n");
    return html.toString();
}

From source file:org.lttng.scope.tmf2.views.ui.timegraph.swtjfx.SwtJfxTimeGraphViewer.java

private static Node prepareTreeContents(TimeGraphTreeRender treeRender, double paneWidth) {
    /* Prepare the tree element objects */
    List<Label> treeElements = treeRender.getAllTreeElements().stream()
            // TODO Put as a real tree. TreeView ?
            .map(elem -> new Label(elem.getName())).peek(label -> {
                label.setPrefHeight(ENTRY_HEIGHT);
                label.setPadding(new Insets(0, LABEL_SIDE_MARGIN, 0, LABEL_SIDE_MARGIN));
                /*
                 * Re-set the solid background for the labels, so we do not
                 * see the background lines through.
                 */
                label.setStyle(BACKGROUND_STYLE);
            }).collect(Collectors.toList());

    VBox treeElemsBox = new VBox(); // Change to TreeView eventually ?
    treeElemsBox.getChildren().addAll(treeElements);

    /* Prepare the Canvases with the horizontal alignment lines */
    List<Canvas> canvases = new ArrayList<>();
    int maxEntriesPerCanvas = (int) (MAX_CANVAS_HEIGHT / ENTRY_HEIGHT);
    Lists.partition(treeElements, maxEntriesPerCanvas).forEach(subList -> {
        int nbElements = subList.size();
        double height = nbElements * ENTRY_HEIGHT;

        Canvas canvas = new Canvas(paneWidth, height);
        drawBackgroundLines(canvas, ENTRY_HEIGHT);
        canvas.setCache(true);
        canvases.add(canvas);
    });
    VBox canvasBox = new VBox();
    canvasBox.getChildren().addAll(canvases);

    /* Put the background Canvas and the Tree View into their containers */
    StackPane stackPane = new StackPane(canvasBox, treeElemsBox);
    stackPane.setStyle(BACKGROUND_STYLE);
    return stackPane;
}

From source file:com.netflix.metacat.usermetadata.mysql.MysqlUserMetadataService.java

@Override
public void saveMetadatas(final String user, final List<? extends HasMetadata> metadatas, final boolean merge) {
    try {
        final Connection conn = poolingDataSource.getConnection();
        try {
            @SuppressWarnings("unchecked")
            final List<List<HasMetadata>> subLists = Lists.partition((List<HasMetadata>) metadatas,
                    config.getUserMetadataMaxInClauseItems());
            for (List<HasMetadata> hasMetadatas : subLists) {
                final List<String> uris = Lists.newArrayList();
                final List<QualifiedName> names = Lists.newArrayList();
                // Get the names and uris
                final List<HasDefinitionMetadata> definitionMetadatas = Lists.newArrayList();
                final List<HasDataMetadata> dataMetadatas = Lists.newArrayList();
                hasMetadatas.stream().forEach(hasMetadata -> {
                    if (hasMetadata instanceof HasDefinitionMetadata) {
                        final HasDefinitionMetadata oDef = (HasDefinitionMetadata) hasMetadata;
                        names.add(oDef.getDefinitionName());
                        if (oDef.getDefinitionMetadata() != null) {
                            definitionMetadatas.add(oDef);
                        }
                    }
                    if (hasMetadata instanceof HasDataMetadata) {
                        final HasDataMetadata oData = (HasDataMetadata) hasMetadata;
                        if (oData.isDataExternal() && oData.getDataMetadata() != null
                                && oData.getDataMetadata().size() > 0) {
                            uris.add(oData.getDataUri());
                            dataMetadatas.add(oData);
                        }
                    }
                });
                if (!definitionMetadatas.isEmpty() || !dataMetadatas.isEmpty()) {
                    // Get the existing metadata based on the names and uris
                    final Map<String, ObjectNode> definitionMap = getDefinitionMetadataMap(names);
                    final Map<String, ObjectNode> dataMap = getDataMetadataMap(uris);
                    // Curate the list of existing and new metadatas
                    final List<Object[]> insertDefinitionMetadatas = Lists.newArrayList();
                    final List<Object[]> updateDefinitionMetadatas = Lists.newArrayList();
                    final List<Object[]> insertDataMetadatas = Lists.newArrayList();
                    final List<Object[]> updateDataMetadatas = Lists.newArrayList();
                    definitionMetadatas.stream().forEach(oDef -> {
                        final QualifiedName qualifiedName = oDef.getDefinitionName();
                        if (qualifiedName != null && oDef.getDefinitionMetadata() != null
                                && oDef.getDefinitionMetadata().size() != 0) {
                            final String name = qualifiedName.toString();
                            final ObjectNode oNode = definitionMap.get(name);
                            if (oNode == null) {
                                insertDefinitionMetadatas.add(
                                        new Object[] { metacatJson.toJsonString(oDef.getDefinitionMetadata()),
                                                user, user, name, });
                            } else {
                                metacatJson.mergeIntoPrimary(oNode, oDef.getDefinitionMetadata());
                                updateDefinitionMetadatas
                                        .add(new Object[] { metacatJson.toJsonString(oNode), user, name });
                            }
                        }
                    });
                    dataMetadatas.stream().forEach(oData -> {
                        final String uri = oData.getDataUri();
                        final ObjectNode oNode = dataMap.get(uri);
                        if (oData.getDataMetadata() != null && oData.getDataMetadata().size() != 0) {
                            if (oNode == null) {
                                insertDataMetadatas.add(new Object[] {
                                        metacatJson.toJsonString(oData.getDataMetadata()), user, user, uri, });
                            } else {
                                metacatJson.mergeIntoPrimary(oNode, oData.getDataMetadata());
                                updateDataMetadatas
                                        .add(new Object[] { metacatJson.toJsonString(oNode), user, uri });
                            }
                        }
                    });
                    //Now run the queries
                    final QueryRunner runner = new QueryRunner();
                    if (!insertDefinitionMetadatas.isEmpty()) {
                        runner.batch(conn, SQL.INSERT_DEFINITION_METADATA, insertDefinitionMetadatas
                                .toArray(new Object[insertDefinitionMetadatas.size()][4]));
                    }
                    if (!updateDefinitionMetadatas.isEmpty()) {
                        runner.batch(conn, SQL.UPDATE_DEFINITION_METADATA, updateDefinitionMetadatas
                                .toArray(new Object[updateDefinitionMetadatas.size()][3]));
                    }
                    if (!insertDataMetadatas.isEmpty()) {
                        runner.batch(conn, SQL.INSERT_DATA_METADATA,
                                insertDataMetadatas.toArray(new Object[insertDataMetadatas.size()][4]));
                    }
                    if (!updateDataMetadatas.isEmpty()) {
                        runner.batch(conn, SQL.UPDATE_DATA_METADATA,
                                updateDataMetadatas.toArray(new Object[updateDataMetadatas.size()][3]));
                    }
                }
            }
            conn.commit();
        } catch (SQLException e) {
            conn.rollback();
            throw e;
        } finally {
            conn.close();
        }
    } catch (SQLException e) {
        log.error("Sql exception", e);
        throw new UserMetadataServiceException("Failed to save metadata", e);
    }
}

From source file:org.sonar.css.checks.RuleDescriptionsGenerator.java

private String generateHtmlTable(List<StandardCssObject> standardCssObjects) {
    StringBuilder html = new StringBuilder("<table style=\"border: 0;\">\n");
    List<List<StandardCssObject>> subLists = Lists.partition(standardCssObjects, 3);
    for (List<StandardCssObject> subList : subLists) {
        html.append("<tr>");
        for (StandardCssObject standardCssObject : subList) {
            html.append("<td style=\"border: 0; \">");
            if (!standardCssObject.getLinks().isEmpty()) {
                html.append("<a target=\"_blank\" href=\"").append(standardCssObject.getLinks().get(0))
                        .append("\">");
            }
            html.append("<code>").append(standardCssObject.getName()).append("</code>");
            if (!standardCssObject.getLinks().isEmpty()) {
                html.append("</a>");
            }
            html.append("</code>");
            for (int i = 1; i < standardCssObject.getLinks().size(); i++) {
                html.append("&nbsp;&nbsp;<a target=\"_blank\" href=\"")
                        .append(standardCssObject.getLinks().get(i)).append("\">#").append(i + 1)
                        .append("</a>");
            }
            html.append("</td>\n");
        }
        html.append("</tr>");
    }
    html.append("</table>\n");
    return html.toString();
}

From source file:org.apache.beam.sdk.util.GcsUtil.java

/**
 * Makes get {@link BatchRequest BatchRequests}.
 *
 * @param paths {@link GcsPath GcsPaths}.
 * @param results mutable {@link List} for return values.
 * @return {@link BatchRequest BatchRequests} to execute.
 * @throws IOException
 */
@VisibleForTesting
List<BatchRequest> makeGetBatches(Collection<GcsPath> paths, List<StorageObjectOrIOException[]> results)
        throws IOException {
    List<BatchRequest> batches = new LinkedList<>();
    for (List<GcsPath> filesToGet : Lists.partition(Lists.newArrayList(paths), MAX_REQUESTS_PER_BATCH)) {
        BatchRequest batch = createBatchRequest();
        for (GcsPath path : filesToGet) {
            results.add(enqueueGetFileSize(path, batch));
        }
        batches.add(batch);
    }
    return batches;
}

From source file:org.openscada.ae.server.net.ServerConnectionHandler.java

public void sendQueryData(final QueryImpl queryImpl, final List<Event> events) {
    // TODO: check if query is still active

    for (final List<Event> chunk : Lists.partition(events, getChunkSize())) {
        final Message message = new Message(Messages.CC_QUERY_DATA);
        message.getValues().put("data", EventMessageHelper.toValue(chunk));
        message.getValues().put(MESSAGE_QUERY_ID, new LongValue(queryImpl.getQueryId()));
        this.messenger.sendMessage(message);
    }
}