Example usage for java.util.stream Collectors joining

Introduction

This page lists usage examples for java.util.stream Collectors.joining, collected from open source projects.

Prototype

public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) 

Document

Returns a Collector that concatenates the input elements, separated by the specified delimiter, in encounter order.
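A minimal, self-contained sketch of this one-argument overload (the class name JoiningDemo is illustrative):

import java.util.stream.Collectors;
import java.util.stream.Stream;

public class JoiningDemo {
    public static void main(String[] args) {
        // Concatenate the elements, separated by ", ", in encounter order.
        String joined = Stream.of("alpha", "beta", "gamma")
                .collect(Collectors.joining(", "));
        System.out.println(joined); // prints: alpha, beta, gamma
    }
}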

Usage

From source file:com.blackducksoftware.integration.hub.detect.help.DetectOption.java

public void printDetailedOption(final HelpTextWriter writer) {
    writer.println("");
    writer.println("Detailed information for " + detectProperty.getPropertyKey());
    writer.println("");
    if (getDetectOptionHelp().isDeprecated) {
        writer.println("Deprecated: " + getDeprecationText());
        writer.println("Deprecation description: " + getDetectOptionHelp().deprecation);
        writer.println("");
    }
    writer.println("Property description: " + getDetectOptionHelp().description);
    writer.println("Property default value: " + detectProperty.getDefaultValue());
    if (getValidValues().size() > 0) {
        writer.println(
                "Property acceptable values: " + getValidValues().stream().collect(Collectors.joining(", ")));
    }
    writer.println("");

    final DetectOptionHelp help = getDetectOptionHelp();
    if (StringUtils.isNotBlank(help.detailedHelp)) {
        writer.println("Detailed help:");
        writer.println(help.detailedHelp);
        writer.println();
    }
}
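Note that when the values are already in a collection, String.join(", ", getValidValues()) would be equivalent; Collectors.joining is the natural choice when the values come out of a stream pipeline, as in the examples that follow.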

From source file:example.UserGuideTest.java

@Test
public void graph() throws Exception {
    IRI nameIri = factory.createIRI("http://example.com/name");
    BlankNode aliceBlankNode = factory.createBlankNode();
    Literal aliceLiteral = factory.createLiteral("Alice");
    Triple triple = factory.createTriple(aliceBlankNode, nameIri, aliceLiteral);

    Graph graph = factory.createGraph();

    graph.add(triple);

    IRI bob = factory.createIRI("http://example.com/bob");
    Literal bobName = factory.createLiteral("Bob");
    graph.add(bob, nameIri, bobName);

    System.out.println(graph.contains(triple));

    System.out.println(graph.contains(null, nameIri, bobName));

    System.out.println(graph.size());

    for (Triple t : graph.iterate()) {
        System.out.println(t.getObject());
    }

    for (Triple t : graph.iterate(null, null, bobName)) {
        System.out.println(t.getPredicate());
    }

    Stream<RDFTerm> objects = graph.getTriples().map(t -> t.getObject());
    String s = objects.map(RDFTerm::ntriplesString).collect(Collectors.joining(" "));
    System.out.println(s);

    Stream<? extends Triple> namedB = graph.getTriples(null, nameIri, null)
            .filter(t -> t.getObject().ntriplesString().contains("B"));
    System.out.println(namedB.map(t -> t.getSubject()).findAny().get());

    graph.remove(triple);
    System.out.println(graph.contains(triple));

    graph.remove(null, nameIri, null);

    graph.clear();
    System.out.println(graph.contains(null, null, null));

}

From source file:com.ge.research.semtk.edc.client.ResultsClient.java

/**
 * Store a table by chunking it into CSV segments and sending each chunk
 * to the results service.
 * @param jobId the id of the job the results belong to
 * @param table the table to store
 * @throws Exception if a row is too large to send or the service reports a failure
 */
@SuppressWarnings("unchecked")
public void execStoreTableResults(String jobId, Table table)
        throws ConnectException, EndpointNotFoundException, Exception {
    // chunk up the table by size and then send all the chunks. 
    // hopefully, this will avoid sending anything too large to the results service

    int tableRowsDone = 0;
    int totalRows = table.getNumRows();
    int segment = 0;

    long startTime = 0, endTime = 0;
    double prepSec = 0.0;
    double sendSec = 0.0;
    boolean timerFlag = false;

    Thread thread = null;

    if (totalRows == 0) {
        // just create and send the header row.
        StringBuilder resultsSoFar = new StringBuilder();

        for (int i1 = 0; i1 < table.getNumColumns(); i1 += 1) {
            resultsSoFar.append((table.getColumnNames())[i1]);
            if (i1 < table.getNumColumns() - 1) {
                resultsSoFar.append(",");
            }
        }

        resultsSoFar.append("\n");

        conf.setServiceEndpoint("results/storeIncrementalCsvResults");
        this.parametersJSON.put("contents", resultsSoFar.toString());
        this.parametersJSON.put("jobId", jobId);
        this.parametersJSON.put("segmentNumber", segment);

        thread = new Thread(this);
        thread.start(); // start(), not run(): run() would execute synchronously on the calling thread
    }

    else { // write out all the results, y'know?
        while (tableRowsDone < totalRows) {
            if (timerFlag) {
                startTime = System.nanoTime();
            }
            int tableRowsAtStart = tableRowsDone;
            // get the next few rows.
            StringBuilder resultsSoFar = new StringBuilder();
            //String lastResults  = "";

            // get the next allocation of rows. 
            for (int i = 0; i < this.ROWS_TO_PROCESS; i += 1) {
                try {

                    // Make sure we include a header row.
                    if (tableRowsDone == 0) { // first record...
                        for (int i1 = 0; i1 < table.getNumColumns(); i1 += 1) {
                            resultsSoFar.append((table.getColumnNames())[i1]);
                            if (i1 < table.getNumColumns() - 1) {
                                resultsSoFar.append(",");
                            }
                        }
                    }

                    // get the next row into a comma separated string.
                    // ArrayList.toString() is fast, but it can't be used if any element contains a comma
                    String curr = table.getRow(tableRowsDone).toString();
                    if (StringUtils.countMatches(curr, ",") != (table.getNumColumns() - 1)) {
                        // escape double quotes (using "" for csv files), then enclose each element in double quotes 
                        curr = table
                                .getRow(tableRowsDone).stream().map(s -> (new StringBuilder()).append("\"")
                                        .append(s.replace("\"", "\"\"")).append("\"").toString())
                                .collect(Collectors.joining(","));
                    } else {
                        // ArrayList.toString() added surrounding brackets and spaces after each comma - remove these
                        curr = StringUtils.substring(curr, 1, curr.length() - 1);
                        curr = StringUtils.replace(curr, ", ", ",");
                    }

                    tableRowsDone += 1;

                    // add to the existing results we want to send.
                    //lastResults = resultsSoFar.toString(); // PEC changed  
                    resultsSoFar.append("\n");
                    resultsSoFar.append(curr); // TODO when this was using +=, it would have triggered the batch-too-big behavior, but now that it's a StringBuilder, not sure

                } catch (IndexOutOfBoundsException eek) {
                    // we have run out of rows. the remaining rows were fewer than the block size. just note this and move on.
                    i = this.ROWS_TO_PROCESS;
                }

                // TODO review with Justin.  Removing the "revert to slightly smaller batch size" for now because saving the lastBatch after every row
                // was slowing the performance.  We can reintroduce it in a better way later.  For now, let any exceptions flow up
                //            catch(Exception eee){
                //               // the send size would have been too large.
                //               tableRowsDone = tableRowsDone - 1;
                //               
                //               System.out.println("*** caught an exception trying to process a result: " +  tableRowsDone);
                //               System.out.println(eee.getMessage());
                //         
                //               i = this.ROWS_TO_PROCESS; // remove the one that broke things. this way, we reprocess it
                //               //resultsSoFar = new StringBuilder(lastResults); // reset the values.  
                //            }
            }

            // fail if tableRowsDone has not changed. this implies that even the first result was too large.
            if ((tableRowsDone == tableRowsAtStart) && (tableRowsDone < totalRows)) {
                throw new Exception(
                        "unable to write results: a single row is too large to send. row number was "
                                + tableRowsDone + " of a total " + totalRows + ".");
            }

            if (timerFlag) {
                endTime = System.nanoTime();
                prepSec += ((endTime - startTime) / 1000000000.0);
                System.err.println(String.format("tot prep=%.2f sec", prepSec));
                startTime = endTime;
            }

            // take care of last run
            if (thread != null) {
                thread.join();
                ((SimpleResultSet) this.getRunRes()).throwExceptionIfUnsuccessful();
                if (this.getRunException() != null) {
                    throw this.getRunException();
                }
                segment += 1;
                conf.setServiceEndpoint(null);
                this.parametersJSON.remove("contents");
                this.parametersJSON.remove("jobId");
            }

            // send the current one:

            conf.setServiceEndpoint("results/storeIncrementalCsvResults");
            this.parametersJSON.put("contents", resultsSoFar.toString());
            this.parametersJSON.put("jobId", jobId);
            this.parametersJSON.put("segmentNumber", segment);

            thread = new Thread(this);
            thread.start(); // start(), not run(): lets the send overlap with preparing the next chunk

            if (timerFlag) {
                endTime = System.nanoTime();
                sendSec += ((endTime - startTime) / 1000000000.0);
                System.err.println(String.format("tot send=%.2f sec", sendSec));
                startTime = endTime;
            }
        } // end of while loop.

    }

    // cleanup
    // take care of last run
    if (thread != null) {
        thread.join();
        ((SimpleResultSet) this.getRunRes()).throwExceptionIfUnsuccessful();
        if (this.getRunException() != null) {
            throw this.getRunException();
        }

    }

    if (timerFlag) {
        System.err.println(String.format("prep=%.2f sec   send=%.2f sec", prepSec, sendSec));
    }
    return;
}
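The quoting logic above can be distilled into a small stand-alone helper. This is a sketch under assumptions (CsvRowJoiner and toCsvRow are hypothetical names, not part of the SemTK API): embedded double quotes are doubled, each field is wrapped in quotes, and the fields are joined with commas.

import java.util.List;
import java.util.stream.Collectors;

public class CsvRowJoiner {
    // Escape embedded double quotes by doubling them, wrap each field in
    // quotes, then join the fields with commas.
    static String toCsvRow(List<String> fields) {
        return fields.stream()
                .map(f -> "\"" + f.replace("\"", "\"\"") + "\"")
                .collect(Collectors.joining(","));
    }

    public static void main(String[] args) {
        System.out.println(toCsvRow(List.of("plain", "has,comma", "has\"quote")));
        // prints: "plain","has,comma","has""quote"
    }
}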

From source file:com.epam.ta.reportportal.events.handler.TicketActivitySubscriber.java

private String issuesIdsToString(Set<TestItemIssue.ExternalSystemIssue> externalSystemIssues,
        String separator) {
    if (null != externalSystemIssues && !externalSystemIssues.isEmpty()) {
        return externalSystemIssues.stream().map(externalSystemIssue -> externalSystemIssue.getTicketId()
                .concat(":").concat(externalSystemIssue.getUrl())).collect(Collectors.joining(separator));
    }
    return null;
}

From source file:com.thinkbiganalytics.alerts.spi.defaults.KyloEntityAwareAlertCriteria.java

private String filterStringForSlaAlertEntities(String keyword) {
    return KyloEntityAwareAlertManager.alertSlaFilters.keySet().stream().map(key -> key + "=~" + keyword)
            .collect(Collectors.joining(","));
}

From source file:com.qcadoo.mes.deliveries.CompanyProductServiceImpl.java

public String checkIfDefaultExistsForProductsInFamily(final Entity companyProduct) {
    if (companyProduct.getBooleanField(CompanyProductFields.IS_DEFAULT)) {
        Entity product = companyProduct.getBelongsToField(CompanyProductFields.PRODUCT);

        if (product == null) {
            return StringUtils.EMPTY;
        } else {
            StringBuilder productNames = new StringBuilder();
            List<Entity> children = product.getHasManyField(ProductFields.PRODUCT_FAMILY_CHILDRENS);
            for (Entity child : children) {
                List<Entity> familiesCompanies = child
                        .getHasManyField(ProductFieldsD.PRODUCTS_FAMILY_COMPANIES);
                if (!familiesCompanies.isEmpty()) {
                    String defaultCompaniesForFamilies = familiesCompanies.stream()
                            .filter(cp -> cp.getBooleanField(CompanyProductsFamilyFields.IS_DEFAULT))
                            .map(cp -> cp.getBelongsToField(CompanyProductsFamilyFields.PRODUCT)
                                    .getStringField(ProductFields.NUMBER))
                            .collect(Collectors.joining(", "));
                    productNames.append(defaultCompaniesForFamilies);
                }
                List<Entity> productCompanies = child.getHasManyField(ProductFieldsD.PRODUCT_COMPANIES);
                if (!productCompanies.isEmpty()) {
                    String defaultCompaniesForProducts = productCompanies.stream()
                            .filter(cp -> cp.getBooleanField(CompanyProductFields.IS_DEFAULT))
                            .map(cp -> cp.getBelongsToField(CompanyProductFields.PRODUCT)
                                    .getStringField(ProductFields.NUMBER))
                            .collect(Collectors.joining(", "));
                    productNames.append(defaultCompaniesForProducts);
                }
            }
            return productNames.toString();
        }
    }
    return StringUtils.EMPTY;
}

From source file:com.thinkbiganalytics.metadata.jobrepo.nifi.provenance.NifiBulletinExceptionExtractor.java

/**
 * Queries for error bulletins posted by any of the given processors.
 *
 * @param processorIds the UUIDs of the processors to extract error messages from
 * @param afterId      only return bulletins with an id greater than this value; may be null
 * @return a list of bulletin objects that were posted by the given processors
 * @throws NifiConnectionException if Nifi cannot be queried
 */
public List<BulletinDTO> getErrorBulletinsForProcessorId(Collection<String> processorIds, Long afterId)
        throws NifiConnectionException {
    List<BulletinDTO> bulletins;
    try {
        String regexPattern = processorIds.stream().collect(Collectors.joining("|"));
        if (afterId != null && afterId != -1L) {
            bulletins = nifiRestClient.getBulletinsMatchingSource(regexPattern, afterId);
        } else {
            bulletins = nifiRestClient.getBulletinsMatchingSource(regexPattern, null);
        }

        log.info("Query for {} bulletins returned {} results", regexPattern, bulletins == null ? 0 : bulletins.size());
        if (bulletins != null && !bulletins.isEmpty()) {
            bulletins = bulletins.stream()
                    .filter(bulletinDTO -> bulletinErrorLevels.contains(bulletinDTO.getLevel().toUpperCase()))
                    .collect(Collectors.toList());
        }

        return bulletins;
    } catch (NifiClientRuntimeException e) {
        if (e instanceof NifiConnectionException) {
            throw e;
        } else {
            log.error("Error getErrorBulletinsForProcessorId {}: {}", processorIds, e.getMessage());
        }
    }
    return null;
}
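Joining ids with "|" to build a regex alternation, as above, is a handy pattern on its own. A minimal sketch (the ids are made up, and Pattern.quote is an extra safeguard the original does not use):

import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class RegexAlternationDemo {
    public static void main(String[] args) {
        List<String> processorIds = List.of("a1b2", "c3d4", "e5f6");
        // Build one pattern that matches any of the ids.
        String regex = processorIds.stream()
                .map(Pattern::quote) // guards against regex metacharacters in an id
                .collect(Collectors.joining("|"));
        System.out.println(Pattern.matches(regex, "c3d4")); // true
    }
}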

From source file:com.ggvaidya.scinames.ui.DataReconciliatorController.java

private TableColumn<String, String> createColumnFromPrecalc(String colName,
        Table<String, String, Set<String>> precalc) {
    TableColumn<String, String> column = new TableColumn<>(colName);
    column.cellValueFactoryProperty().set((TableColumn.CellDataFeatures<String, String> cdf) -> {
        String clusterID = cdf.getValue();

        // There might be columns found in some dataset but not in others
        // so we detect those cases here and put in "NA"s instead.
        String output = "NA";
        if (precalc.contains(clusterID, colName))
            output = precalc.get(clusterID, colName).stream().collect(Collectors.joining("; "));

        return new ReadOnlyStringWrapper(output);
    });
    column.setPrefWidth(100.0);
    column.setEditable(false);
    return column;
}
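Because the values here come out of a Set, the order of the "; "-joined items depends on the set implementation's iteration order; adding a sorted() step before collecting would make the output deterministic.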

From source file:io.sqp.proxy.ClientSession.java

private void processMessageQueue() {
    // check if there is a message to process
    if (_messageQueue.isEmpty()) {
        return;
    }
    // TODO: refactor
    SqpMessage message = _messageQueue.peek();
    try {
        // validate and check if we are allowed to proceed or need to wait
        if (!_state.canProcess(message.getType())) {
            // in a blocking state, just return and handle this message another time
            if (_state.isBlocking()) {
                return;
            }
            // otherwise discard the message and throw an error
            _clientConnection.setMessageFormat(message.getMessageFormat()); // answer in same format
            _messageQueue.poll();
            // not blocking and cannot process: fail
            String allowed = _state.getProcessableMessages().stream().map(Object::toString)
                    .collect(Collectors.joining(", "));
            throw new UnexpectedMessageException("Cannot process message of type '" + message.getType()
                    + "' in state " + _state + ". Allowed message types are: " + allowed + ".",
                    _state.getErrorAction());
        }
        // make sure we answer in the same format
        _clientConnection.setMessageFormat(message.getMessageFormat());
        _messageQueue.poll(); // actually remove the msg from queue
        processMessageInternal(message);
    } catch (Throwable e) {
        if (e instanceof SqpException) {
            handleError((SqpException) e);
        } else {
            handleError(new BackendErrorException("Unexpected error in backend: " + e.getMessage(), e));
        }
    }
}