Example usage for java.util.stream Collectors joining

Introduction

This page shows example usages of java.util.stream.Collectors.joining, drawn from the source files listed below.

Prototype

public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) 

Document

Returns a Collector that concatenates the input elements, separated by the specified delimiter, in encounter order.
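
For example, this minimal, self-contained sketch (class name and values are illustrative) joins a list of strings with ", ":

import java.util.List;
import java.util.stream.Collectors;

public class JoiningExample {
    public static void main(String[] args) {
        List<String> names = List.of("alpha", "beta", "gamma");
        // Concatenates the elements in encounter order, separated by ", ":
        String joined = names.stream().collect(Collectors.joining(", "));
        System.out.println(joined); // prints: alpha, beta, gamma
    }
}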

Usage

From source file:com.evolveum.midpoint.gui.api.component.ObjectListPanel.java

private IModel<String> getItemValuesString(Item<?, ?> item, PrismObject<LookupTableType> lookupTable) {
    return Model.of(item.getValues().stream().filter(Objects::nonNull).map(itemValue -> {
        if (itemValue instanceof PrismPropertyValue) {
            if (lookupTable == null) {
                return String.valueOf(((PrismPropertyValue<?>) itemValue).getValue());
            } else {
                String lookupTableKey = ((PrismPropertyValue<?>) itemValue).getValue().toString();
                LookupTableType lookupTableObject = lookupTable.getValue().asObjectable();
                String rowLabel = "";
                for (LookupTableRowType lookupTableRow : lookupTableObject.getRow()) {
                    if (lookupTableRow.getKey().equals(lookupTableKey)) {
                        rowLabel = lookupTableRow.getLabel() != null ? lookupTableRow.getLabel().getOrig()
                                : lookupTableRow.getValue();
                        break;
                    }
                }
                return rowLabel;
            }
        } else {
            return itemValue.toString() + " "; // TODO why + " "?
        }
    }).collect(Collectors.joining(", ")));
}

From source file:com.thinkbiganalytics.metadata.jobrepo.nifi.provenance.ProvenanceEventReceiver.java

/**
 * Make a REST call to NiFi, query for the NiFi bulletins whose flowfile id matches this job execution, and write the
 * bulletin messages to {@link BatchJobExecution#setExitMessage(String)}.
 *
 * @param event a provenance event
 */
private void queryForNiFiErrorBulletins(ProvenanceEventRecordDTO event) {
    try {
        metadataAccess.commit(() -> {
            //query for nifi logs
            List<String> relatedFlowFiles = batchJobExecutionProvider
                    .findRelatedFlowFiles(event.getFlowFileUuid());
            if (relatedFlowFiles == null) {
                relatedFlowFiles = new ArrayList<>();
            }
            if (relatedFlowFiles.isEmpty()) {
                relatedFlowFiles.add(event.getFlowFileUuid());
            }
            log.info("Failed Job {}/{}. Found {} related flow files. ", event.getEventId(),
                    event.getFlowFileUuid(), relatedFlowFiles.size());
            List<BulletinDTO> bulletinDTOS = nifiBulletinExceptionExtractor
                    .getErrorBulletinsForFlowFiles(relatedFlowFiles);
            if (bulletinDTOS != null && !bulletinDTOS.isEmpty()) {
                //write them back to the job
                BatchJobExecution jobExecution = batchJobExecutionProvider.findJobExecution(event);
                if (jobExecution != null) {
                    String msg = jobExecution.getExitMessage() != null ? jobExecution.getExitMessage() + "\n"
                            : "";
                    msg += "NiFi exceptions: \n" + bulletinDTOS.stream()
                            .map(bulletinDTO -> bulletinDTO.getMessage()).collect(Collectors.joining("\n"));
                    jobExecution.setExitMessage(msg);
                    this.batchJobExecutionProvider.save(jobExecution);
                }
            }
        }, MetadataAccess.SERVICE);
    } catch (Exception e) {
        log.error(
                "Unable to query NiFi and save exception bulletins for job failure eventid/flowfile : {} / {}. Exception Message:  {}",
                event.getEventId(), event.getFlowFileUuid(), e.getMessage(), e);
    }
}

From source file:org.rakam.client.builder.document.SlateDocumentGenerator.java

private String toExampleJsonParameters(Operation operation) {
    if (operation.getParameters().size() == 1 && operation.getParameters().get(0).getIn().equals("body")) {
        Model model = ((BodyParameter) operation.getParameters().get(0)).getSchema();

        Map<String, Property> properties;
        if (model.getReference() != null) {
            String prefix = "#/definitions/";
            if (model.getReference().startsWith(prefix)) {
                Model model1 = swagger.getDefinitions().get(model.getReference().substring(prefix.length()));
                if (model1 instanceof ArrayModel) {
                    return prettyJson("[" + getValue(((ArrayModel) model1).getItems()) + "]");
                }
                properties = model1.getProperties();
            } else {
                throw new IllegalStateException();
            }
        } else {
            properties = model.getProperties();
        }
        return prettyJson(toExampleJsonParameters(properties));
    }

    String jsonString = "{" + operation.getParameters().stream().filter(e -> e instanceof FormParameter)
            .map(e -> "\"" + e.getName() + "\" : " + getValue((AbstractSerializableParameter) e))
            .collect(Collectors.joining(", ")) + "}";

    return prettyJson(jsonString);
}
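
As an aside, the manual "{" and "}" concatenation in the last statement could also be expressed with the three-argument overload Collectors.joining(delimiter, prefix, suffix), which wraps the joined result. A sketch reusing the example's own names:

    // Same output as above, with prefix and suffix handled by the collector:
    String jsonString = operation.getParameters().stream()
            .filter(e -> e instanceof FormParameter)
            .map(e -> "\"" + e.getName() + "\" : " + getValue((AbstractSerializableParameter) e))
            .collect(Collectors.joining(", ", "{", "}"));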

From source file:org.dataconservancy.packaging.tool.impl.AnnotationDrivenPackageStateSerializer.java

/**
 * Serializes the identified stream from the package state to the supplied result.
 *
 * @param state    the package state object containing the identified stream
 * @param streamId the stream identifier for the content being serialized
 * @param result   holds the output stream for the serialization result
 */
void serializeToResult(PackageState state, StreamId streamId, StreamResult result) {

    if (marshallerMap == null) {
        throw new IllegalStateException(ERR_MISSING_MARSHALLINGMAP);
    }

    PropertyDescriptor pd = propertyDescriptors.get(streamId);

    if (pd == null) {
        throw new IllegalArgumentException(String.format(ERR_INVALID_STREAMID, streamId.name(),
                PackageState.class.getName(),
                propertyDescriptors.keySet().stream().map(Enum::name).collect(Collectors.joining(", "))));
    }

    Object toSerialize;

    try {
        toSerialize = pd.getReadMethod().invoke(state);
        if (toSerialize == null) {
            // The field on the package state had a null value, which is OK.  We have nothing to serialize.
            return;
        }
    } catch (Exception e) {
        String err = String.format(ERR_INVOKING_METHOD, pd.getReadMethod(), state.getClass().getName(),
                e.getMessage());
        throw new RuntimeException(err, e);
    }

    try {
        StreamMarshaller streamMarshaller = marshallerMap.get(streamId);
        if (streamMarshaller == null) {
            throw new RuntimeException(String.format(ERR_MISSING_STREAMMARSHALLER, streamId));
        }

        Marshaller marshaller = streamMarshaller.getMarshaller();
        if (marshaller == null) {
            throw new RuntimeException(String.format(ERR_MISSING_SPRINGMARSHALLER, streamId, streamId));
        }

        marshaller.marshal(toSerialize, result);
    } catch (Exception e) {
        throw new RuntimeException(String.format(ERR_MARSHALLING_STREAM, streamId, e.getMessage()), e);
    }
}

From source file:alfio.config.MvcConfiguration.java

@Bean
public JMustacheTemplateFactory getTemplateFactory() throws Exception {
    final JMustacheTemplateFactory templateFactory = new JMustacheTemplateFactory();

    templateFactory.setPrefix("/WEB-INF/templates");
    templateFactory.setSuffix(".ms");
    templateFactory.setTemplateLoader(templateLoader);
    templateFactory.setCompiler(Mustache.compiler().escapeHTML(true).standardsMode(false).defaultValue("")
            .nullValue("").withFormatter((o) -> {
                if (o instanceof ZonedDateTime) {
                    return DateTimeFormatter.ISO_ZONED_DATE_TIME.format((ZonedDateTime) o);
                } else if (o instanceof DefaultMessageSourceResolvable) {
                    DefaultMessageSourceResolvable m = ((DefaultMessageSourceResolvable) o);
                    return m.getCode() + " "
                            + Arrays.stream(Optional.ofNullable(m.getArguments()).orElse(new Object[] {}))
                                    .map(x -> "[" + x.toString() + "]").collect(Collectors.joining(" "));
                } else {
                    return String.valueOf(o);
                }
            }).withLoader(templateLoader));

    templateFactory.afterPropertiesSet();
    return templateFactory;
}

From source file:controllers.nwbib.Lobid.java

private static String preprocess(final String q) {
    String result;
    if (q.trim().isEmpty() || q.matches(".*?([+~]|AND|OR|\\s-|\\*).*?")) {
        // if supported query string syntax is used, leave it alone:
        result = q;
    } else {
        // else prepend '+' to all terms for AND search:
        result = Arrays.asList(q.split("[\\s-]")).stream().map(x -> "+" + x).collect(Collectors.joining(" "));
    }
    return result// but escape unsupported query string syntax:
            .replace("\\", "\\\\").replace(":", "\\:").replace("^", "\\^").replace("&&", "\\&&")
            .replace("||", "\\||").replace("!", "\\!").replace("(", "\\(").replace(")", "\\)")
            .replace("{", "\\{").replace("}", "\\}").replace("[", "\\[").replace("]", "\\]")
            // `embedded` phrases, like foo"something"bar -> foo\"something\"bar
            .replaceAll("([^\\s])\"([^\"]+)\"([^\\s])", "$1\\\\\"$2\\\\\"$3")
            // remove inescapable range query symbols, possibly prepended with `+`:
            .replaceAll("^\\+?<", "").replace("^\\+?>", "");
}
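
In isolation, the splitting-and-joining step of this preprocessing behaves as in the following standalone sketch (the input value is illustrative):

    String q = "foo bar baz";
    // Split on whitespace/hyphens, prepend '+' to each term, and rejoin for an AND search:
    String result = Arrays.stream(q.split("[\\s-]"))
            .map(x -> "+" + x)
            .collect(Collectors.joining(" ")); // "+foo +bar +baz"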

From source file:org.fcrepo.importexport.exporter.ExportVersionsTest.java

private String createJson(final URI resource, final Resource type, final URI... children) {
    final StringBuilder json = new StringBuilder("{\"@id\":\"" + resource.toString() + "\"");
    if (type != null) {
        json.append(",\"@type\":[\"" + type.getURI() + "\"]");
    }
    if (children != null && children.length > 0) {
        json.append(",\"" + CONTAINS.getURI() + "\":[").append(Arrays.stream(children)
                .map(child -> "{\"@id\":\"" + child.toString() + "\"}").collect(Collectors.joining(",")))
                .append(']');
    }
    json.append('}');
    return json.toString();
}

From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java

@Override
public Future<Dataset> predict(Dataset inputDataset, Model model, MetaInfo datasetMeta, String taskId) {

    CompletableFuture<Dataset> futureDataset = new CompletableFuture<>();

    Dataset dataset = DatasetFactory.copy(inputDataset);
    Dataset tempWithDependentFeatures = DatasetFactory.copy(dataset,
            new HashSet<>(model.getDependentFeatures()));

    dataset.getDataEntry().parallelStream().forEach(dataEntry -> {
        dataEntry.getValues().keySet().retainAll(model.getIndependentFeatures());
    });
    PredictionRequest predictionRequest = new PredictionRequest();
    predictionRequest.setDataset(dataset);
    predictionRequest.setRawModel(model.getActualModel());
    predictionRequest.setAdditionalInfo(model.getAdditionalInfo());

    final HttpPost request = new HttpPost(model.getAlgorithm().getPredictionService());
    request.addHeader("Accept", "application/json");
    request.addHeader("Content-Type", "application/json");

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
        return futureDataset;
    }
    request.setEntity(new InputStreamEntity(in, ContentType.APPLICATION_JSON));

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    try {
                        PredictionResponse predictionResponse = serializer.parse(responseStream,
                                PredictionResponse.class);

                        List<LinkedHashMap<String, Object>> predictions = predictionResponse.getPredictions();
                        if (dataset.getDataEntry().isEmpty()) {
                            DatasetFactory.addEmptyRows(dataset, predictions.size());
                        }
                        List<Feature> features = featureHandler
                                .findBySource("algorithm/" + model.getAlgorithm().getId());
                        IntStream.range(0, dataset.getDataEntry().size())
                                // .parallel()
                                .forEach(i -> {
                                    Map<String, Object> row = predictions.get(i);
                                    DataEntry dataEntry = dataset.getDataEntry().get(i);
                                    if (model.getAlgorithm().getOntologicalClasses().contains("ot:Scaling")
                                            || model.getAlgorithm().getOntologicalClasses()
                                                    .contains("ot:Transformation")) {
                                        dataEntry.getValues().clear();
                                        dataset.getFeatures().clear();
                                    }
                                    row.entrySet().stream().forEach(entry -> {
                                        //                                                    Feature feature = featureHandler.findByTitleAndSource(entry.getKey(), "algorithm/" + model.getAlgorithm().getId());
                                        Feature feature = features.stream()
                                                .filter(f -> f.getMeta().getTitles().contains(entry.getKey()))
                                                .findFirst().orElse(null);
                                        if (feature == null) {
                                            return;
                                        }
                                        dataEntry.getValues().put(baseURI + "feature/" + feature.getId(),
                                                entry.getValue());
                                        FeatureInfo featInfo = new FeatureInfo(
                                                baseURI + "feature/" + feature.getId(),
                                                feature.getMeta().getTitles().stream().findFirst().get());
                                        featInfo.setCategory(Dataset.DescriptorCategory.PREDICTED);
                                        dataset.getFeatures().add(featInfo);
                                    });
                                });
                        dataset.setId(randomStringGenerator.nextString(20));
                        dataset.setTotalRows(dataset.getDataEntry().size());
                        dataset.setMeta(datasetMeta);
                        futureDataset.complete(DatasetFactory.mergeColumns(dataset, tempWithDependentFeatures));
                    } catch (Exception ex) {
                        futureDataset.completeExceptionally(ex);
                    }
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new BadRequestException(message));
                    break;
                case 404:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new NotFoundException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureDataset.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureDataset.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureDataset.completeExceptionally(new InternalServerErrorException(ex));
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureDataset.cancel(true);
        }
    });
    serializer.write(predictionRequest, out);
    try {
        out.close();
    } catch (IOException ex) {
        futureDataset.completeExceptionally(ex);
    }
    futureMap.put(taskId, futureResponse);
    return futureDataset;
}
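
Each non-2xx branch above relies on the same idiom: draining an InputStream into a single String by joining its lines. Distilled (the charset is made explicit here; the original relies on the platform default):

    String message = new BufferedReader(new InputStreamReader(responseStream, StandardCharsets.UTF_8))
            .lines()
            .collect(Collectors.joining("\n"));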

From source file:com.ggvaidya.scinames.complexquery.ComplexQueryViewController.java

public void updateTableWithChangesUsingNameClusters(Project project, List<NameCluster> nameClusters,
        List<Dataset> datasets) {
    Set<Change> changesToDisplay = new HashSet<>();
    for (NameCluster cluster : nameClusters) {
        // Yes, we want to use getAllChanges() here, because we'd like to match eliminated changes too.
        changesToDisplay
                .addAll(datasets.stream().flatMap(ds -> ds.getAllChanges()).collect(Collectors.toSet()));
    }

    List<Change> changes = changesToDisplay.stream().sorted((a, b) -> a.getDataset().compareTo(b.getDataset()))
            .collect(Collectors.toList());

    NameClusterManager ncm = project.getNameClusterManager();

    // And add tablecolumns for the rest.
    dataTableView.getColumns().clear();
    dataTableView.getColumns().addAll(createTableColumnFromChange("id", ch -> ch.getId().toString()),
            createTableColumnFromChange("dataset", ch -> ch.getDataset().getName()),
            createTableColumnFromChange("type", ch -> ch.getType().getType()),
            createTableColumnFromChange("from", ch -> ch.getFromString()),
            createTableColumnFromChange("from_name_cluster_ids",
                    ch -> ncm.getClusters(ch.getFrom()).stream().map(cl -> cl.getId().toString())
                            .collect(Collectors.joining(" and "))),
            createTableColumnFromChange("from_name_clusters",
                    ch -> ncm.getClusters(ch.getFrom()).stream()
                            .map(cl -> cl.getNames().stream().map(n -> n.getFullName())
                                    .collect(Collectors.joining("; ")))
                            .collect(Collectors.joining(" and "))),
            createTableColumnFromChange("to", ch -> ch.getToString()),
            createTableColumnFromChange("to_name_cluster_ids",
                    ch -> ncm.getClusters(ch.getTo()).stream().map(cl -> cl.getId().toString())
                            .collect(Collectors.joining(" and "))),
            createTableColumnFromChange("to_name_clusters",
                    ch -> ncm.getClusters(ch.getTo()).stream()
                            .map(cl -> cl.getNames().stream().map(n -> n.getFullName())
                                    .collect(Collectors.joining("; ")))
                            .collect(Collectors.joining(" and "))),
            createTableColumnFromChange("filter_status",
                    ch -> project.getChangeFilter().test(ch) ? "retained" : "eliminated"),
            createTableColumnFromChange("citations", ch -> ch.getCitationStream().map(cit -> cit.getCitation())
                    .collect(Collectors.joining("; "))));

    dataTableView.getItems().clear();
    dataTableView.getItems().addAll(changes);

    dataTableView.refresh();

    // Fill in status text field.
    statusTextField.setText(dataTableView.getItems().size() + " changes across "
            + changes.stream().map(ch -> ch.getDataset()).distinct().count() + " distinct datasets");
}
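
Note the two-level joining in the name-cluster columns: names within a cluster are joined with "; ", and the per-cluster strings are then joined with " and ". The nested pattern in isolation (types are illustrative):

    String summary = clusters.stream()
            .map(cluster -> cluster.getNames().stream()
                    .map(n -> n.getFullName())
                    .collect(Collectors.joining("; ")))  // inner join: names within one cluster
            .collect(Collectors.joining(" and "));       // outer join: across clusters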

From source file:org.niord.core.keycloak.KeycloakIntegrationService.java

/**
 * Searches Keycloak for users matching the given search criteria
 * @return the matching users from Keycloak
 */
private List<UserVo> searchKeycloakUsers(Map<String, String> paramMap, int first, int max) throws Exception {

    String params = paramMap.entrySet().stream()
            .map(e -> e.getKey() + "=" + WebUtils.encodeURIComponent(e.getValue()))
            .collect(Collectors.joining("&")) + "&first=" + first + "&max=" + max;

    return executeAdminRequest(
            new HttpGet(resolveAuthServerUrl() + "/admin/realms/" + KEYCLOAK_REALM + "/users?" + params), true, // Add auth header
            is -> {
                List<UserRepresentation> result = new ObjectMapper()
                        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
                        .readValue(is, new TypeReference<List<UserRepresentation>>() {
                        });
                log.debug("Read users from Keycloak");
                return result.stream().map(this::readUser).collect(Collectors.toList());
            });
}
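
The parameter handling above is a common way to assemble a URL query string: encode each key/value pair, then join the pairs with "&". Reduced to its essentials (WebUtils.encodeURIComponent is the project's own helper; java.net.URLEncoder stands in for it here):

    String params = paramMap.entrySet().stream()
            .map(e -> e.getKey() + "=" + URLEncoder.encode(e.getValue(), StandardCharsets.UTF_8))
            .collect(Collectors.joining("&"));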