Example usage for java.util.stream Collectors joining

List of usage examples for java.util.stream Collectors joining

Introduction

On this page you can find usage examples for java.util.stream Collectors.joining.

Prototype

public static Collector<CharSequence, ?, String> joining(CharSequence delimiter) 

Document

Returns a Collector that concatenates the input elements, separated by the specified delimiter, in encounter order.
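
As a quick, hedged illustration of the prototype above, here is a minimal, self-contained sketch; the class and variable names are illustrative and not taken from any of the source files below.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class JoiningExample {
    public static void main(String[] args) {
        // Illustrative input; any Stream<CharSequence> can be collected this way.
        List<String> words = Arrays.asList("alpha", "beta", "gamma");

        // Concatenate the elements in encounter order, separated by the delimiter.
        String joined = words.stream().collect(Collectors.joining(", "));

        System.out.println(joined); // prints: alpha, beta, gamma
    }
}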

Usage

From source file:cc.kave.commons.pointsto.evaluation.events.MRREvaluation.java

public void exportResults(Path dir, ResultExporter exporter) throws IOException {
    Path resultFile = dir.resolve(getClass().getSimpleName() + ".txt");
    exporter.export(resultFile, results.entrySet().stream().map(entry -> {
        ICoReTypeName type = entry.getKey().getLeft();
        return new String[] { CoReNames.vm2srcQualifiedType(type), entry.getKey().getMiddle(),
                entry.getKey().getRight(), String.format(Locale.US, "%.5f", entry.getValue()) };
    }));

    Path numQueriesFile = dir.resolve(getClass().getSimpleName() + ".nq.txt");
    exporter.export(numQueriesFile,
            numQueries.entrySet().stream()
                    .map(entry -> new String[] { CoReNames.vm2srcQualifiedType(entry.getKey().getLeft()),
                            entry.getKey().getRight(), Integer.toString(entry.getValue()) }));

    Path zeroExtractedQueriesFile = dir.resolve(getClass().getSimpleName() + ".zeq.txt");
    exporter.export(zeroExtractedQueriesFile, zeroExtractedQueries.entrySet().stream()
            .map(entry -> new String[] { entry.getKey(), Integer.toString(entry.getValue()) }));

    Path prunedUsagesFile = dir.resolve(getClass().getSimpleName() + ".pruned.txt");
    exporter.export(prunedUsagesFile, prunedUsages.stream().map(entry -> new String[] { entry.getLeft(),
            CoReNames.vm2srcQualifiedType(entry.getMiddle()), entry.getRight().toString() }));

    exporter.export(dir.resolve(getClass().getSimpleName() + ".mst.txt"),
            missingStoreTypes.entrySet().stream().map(entry -> {
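                // Render the collected type names as one comma-separated field.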
                String typeNames = entry.getValue().stream().map(CoReNames::vm2srcQualifiedType)
                        .collect(Collectors.joining(","));
                return new String[] { entry.getKey(), typeNames };
            }));
}

From source file:com.blackducksoftware.integration.hub.detect.workflow.hub.DetectProjectService.java

private List<ProjectCloneCategoriesType> convertClonePropertyToEnum(final String[] cloneCategories) {
    final List<ProjectCloneCategoriesType> categories = new ArrayList<>();
    for (final String category : cloneCategories) {
        categories.add(ProjectCloneCategoriesType.valueOf(category));
    }
    logger.debug("Found clone categories: "
            + categories.stream().map(it -> it.toString()).collect(Collectors.joining(",")));
    return categories;
}

From source file:de.tu_dortmund.ub.hb_ng.middleware.MiddlewareHbNgEndpoint.java

protected void doPost(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse)
        throws ServletException, IOException {

    // CORS ORIGIN RESPONSE HEADER
    httpServletResponse.setHeader("Access-Control-Allow-Origin",
            config.getProperty(HBNGStatics.CORS_ACCESS_CONTROL_ALLOW_ORIGIN_IDENTIFIER));

    String authorization = "";
    String contenttype = "";

    Enumeration<String> headerNames = httpServletRequest.getHeaderNames();
    while (headerNames.hasMoreElements()) {

        String headerNameKey = headerNames.nextElement();
        this.logger.debug("headerNameKey = " + headerNameKey + " / headerNameValue = "
                + httpServletRequest.getHeader(headerNameKey));

        if (headerNameKey.equals("Authorization")) {
            authorization = httpServletRequest.getHeader(headerNameKey);
        }
        if (headerNameKey.equals("Content-Type")) {
            contenttype = httpServletRequest.getHeader(headerNameKey);
        }
    }

    this.logger.info("contenttype = " + contenttype);

    try {

        // TODO validate Content-Type

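        // Read the whole request body into a single string, joining lines with the platform line separator.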
        String data = httpServletRequest.getReader().lines()
                .collect(Collectors.joining(System.lineSeparator()));

        if (data == null || data.equals("")) {

            this.logger.error(HttpServletResponse.SC_NO_CONTENT + " - No Content");
            httpServletResponse.sendError(HttpServletResponse.SC_NO_CONTENT, "No Content");
        } else {

            String postableData = null;

            // TODO bind interface Preprocessing
            if (Lookup.lookupAll(PreprocessingInterface.class).size() > 0) {

                PreprocessingInterface preprocessingInterface = Lookup.lookup(PreprocessingInterface.class);
                // init Authorization Service
                preprocessingInterface.init(this.config);

                postableData = preprocessingInterface.process(data);
            } else {

                // TODO correct error handling
                this.logger.error("[" + this.config.getProperty("service.name") + "] "
                        + HttpServletResponse.SC_INTERNAL_SERVER_ERROR + ": "
                        + "Authorization Interface not implemented!");
            }

            if (postableData != null) {

                // TODO if successful then POST as application/sparql-update to LinkedDataPlatform
                String sparql_url = this.config.getProperty("ldp.sparql-endpoint");

                // HTTP Request
                int timeout = Integer.parseInt(this.config.getProperty("ldp.timeout"));

                RequestConfig defaultRequestConfig = RequestConfig.custom().setSocketTimeout(timeout)
                        .setConnectTimeout(timeout).setConnectionRequestTimeout(timeout).build();

                CloseableHttpClient httpclient = HttpClients.custom()
                        .setDefaultRequestConfig(defaultRequestConfig).build();

                try {

                    HttpPost httpPost = new HttpPost(sparql_url);
                    httpPost.addHeader("Content-Type", "application/sparql-update");
                    httpPost.addHeader("Authorization", this.config.getProperty("ldp.authorization"));
                    httpPost.setEntity(new StringEntity(postableData));

                    CloseableHttpResponse httpResponse = null;

                    long start = System.nanoTime();
                    try {

                        httpResponse = httpclient.execute(httpPost);
                    } catch (ConnectTimeoutException | SocketTimeoutException e) {

                        this.logger.info("[" + this.getClass().getName() + "] " + e.getClass().getName() + ": "
                                + e.getMessage());
                        httpResponse = httpclient.execute(httpPost);
                    }
                    long elapsed = System.nanoTime() - start;
                    this.logger.info("[" + this.getClass().getName() + "] LDP request - "
                            + (elapsed / 1000.0 / 1000.0 / 1000.0) + " s");

                    try {

                        int statusCode = httpResponse.getStatusLine().getStatusCode();
                        HttpEntity httpEntity = httpResponse.getEntity();

                        // TODO
                        httpServletResponse.setStatus(statusCode);
                        httpServletResponse.getWriter().println(httpResponse.getStatusLine().getReasonPhrase());

                        EntityUtils.consume(httpEntity);

                    } finally {
                        httpResponse.close();
                    }
                } finally {

                    httpclient.close();
                }
            }
        }
    } catch (Exception e) {

        this.logger.error("something went wrong", e);
        httpServletResponse.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "something went wrong");
    }
}

From source file:com.evolveum.midpoint.provisioning.impl.manual.TestSemiManual.java

private String formatCsvLine(String[] data) {
    return Arrays.stream(data).map(s -> "\"" + s + "\"").collect(Collectors.joining(","));
}
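
For illustration (this call is hypothetical, not part of the test class), formatCsvLine(new String[] { "a", "b" }) returns "a","b": each value is wrapped in double quotes and the quoted values are joined with commas.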

From source file:com.intuit.wasabi.tests.service.priority.BatchPriorityAssignmentTest.java

@Test(groups = { "batchAssign" }, dependsOnGroups = { "setup" }, dependsOnMethods = { "t_batchAssign" })
public void t_changePriorityBatchAssign() {
    response = apiServerConnector.doPost("experiments/" + validExperimentsLists.get(0).id + "/priority/5");
    assertReturnCode(response, HttpStatus.SC_CREATED);
    response = apiServerConnector
            .doGet("applications/" + validExperimentsLists.get(0).applicationName + "/priorities");
    assertReturnCode(response, HttpStatus.SC_OK);
    clearAssignmentsMetadataCache();
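    // Build a JSON payload listing each experiment label, comma-separated.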
    String labels = "{\"labels\": ["
            + validExperimentsLists.stream().map(s -> "\"" + s.label + "\"").collect(Collectors.joining(","))
            + "]}";
    response = apiServerConnector.doPost(
            "/assignments/applications/" + validExperimentsLists.get(0).applicationName + "/users/johnDoe2",
            labels);
    assertReturnCode(response, HttpStatus.SC_OK);
    LOGGER.info("output: " + response.asString());
    Type listType = new TypeToken<Map<String, ArrayList<Map<String, Object>>>>() {
    }.getType();
    Map<String, List<Map<String, Object>>> result = new Gson().fromJson(response.asString(), listType);
    List<Map<String, Object>> assignments = result.get("assignments");
    Assert.assertNull(assignments.get(assignments.size() - 1).get("assignment"));
    IntStream.range(0, assignments.size() - 1)
            .forEach(i -> Assert.assertNotNull(assignments.get(i).get("assignment")));
}

From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java

/** Creates a command line call to launch spark
 * @param spark_home
 * @param yarn_home
 * @param spark_master
 * @param main_clazz
 * @param context_signature
 * @param main_jar
 * @param other_jars
 * @param spark_job_options
 * @param spark_system_options
 */
public static ProcessBuilder createSparkJob(final String job_name, final String spark_home,
        final String yarn_home, final String spark_master, final Optional<String> maybe_main_clazz,
        final String context_signature, final Optional<String> test_signature, final String main_jar_or_py,
        final Collection<String> other_jars, final Collection<String> other_files,
        final Collection<String> other_lang_files, final List<String> external_jars,
        final List<String> external_files, final List<String> external_lang_files,
        final Optional<Map<String, Object>> spark_generic_options, final Map<String, String> spark_job_options,
        final Map<String, String> spark_system_options

) {
    //https://spark.apache.org/docs/1.2.0/submitting-applications.html

    final List<String> command_line = ImmutableList.<String>builder().add(SBT_SUBMIT_BINARY).add("--name")
            .add(job_name)
            .addAll(maybe_main_clazz.map(main_clazz -> Arrays.asList("--class", main_clazz))
                    .orElse(Collections.emptyList()))
            .add("--master").add(spark_master).add("--jars")
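            // --jars expects a single comma-separated list of jar paths.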
            .add(Stream.concat(other_jars.stream(), external_jars.stream()).collect(Collectors.joining(",")))
            .addAll(Optional
                    .of(Stream.concat(other_files.stream(), external_files.stream())
                            .collect(Collectors.joining(",")))
                    .filter(s -> !s.isEmpty()).map(s -> Arrays.asList("--files", s))
                    .orElse(Collections.emptyList()))
            //TODO (ALEPH-63): handle R in the example below
            .addAll(Optional
                    .of(Stream.concat(other_lang_files.stream(), external_lang_files.stream())
                            .collect(Collectors.joining(",")))
                    .filter(s -> !s.isEmpty()).map(s -> Arrays.asList("--py-files", s))
                    .orElse(Collections.emptyList()))
            .addAll(Optional.ofNullable(System.getProperty("hdp.version")).map(hdp_version -> { // Set HDP version from whatever I'm set to
                return (List<String>) ImmutableList.<String>of("--conf",
                        "spark.executor.extraJavaOptions=-Dhdp.version=" + hdp_version, "--conf",
                        "spark.driver.extraJavaOptions=-Dhdp.version=" + hdp_version, "--conf",
                        "spark.yarn.am.extraJavaOption=-Dhdp.version=" + hdp_version);
            }).orElse(Collections.emptyList()))
            .addAll(spark_job_options.isEmpty() ? Collections.emptyList()
                    : spark_job_options.entrySet().stream()
                            .flatMap(kv -> Stream.of("--conf", kv.getKey() + "=" + kv.getValue()))
                            .collect(Collectors.toList()))
            .addAll(spark_system_options.entrySet().stream()
                    .flatMap(kv -> Stream.of(kv.getKey(), kv.getValue())).collect(Collectors.toList()))
            .addAll(spark_generic_options.map(opts -> Arrays.asList("--conf",
                    SparkTopologyConfigBean.JOB_CONFIG_KEY + "="
                            + BeanTemplateUtils.configureMapper(Optional.empty()).convertValue(opts,
                                    JsonNode.class)))
                    .orElse(Collections.emptyList()))
            .add(main_jar_or_py).add(context_signature)
            .addAll(test_signature.map(ts -> Arrays.asList(ts)).orElse(Collections.emptyList())).build();

    final ProcessBuilder pb = new ProcessBuilder();

    final Map<String, String> mutable_env = pb.environment();
    mutable_env.put("HADOOP_CONF_DIR", yarn_home);

    return pb.directory(new File(spark_home)).command(command_line);
}

From source file:com.esri.geoportal.commons.agp.client.AgpClient.java

/**
 * Adds item.
 * @param owner owner
 * @param folderId folder id (optional)
 * @param title title
 * @param description description
 * @param text text
 * @param thumbnailUrl thumbnail url
 * @param itemType item type (must be a URL type)
 * @param extent extent
 * @param typeKeywords type keywords
 * @param tags tags
 * @param token token
 * @return add item response
 * @throws URISyntaxException if invalid URL
 * @throws IOException if operation fails
 */
public ItemResponse addItem(String owner, String folderId, String title, String description, String text,
        URL thumbnailUrl, ItemType itemType, Double[] extent, String[] typeKeywords, String[] tags,
        String token) throws IOException, URISyntaxException {
    URIBuilder builder = new URIBuilder(addItemUri(owner, StringUtils.trimToNull(folderId)));

    HttpPost req = new HttpPost(builder.build());
    HashMap<String, String> params = new HashMap<>();
    params.put("f", "json");
    params.put("title", title);
    params.put("description", description);
    params.put("type", itemType.getTypeName());
    params.put("text", text);
    if (thumbnailUrl != null) {
        params.put("thumbnailurl", thumbnailUrl.toExternalForm());
    }
    if (extent != null && extent.length == 4) {
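        // Serialize the four extent coordinates into a comma-separated string.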
        params.put("extent",
                Arrays.asList(extent).stream().map(Object::toString).collect(Collectors.joining(",")));
    }
    if (typeKeywords != null) {
        params.put("typeKeywords", Arrays.asList(typeKeywords).stream().collect(Collectors.joining(",")));
    }
    if (tags != null) {
        params.put("tags", Arrays.asList(tags).stream().collect(Collectors.joining(",")));
    }
    params.put("token", token);

    req.setEntity(createEntity(params));

    return execute(req, ItemResponse.class);
}

From source file:com.sinet.gage.delta.DomainUpdatesImporter.java

/**
 * Performs the delta updates for Domains
 *
 * @param domainIdsToProcessUpdates
 */
public void processUpdatesForDomains(Set<Integer> domainIdsToProcessUpdates) {
    Set<Integer> domainIdsForUpdate;
    List<DomainResponse> domainResponseList;
    List<Domain> domainList = new ArrayList<>();

    for (Integer domainId : domainIdsToProcessUpdates) {

        domainIdsForUpdate = signalManager.getDomainIdsForDeltaImportsForADomain(domainId,
                serverSettings.getEntityChangedSignalTypeCode());

        if (!CollectionUtils.isEmpty(domainIdsForUpdate)) {
            log.debug("Domain Ids to be updated: " + domainIdsForUpdate);
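            // Collapse the domain ids into a comma-separated list for the bulk fetch.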
            String idsString = domainIdsForUpdate.stream().map(Object::toString)
                    .collect(Collectors.joining(", "));
            domainResponseList = getDomainsWithData(domainId, idsString);

            for (DomainResponse domainResponse : domainResponseList) {

                DomainResponse parentDomainResponse = dlapDomainClient.getDomain(null,
                        Long.parseLong(domainId.toString()));

                Domain domain = mapToDomain(domainResponse);
                if (domainResponse.getData() != null && domainResponse.getData().getCustomization() != null
                        && domainResponse.getData().getCustomization().getEdivatelearn() != null) {
                    if ("DISTRICT".equalsIgnoreCase(
                            domainResponse.getData().getCustomization().getEdivatelearn().getDomaintype())) {
                        domain.setStateDomainId(Long.parseLong(parentDomainResponse.getId()));
                        domain.setStateDomainName(parentDomainResponse.getName());
                        domain.setParentDomainName(parentDomainResponse.getName());
                    } else if ("SCHOOL".equalsIgnoreCase(
                            domainResponse.getData().getCustomization().getEdivatelearn().getDomaintype())) {
                        DomainResponse stateDomainResponse = dlapDomainClient.getDomain(null,
                                Long.parseLong(parentDomainResponse.getParentid()));
                        domain.setStateDomainId(Long.parseLong(stateDomainResponse.getId()));
                        domain.setStateDomainName(stateDomainResponse.getName());
                        domain.setParentDomainName(parentDomainResponse.getName());
                    }
                } else {
                    domain.setStateDomainId(Long.parseLong(parentDomainResponse.getId()));
                    domain.setStateDomainName(parentDomainResponse.getName());
                    domain.setParentDomainName(parentDomainResponse.getName());
                }
                domainList.add(domain);
            }
            log.debug("Updating domain ids: " + domainId);
            if (!CollectionUtils.isEmpty(domainList)) {
                domainsRepository.updateDomains(domainList);
            }
        }
    }
}

From source file:com.qcadoo.mes.cmmsMachineParts.states.MaintenanceEventStateValidationService.java

private void checkWorkerTimesDeviation(Entity event, StateChangeContext stateChangeContext) {

    Optional<Integer> progressTime = getProgressTime(event);
    Optional<BigDecimal> possibleDeviationPercent = getPossibleDeviationFromParameters();
    if (!progressTime.isPresent() || !possibleDeviationPercent.isPresent()
            || MaintenanceEventType.from(event).compareTo(MaintenanceEventType.PROPOSAL) == 0) {
        return;
    }
    Integer possibleDeviation = calculatePossibleDeviation(progressTime.get(), possibleDeviationPercent.get());
    Map<Entity, Integer> groupedWorkTimes = getGroupedStaffWorkTimes(event);
    List<String> workersWithIncorrectTime = Lists.newArrayList();
    for (Map.Entry<Entity, Integer> entry : groupedWorkTimes.entrySet()) {
        Integer diff = entry.getValue() - progressTime.get();
        if (diff > possibleDeviation) {
            workersWithIncorrectTime.add(entry.getKey().getStringField(StaffFields.NAME) + " "
                    + entry.getKey().getStringField(StaffFields.SURNAME));
        }
    }
    if (!workersWithIncorrectTime.isEmpty()) {
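        // List every worker whose reported time exceeds the allowed deviation in one message.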
        stateChangeContext.addMessage("cmmsMachineParts.maintenanceEvent.state.tooLongWorkersTime",
                StateMessageType.INFO, false,
                workersWithIncorrectTime.stream().collect(Collectors.joining(", ")));
    }

}