Example usage for java.lang.String.join

List of usage examples for java.lang.String.join

Introduction

On this page you can find usage examples for java.lang.String.join.

Prototype

public static String join(CharSequence delimiter, Iterable<? extends CharSequence> elements) 

Document

Returns a new String composed of copies of the CharSequence elements joined together with a copy of the specified delimiter.
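
A minimal, self-contained sketch of the method is shown below; the class name and sample values are illustrative only. Note that String also provides a varargs overload, String.join(CharSequence delimiter, CharSequence... elements), which several of the examples below use.

import java.util.Arrays;
import java.util.List;

public class StringJoinDemo {
    public static void main(String[] args) {
        // Iterable overload: join the elements of a List<String> with ", "
        List<String> parts = Arrays.asList("alpha", "beta", "gamma");
        System.out.println(String.join(", ", parts)); // alpha, beta, gamma

        // Varargs overload: join individual CharSequence arguments with "-"
        System.out.println(String.join("-", "2024", "01", "31")); // 2024-01-31
    }
}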

Usage

From source file:com.formkiq.core.service.workflow.WorkflowEditorServiceImplTest.java

/**
 * testEventIdformfields01().
 * @throws Exception Exception
 */
@Test
public void testEventIdformfields01() throws Exception {
    // given
    String formUUID = UUID.randomUUID().toString();
    String param81 = "Test[" + formUUID + "]";
    FormJSON testform = TestDataBuilder.createSimpleForm();

    // when
    expect(this.flow.getData()).andReturn(this.mockarchive);
    expect(this.request.getParameter("81")).andReturn(param81);
    expect(this.mockarchive.getForm(formUUID)).andReturn(testform);

    replayAll();
    FormJSONField field = this.ws.eventIdformfields(this.flow, this.request, null);

    // verify
    verifyAll();

    assertEquals("Total ($)[1]", String.join(",", field.getOptions()));
}

From source file:io.anserini.doc.DataModel.java

public String generateEvalCommand(String collection) {
    Map<String, Object> config = this.collections.get(collection);
    String allCommandsStr = "";
    Set<String> allEvalCommands = new HashSet<>();
    ObjectMapper oMapper = new ObjectMapper();
    List<?> models = oMapper.convertValue(safeGet(config, "models"), List.class);
    List<?> topics = oMapper.convertValue(safeGet(config, "topics"), List.class);
    List<?> evals = oMapper.convertValue(safeGet(config, "evals"), List.class);
    for (Object modelObj : models) {
        Model model = oMapper.convertValue(modelObj, Model.class);
        for (Object topicObj : topics) {
            Topic topic = oMapper.convertValue(topicObj, Topic.class);
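            // Maps an eval command to (shared trailing arguments -> collected option strings),
            // so combinable eval invocations can be merged into one command line below.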
            Map<String, Map<String, List<String>>> combinedEvalCmd = new HashMap<>();
            for (Object evalObj : evals) {
                Eval eval = oMapper.convertValue(evalObj, Eval.class);
                String evalCmd = eval.getCommand();
                List<?> evalParams = oMapper.convertValue(eval.getParams(), List.class);
                String evalCmdOption = "";
                if (evalParams != null) {
                    for (Object option : evalParams) {
                        evalCmdOption += " " + option;
                }
                }
                String evalCmdResidual = "";
                evalCmdResidual += " " + Paths.get((String) safeGet(config, "qrels_root"), topic.getQrel());
                evalCmdResidual += " -output run." + safeGet(config, "name") + "." + model.getName() + "."
                        + topic.getPath();
                evalCmdResidual += "\n";
                if (eval.isCan_combine() || evalCmdOption.isEmpty()) {
                    combinedEvalCmd.putIfAbsent(evalCmd, new HashMap<>());
                    combinedEvalCmd.get(evalCmd).putIfAbsent(evalCmdResidual, new ArrayList<>());
                    combinedEvalCmd.get(evalCmd).get(evalCmdResidual).add(evalCmdOption);
                } else {
                    allCommandsStr += evalCmd + evalCmdOption + evalCmdResidual;
                }
            }
            for (Map.Entry<String, Map<String, List<String>>> entry : combinedEvalCmd.entrySet()) {
                for (Map.Entry<String, List<String>> innerEntry : entry.getValue().entrySet()) {
                    allCommandsStr += entry.getKey() + String.join("", innerEntry.getValue())
                            + innerEntry.getKey();
                }
            }
        }
        allCommandsStr += "\n";
    }

    return allCommandsStr.substring(0, allCommandsStr.lastIndexOf("\n"));
}

From source file:net.fabricmc.loader.FabricLoader.java

protected void checkDependencies() {
    LOGGER.debug("Validating mod dependencies");

    for (ModContainer mod : mods) {
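        // Labeled outer loop: "continue dependencies" below jumps to the next required
        // entry as soon as a mod satisfying the dependency is found.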
        dependencies: for (Map.Entry<String, ModInfo.Dependency> entry : mod.getInfo().getRequires()
                .entrySet()) {
            String depId = entry.getKey();
            ModInfo.Dependency dep = entry.getValue();
            for (ModContainer mod2 : mods) {
                if (mod == mod2) {
                    continue;
                }
                if (depId.equalsIgnoreCase(mod2.getInfo().getId()) && dep.satisfiedBy(mod2.getInfo())) {
                    continue dependencies;
                }
            }

            throw new DependencyException(String.format("Mod %s requires %s @ %s", mod.getInfo().getId(), depId,
                    String.join(", ", dep.getVersionMatchers())));
        }

        conflicts: for (Map.Entry<String, ModInfo.Dependency> entry : mod.getInfo().getConflicts().entrySet()) {
            String depId = entry.getKey();
            ModInfo.Dependency dep = entry.getValue();
            for (ModContainer mod2 : mods) {
                if (mod == mod2) {
                    continue;
                }
                if (!depId.equalsIgnoreCase(mod2.getInfo().getId()) || !dep.satisfiedBy(mod2.getInfo())) {
                    continue conflicts;
                }
            }

            throw new DependencyException(String.format("Mod %s conflicts with %s @ %s", mod.getInfo().getId(),
                    depId, String.join(", ", dep.getVersionMatchers())));
        }
    }
}

From source file:com.wx3.galacdecks.Bootstrap.java

private void importAiHints(GameDatastore datastore, String path) throws IOException {
    Files.walk(Paths.get(path)).forEach(filePath -> {
        if (Files.isRegularFile(filePath)) {
            try {
                if (FilenameUtils.getExtension(filePath.getFileName().toString()).toLowerCase().equals("js")) {
                    String id = FilenameUtils.removeExtension(filePath.getFileName().toString());
                    List<String> lines = Files.readAllLines(filePath);
                    String script = String.join("\n", lines);
                    AiHint hint = new AiHint(id, script);
                    //datastore.createAiHint(hint);
                    aiHintCache.put(id, hint);
                    logger.info("Imported hint " + id);
                }
            } catch (Exception e) {
                throw new RuntimeException("Failed to parse " + filePath + ": " + e.getMessage());
            }
        }
    });
}

From source file:com.example.dlp.RiskAnalysis.java

private static void calculateKAnonymity(String projectId, String datasetId, String tableId,
        List<String> quasiIds) throws Exception {
    // [START dlp_k_anonymity]
    /**
     * Calculate k-anonymity for quasi-identifiers in a BigQuery table using the DLP API.
     * @param projectId The Google Cloud Platform project ID to run the API call under.
     * @param datasetId The BigQuery dataset to analyze.
     * @param tableId The BigQuery table to analyze.
     * @param quasiIds The names of columns that form a composite key ('quasi-identifiers').
     */

    // instantiate a client
    try (DlpServiceClient dlpServiceClient = DlpServiceClient.create()) {

        // Example values:
        // projectId = "my-project-id";
        // datasetId = "my_dataset";
        // tableId = "my_table";
        // quasiIds = Arrays.asList("age", "city");

        List<FieldId> quasiIdFields = quasiIds.stream()
                .map(columnName -> FieldId.newBuilder().setColumnName(columnName).build())
                .collect(Collectors.toList());

        KAnonymityConfig kanonymityConfig = KAnonymityConfig.newBuilder().addAllQuasiIds(quasiIdFields).build();

        BigQueryTable bigQueryTable = BigQueryTable.newBuilder().setProjectId(projectId).setDatasetId(datasetId)
                .setTableId(tableId).build();

        PrivacyMetric privacyMetric = PrivacyMetric.newBuilder().setKAnonymityConfig(kanonymityConfig).build();

        AnalyzeDataSourceRiskRequest request = AnalyzeDataSourceRiskRequest.newBuilder()
                .setPrivacyMetric(privacyMetric).setSourceTable(bigQueryTable).build();

        // asynchronously submit a risk analysis operation
        OperationFuture<RiskAnalysisOperationResult, RiskAnalysisOperationMetadata, Operation> responseFuture = dlpServiceClient
                .analyzeDataSourceRiskAsync(request);

        // ...
        // block on response
        RiskAnalysisOperationResult response = responseFuture.get();
        KAnonymityHistogramBucket results = response.getKAnonymityResult()
                .getEquivalenceClassHistogramBuckets(0);

        System.out.println("Bucket size range: [" + results.getEquivalenceClassSizeLowerBound() + ", "
                + results.getEquivalenceClassSizeUpperBound() + "]");

        for (KAnonymityEquivalenceClass bucket : results.getBucketValuesList()) {
            List<String> quasiIdValues = bucket.getQuasiIdsValuesList().stream().map(v -> v.toString())
                    .collect(Collectors.toList());

            System.out.println("\tQuasi-ID values: " + String.join(", ", quasiIdValues));
            System.out.println("\tClass size: " + bucket.getEquivalenceClassSize());
        }
    } catch (Exception e) {
        System.out.println("Error in kAnonymityAnalysis: " + e.getMessage());
    }
    // [END dlp_k_anonymity]
}

From source file:multiplayer.pong.client.LobbyFrame.java

private void pendingRequests() {
    Vector<String> req = daoReq.pendingRequests(SocketHandler.username);
    int count = req.size();
    if (count == 0)
        return;
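    // The messages below are French UI strings, roughly: "You have N friend
    // request(s) from: <names>" and "Use the '/accepterAmi [name]' command
    // to accept a request."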
    displayWarning("Vous avez " + count + " demande" + (count != 1 ? "s" : "") + " d'ajout de: "
            + String.join(", ", req) + "\n");
    displayWarning("Utilisez la commande '/accepterAmi [nom]' pour accepter une demande.\n");
}

From source file:com.searchcode.app.jobs.repository.IndexGitRepoJob.java

/**
 * Uses the inbuilt git blame (via JGit) to determine code owners.
 * TODO this method appears to leak memory like crazy... need to investigate
 * TODO lots of hairy bits in here need tests to capture issues
 */
public List<CodeOwner> getBlameInfo(int codeLinesSize, String repoName, String repoLocations, String fileName) {
    List<CodeOwner> codeOwners = new ArrayList<>(codeLinesSize);
    try {
        // The / part is required due to centos bug for version 1.1.1
        // This appears to be correct
        String repoLoc = repoLocations + "/" + repoName + "/.git";

        Repository localRepository = new FileRepository(new File(repoLoc));
        BlameCommand blamer = new BlameCommand(localRepository);

        ObjectId commitID = localRepository.resolve("HEAD");

        if (commitID == null) {
            Singleton.getLogger().info("getBlameInfo commitID is null for " + repoLoc + " " + fileName);
            return codeOwners;
        }

        BlameResult blame;

        // Somewhere in here appears to be wrong...
        blamer.setStartCommit(commitID);
        blamer.setFilePath(fileName);
        blame = blamer.call();

        // Hail-mary attempt to solve the issue on CentOS: try to set the path at all costs
        if (blame == null) { // This one appears to solve the issue so don't remove it
            String[] split = fileName.split("/");
            blamer.setStartCommit(commitID);
            if (split.length != 1) {
                blamer.setFilePath(String.join("/", Arrays.asList(split).subList(1, split.length)));
            }
            blame = blamer.call();
        }
        if (blame == null) {
            String[] split = fileName.split("/");
            blamer.setStartCommit(commitID);
            if (split.length != 1) {
                blamer.setFilePath("/" + String.join("/", Arrays.asList(split).subList(1, split.length)));
            }
            blame = blamer.call();
        }

        if (blame == null) {
            Singleton.getLogger().info("getBlameInfo blame is null for " + repoLoc + " " + fileName);
        }

        if (blame != null) {
            // Get all the owners their number of commits and most recent commit
            HashMap<String, CodeOwner> owners = new HashMap<>();
            RevCommit commit;
            PersonIdent authorIdent;

            try {
                for (int i = 0; i < codeLinesSize; i++) {
                    commit = blame.getSourceCommit(i);
                    authorIdent = commit.getAuthorIdent();

                    if (owners.containsKey(authorIdent.getName())) {
                        CodeOwner codeOwner = owners.get(authorIdent.getName());
                        codeOwner.incrementLines();

                        int timestamp = codeOwner.getMostRecentUnixCommitTimestamp();

                        if (commit.getCommitTime() > timestamp) {
                            codeOwner.setMostRecentUnixCommitTimestamp(commit.getCommitTime());
                        }
                        owners.put(authorIdent.getName(), codeOwner);
                    } else {
                        owners.put(authorIdent.getName(),
                                new CodeOwner(authorIdent.getName(), 1, commit.getCommitTime()));
                    }
                }
            } catch (IndexOutOfBoundsException ex) {
                // Ignore this as it's not really a problem... or is it?
                Singleton.getLogger().info(
                        "IndexOutOfBoundsException when trying to get blame for " + repoName + " " + fileName);
            }

            codeOwners = new ArrayList<>(owners.values());
        }

    } catch (IOException ex) {
        Singleton.getLogger().info("IOException getBlameInfo when trying to get blame for " + repoName + " "
                + fileName + " " + ex.toString());
    } catch (GitAPIException ex) {
        Singleton.getLogger().info("GitAPIException getBlameInfo when trying to get blame for " + repoName + " "
                + fileName + " " + ex.toString());
    } catch (IllegalArgumentException ex) {
        Singleton.getLogger().info("IllegalArgumentException getBlameInfo when trying to get blame for "
                + repoName + " " + fileName + " " + ex.toString());
    }

    System.gc(); // Try to clean up
    return codeOwners;
}

From source file:com.sisrni.dao.BecaDao.java

public List<PojoMapaInteractivoBecas> getBecastListToCharts(List<String> paisSelected,
        List<String> tipoBecaSelected, String desde, String hasta) {
    String wherePais = "";
    String whereTipoBeca = "";
    String groupBy = " GROUP BY b.ID_PAIS_DESTINO";
    String limite = "";
    List<String> paisesFinales = new ArrayList<String>();

    if (paisSelected.size() > 0) {
        wherePais = wherePais + " AND pa.ID_PAIS IN (" + String.join(",", paisSelected) + ")";
    } else {
        limite += " LIMIT 5";
    }

    if (tipoBecaSelected.size() > 0) {
        whereTipoBeca += " AND b.ID_TIPO_BECA IN (" + String.join(",", tipoBecaSelected) + ")";
    }

    String query = "SELECT pa.ID_PAIS idPais,\n" + "  pa.CODIGO_PAIS codigoPais,\n"
            + "  pa.NOMBRE_PAIS nombrePais,\n" + "  COUNT(b.ID_BECA) cantidadBecas,\n"
            + "  SUM(b.MONTO_TOTAL) montoCooperacion\n"
            + "FROM BECA b INNER  JOIN PAIS pa   ON b.ID_PAIS_DESTINO= pa.ID_PAIS\n"
            + "WHERE b.OTORGADA=1 AND b.ANIO_GESTION BETWEEN " + Integer.parseInt(desde) + " AND "
            + Integer.parseInt(hasta) + "\n" + wherePais + whereTipoBeca + groupBy + limite; // limite applies " LIMIT 5" when no countries were selected
    try {
        Query q = getSessionFactory().getCurrentSession().createSQLQuery(query)
                .addScalar("idPais", new IntegerType()).addScalar("codigoPais", new StringType())
                .addScalar("nombrePais", new StringType()).addScalar("montoCooperacion", new DoubleType())
                .addScalar("cantidadBecas", new IntegerType())
                .setResultTransformer(Transformers.aliasToBean(PojoMapaInteractivoBecas.class));

        List<PojoMapaInteractivoBecas> listPojos = q.list();

        if (listPojos.size() > 0) {
            for (PojoMapaInteractivoBecas pj : listPojos) {
                paisesFinales.add(pj.getIdPais() + "");
            }
            String qt = "SELECT  tb.ID_TIPO_BECA idTipoBeca,\n" + "  tb.NOMBRE_TIPO_BECA nombreTipoBeca,\n"
                    + "  count(b.ID_BECA) cantidad\n" + "from BECA b INNER JOIN TIPO_BECA tb\n"
                    + "ON b.ID_TIPO_BECA = tb.ID_TIPO_BECA\n" + "WHERE b.ANIO_GESTION BETWEEN "
                    + Integer.parseInt(desde) + " AND " + Integer.parseInt(hasta) + "\n"
                    + "  AND b.ID_PAIS_DESTINO IN (" + String.join(",", paisesFinales) + ")\n"
                    + "AND b.ID_TIPO_BECA IN (" + String.join(",", tipoBecaSelected) + ")\n"
                    + "      AND b.OTORGADA=1\n" + "GROUP BY b.ID_TIPO_BECA";
            Query rtp = getSessionFactory().getCurrentSession().createSQLQuery(qt)
                    .addScalar("idTipoBeca", new IntegerType()).addScalar("nombreTipoBeca", new StringType())
                    .addScalar("cantidad", new IntegerType())
                    .setResultTransformer(Transformers.aliasToBean(PojoBecasByTipo.class));
            List<PojoBecasByTipo> listTipos = rtp.list();

            for (PojoMapaInteractivoBecas pj : listPojos) {
                String qp = "SELECT * FROM BECA pr \n" + " WHERE pr.ANIO_GESTION BETWEEN "
                        + Integer.parseInt(desde) + " AND " + Integer.parseInt(hasta) + "\n"
                        + "AND pr.OTORGADA=1 AND pr.ID_PAIS_DESTINO=" + pj.getIdPais();

                //String qp = "from Proyect pr Where pr.idPaisCooperante='" + pj.getCodigoPais() + "' and pr.idTipoProyecto in (" + String.join(",", tipoProyectoSelected) + ") and pr.anioGestion between " + Integer.parseInt(desde) + " AND " + Integer.parseInt(hasta);
                Query r = getSessionFactory().getCurrentSession().createSQLQuery(qp).addEntity(Beca.class);
                pj.setBecastList(r.list());
                pj.setSeries(listTipos);
            }
        }
        return listPojos;
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}

From source file:fr.pilato.elasticsearch.crawler.fs.client.ElasticsearchClient.java

public SearchResponse search(String index, String type, SearchRequest searchRequest) throws IOException {
    logger.debug("search [{}]/[{}], request [{}]", index, type, searchRequest);

    String path = "/";

    if (index != null) {
        path += index + "/";
    }
    if (type != null) {
        path += type + "/";
    }

    path += "_search";

    Map<String, String> params = new HashMap<>();
    if (searchRequest.getQuery() != null) {
        params.put("q", searchRequest.getQuery());
    }
    if (searchRequest.getFields() != null) {
        // If we never set elasticsearch behavior, it's time to do so
        if (FIELDS == null) {
            setElasticsearchBehavior();
        }
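        // The cast to CharSequence[] selects the varargs String.join(CharSequence,
        // CharSequence...) overload for the array of field names.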
        params.put(FIELDS, String.join(",", (CharSequence[]) searchRequest.getFields()));
    }
    if (searchRequest.getSize() != null) {
        params.put("size", searchRequest.getSize().toString());
    }
    Response restResponse = client.performRequest("GET", path, params);
    SearchResponse searchResponse = JsonUtil.deserialize(restResponse, SearchResponse.class);

    logger.trace("search response: {}", searchResponse);
    return searchResponse;
}

From source file:org.shareok.data.lawlibrary.LawLibDataHandlerImpl.java

/**
 * The data provided by the Law Library have some problems:
 *  1. The PDF file names do NOT always match file names listed in the CSV file
 *  2. The file extension names are inconsistent in terms of capitalization
 *  3. Format the existing dates to be ISO-8601 format
 *  4. Remove some columns in the data file as they either have no data or are not recognized by the metadata schema
 *  5. Translate the columns into correct metadata schema
 *  6. When the item has no title, use the dcterms.alternative as dc.title
 *  7. Change the column name File_location to be filename for the SAFBuilder
 *
 * The solutions:
 *  1. Match the file names in the CSV file with the existing files in the folder
 *  2. Normalize all file-name extensions to lowercase .pdf
 *  3. Reformat the existing dates to ISO-8601
 *  4. Remove the unnecessary columns
 *  5. Convert the plain column names into the correct metadata schema
 *  6. Copy the data to the dc.title column and clear the source column
 *  7. Rename the File_location column to filename
 *  
 */
private void cleanData() {
    if (null == data) {
        csv.readData();
        data = csv.getData();
    }
    try {
        if (null == pdfFileList || pdfFileList.size() == 0) {
            getPdfFileListFromCleanedOutputPathFiles();
        }

        // Remove the unused columns
        csv.deleteColumnByColumnName(COULUMNS_TO_BE_DELETED);

        // Match the pdf files with the file names in the csv file
        // Only keep the data records that have the PDF files
        // Also update the column names to the metadata schema
        // At the same time, update the date format of the records
        Map<String, String> cleanData = new HashMap<>();
        String newKey = "";
        String value = "";
        int newRecordCount = 0;
        for (int i = 1; i < csv.getRecordCount(); i++) {
            String key = "file_location-" + String.valueOf(i);
            String csvFileName = DocumentProcessorUtil.getFileNameWithoutExtension((String) data.get(key));
            if (csvFileName.toLowerCase().endsWith(".pdf")) {
                csvFileName = csvFileName.replace(".pdf", "");
                csvFileName = csvFileName.replace(".PDF", "");
            }
            csvFileName += ".pdf";
            // Process the files that have the "-Serial-" pattern when the actual PDF files end with ***A.PDF
            String csvFileNameWithA = matchSerialAFiles(csvFileName);
            data.put("file_location-" + String.valueOf(i), csvFileName);
            if (pdfFileList.contains(csvFileName) || pdfFileList.contains(csvFileNameWithA)) {
                String fileInfo = "";
                if (pdfFileList.contains(csvFileName)) {
                    matchedPdfFileList.add(csvFileName);
                    fileInfo += csvFileName;
                    newRecordCount++;
                }
                if (pdfFileList.contains(csvFileNameWithA)) {
                    matchedPdfFileList.add(csvFileNameWithA);
                    if ("".equals(fileInfo)) {
                        fileInfo += csvFileNameWithA;
                    } else {
                        fileInfo += "||" + csvFileNameWithA;
                    }
                    newRecordCount++;
                }

                for (String column : csv.getFileHeadMapping()) {
                    if (null == column || column.equals("")) {
                        continue;
                    } else {
                        column = column.trim();
                        key = column + "-" + String.valueOf(i);
                        value = (String) data.get(key);
                        //                            if(null == value || "null" == value){
                        //                                System.out.println("null value for paper "+csvFileName+" with column "+column);
                        //                            }
                        String dcTerm = (String) (COLUMN_NAME_MAP_METADATA_SCHEMA.get(column));
                        newKey = dcTerm + "-" + String.valueOf(i);
                        if (column.equals("Document Date")) {
                            value = changeDataFormat(value);
                        } else if (column.contains("Document Title") && value.contains("Document not titled")) {
                            value = (String) data
                                    .get("Official treaty name (title-alternative)-" + String.valueOf(i));
                            data.put("Official treaty name (title-alternative)-" + String.valueOf(i), "");
                        } else if (column.contains("file_location")) {
                            value = fileInfo;
                        }
                        cleanData.put(newKey, value);
                    }

                }
            }
        }
        setData(cleanData);
        csv.setData((HashMap) cleanData);
        String[] newHeadingsArray = COLUMN_NAME_MAP_METADATA_SCHEMA.values()
                .toArray(new String[COLUMN_NAME_MAP_METADATA_SCHEMA.values().size()]);
        csv.setFileHeadMapping(newHeadingsArray);
        csv.setRecordCount(newRecordCount);
        outputCsvFilePath = csv.outputData(outputFilePath + File.separator + "metadata.csv");
        DocumentProcessorUtil.outputStringToFile(String.join("\n", matchedPdfFileList),
                new File(outputFilePath).getPath() + File.separator + "matchedPdfFiles.txt");
        DocumentProcessorUtil.outputStringToFile(String.join("\n", getUnmatchedFileList()),
                new File(outputFilePath).getPath() + File.separator + "unmatchedPdfFiles.txt");
    } catch (Exception ex) {
        logger.error("Cannot clean up the data.", ex);
    }
}