Example usage for java.lang System lineSeparator

List of usage examples for java.lang System lineSeparator

Introduction

On this page you can find example usages of java.lang.System.lineSeparator().

Prototype

public static String lineSeparator()

To view the source code for java.lang.System.lineSeparator(), click the Source Link below.

Click Source Link

Usage

From source file:org.apache.openaz.xacml.admin.view.windows.PDPStatusWindow.java

/**
 * Populates the status table with the PDP's current state: the overall
 * status plus one row each for load errors, load warnings, loaded/failed
 * policies, and loaded/failed PIP configurations. Each list is rendered as
 * a newline-separated {@code TextArea} sized to the number of entries.
 */
protected void initialize() {
    //
    // Setup the table - real simple
    //
    this.table.addContainerProperty("Property", Label.class, null);
    this.table.addContainerProperty("Value", TextArea.class, null);
    //
    // Set the overall status - always exactly one row
    //
    Integer id = 1;
    this.table.addItem(
            new Object[] { new Label("Status"), this.createTextArea(this.status.getStatus().toString(), 1) },
            id++);
    //
    // Setup errors
    //
    StringBuilder builder = new StringBuilder();
    int rows = 0;
    for (String error : this.status.getLoadErrors()) {
        builder.append(error).append(System.lineSeparator());
        rows++;
    }
    id = addRow("Errors", builder, rows, id);
    //
    // Setup warnings (the original comment said "Errors" here - this
    // section renders getLoadWarnings())
    //
    builder = new StringBuilder();
    rows = 0;
    for (String warning : this.status.getLoadWarnings()) {
        builder.append(warning).append(System.lineSeparator());
        rows++;
    }
    id = addRow("Warnings", builder, rows, id);
    //
    // Setup the loaded policy table
    //
    builder = new StringBuilder();
    rows = 0;
    for (PDPPolicy policy : this.status.getLoadedPolicies()) {
        builder.append(policy.getName()).append(System.lineSeparator());
        rows++;
    }
    id = addRow("Policies Loaded", builder, rows, id);
    //
    // Setup the failed policy table
    //
    builder = new StringBuilder();
    rows = 0;
    for (PDPPolicy policy : this.status.getFailedPolicies()) {
        builder.append(policy.getName()).append(System.lineSeparator());
        rows++;
    }
    id = addRow("Policies Failed To Load", builder, rows, id);
    //
    // Setup the loaded PIP configuration table
    //
    builder = new StringBuilder();
    rows = 0;
    for (PDPPIPConfig config : this.status.getLoadedPipConfigs()) {
        builder.append(config.getName()).append(System.lineSeparator());
        rows++;
    }
    id = addRow("Loaded PIP Configurations", builder, rows, id);
    //
    // Setup the failed PIP configuration table
    //
    builder = new StringBuilder();
    rows = 0;
    for (PDPPIPConfig config : this.status.getFailedPipConfigs()) {
        builder.append(config.getName()).append(System.lineSeparator());
        rows++;
    }
    id = addRow("Failed PIP Configurations", builder, rows, id);
    //
    // Set the page length so every row is visible
    //
    table.setPageLength(id - 1);
    table.setReadOnly(true);
    table.setSizeFull();
}

/**
 * Adds one labeled row to the status table.
 *
 * @param label   property name shown in the left column
 * @param content newline-separated values for the right column
 * @param rows    number of lines appended into {@code content}
 * @param id      item id to register the row under
 * @return the next available item id ({@code id + 1})
 */
private Integer addRow(String label, StringBuilder content, int rows, Integer id) {
    // A TextArea needs at least one row even when the list was empty.
    this.table.addItem(new Object[] { new Label(label),
            this.createTextArea(content.toString(), Math.max(rows, 1)) }, id);
    return id + 1;
}

From source file:it.sonarlint.cli.tools.SonarlintCli.java

/**
 * Splits the captured standard output into individual lines.
 *
 * @return the output of {@code getOut()}, split on the platform line separator
 */
public String[] getOutLines() {
    final String newline = System.lineSeparator();
    return getOut().split(newline);
}

From source file:de.static_interface.sinklibrary.api.command.SinkCommandOptions.java

/**
 * Lazily creates the CLI help formatter; on the first call the formatted
 * help text (with {ALIAS}/{PREFIX} placeholders substituted from the parent
 * command) is printed to the supplied writer. Subsequent calls return the
 * cached instance without printing again.
 *
 * @param writer destination for the help text on first invocation
 * @return the cached {@link HelpFormatter}
 */
public HelpFormatter getCliHelpFormatter(Writer writer) {
    if (cliHelpFormatter != null) {
        return cliHelpFormatter;
    }
    HelpFormatter formatter = new HelpFormatter();
    formatter.setNewLine(System.lineSeparator());
    String syntax = getCmdLineSyntax()
            .replaceAll("\\{ALIAS\\}", parentCommand.getCmdAlias())
            .replaceAll("\\{PREFIX\\}", parentCommand.getCommandPrefix());
    formatter.printHelp(new PrintWriter(writer), HelpFormatter.DEFAULT_WIDTH, syntax, null,
            getCliOptions(), HelpFormatter.DEFAULT_LEFT_PAD, HelpFormatter.DEFAULT_DESC_PAD, null);
    cliHelpFormatter = formatter;
    return cliHelpFormatter;
}

From source file:com.hp.mqm.atrf.core.configuration.CliParser.java

/**
 * Builds the command-line option definitions for the tool.
 *
 * <p>Registers informational flags, output/configuration file options,
 * mutually exclusive password sources (inline vs. file) for both ALM and
 * ALM Octane, and run-filtering options. Also records which options may
 * appear at most once on the command line.
 */
public CliParser() {
    // Informational flags.
    options.addOption(Option.builder(HELP_OPTION).longOpt(HELP_OPTION_LONG).desc("Show this help").build());
    options.addOption(Option.builder(VERSION_OPTION).longOpt(VERSION_OPTION_LONG)
            .desc("Show version of this tool").build());

    // Output and configuration files. The output file's argument is
    // optional: -o alone falls back to DEFAULT_OUTPUT_FILE.
    options.addOption(Option.builder(OUTPUT_FILE_OPTION).longOpt(OUTPUT_FILE_OPTION_LONG).desc(
            "Write output to file instead of sending it to ALM Octane. File path is optional. Default file name is '"
                    + DEFAULT_OUTPUT_FILE + "'." + System.lineSeparator()
                    + " When saving to a file, the tool saves first 1000 runs." + System.lineSeparator()
                    + "No ALM Octane URL or authentication configuration is required if you use this option.")
            .hasArg().argName("FILE").optionalArg(true).build());
    options.addOption(Option.builder(CONFIG_FILE_OPTION).longOpt(CONFIG_FILE_OPTION_LONG)
            .desc("Configuration file location. Default configuration file name is '" + DEFAULT_CONF_FILE + "'")
            .hasArg().argName("FILE").build());

    // ALM password: inline value and password-file are mutually exclusive.
    OptionGroup passAlmGroup = new OptionGroup();
    passAlmGroup.addOption(Option.builder(PASSWORD_ALM_OPTION).longOpt(PASSWORD_ALM_OPTION_LONG)
            .desc("Password for ALM user to use for retrieving test results").hasArg().argName("PASSWORD")
            .build());
    passAlmGroup.addOption(Option.builder(PASSWORD_ALM_FILE_OPTION).longOpt(PASSWORD_ALM_FILE_OPTION_LONG)
            .desc("Location of file with password for ALM user").hasArg().argName("FILE").build());
    options.addOptionGroup(passAlmGroup);

    // ALM Octane password: same inline-vs-file exclusivity.
    OptionGroup passOctaneGroup = new OptionGroup();
    passOctaneGroup.addOption(Option.builder(PASSWORD_OCTANE_OPTION).longOpt(PASSWORD_OCTANE_OPTION_LONG)
            .desc("Password for ALM Octane user").hasArg().argName("PASSWORD").optionalArg(true).build());
    passOctaneGroup.addOption(Option.builder(PASSWORD_OCTANE_FILE_OPTION)
            .longOpt(PASSWORD_OCTANE_FILE_OPTION_LONG)
            .desc("Location of file with password for ALM Octane user").hasArg().argName("FILE").build());
    options.addOptionGroup(passOctaneGroup);

    // Run filters: minimum run id, earliest run date, and result limit.
    options.addOption(Option.builder(RUN_FILTER_ID_OPTION).longOpt(RUN_FILTER_ID_OPTION_LONG)
            .desc("Filter the ALM test results to retrieve only test runs with this run ID or higher").hasArg()
            .argName("ID").build());
    options.addOption(Option.builder(RUN_FILTER_DATE_OPTION).longOpt(RUN_FILTER_DATE_OPTION_LONG)
            .desc("Filter the ALM test results to retrieve only test runs from this date or later").hasArg()
            .argName("YYYY-MM-DD").build());

    options.addOption(Option.builder(RUN_FILTER_LIMIT_OPTION).longOpt(RUN_FILTER_LIMIT_OPTION_LONG)
            .desc("Limit number of ALM runs to retrieve ").hasArg().argName("NUMBER").build());

    // These options may be supplied at most once each.
    argsWithSingleOccurrence.addAll(Arrays.asList(OUTPUT_FILE_OPTION, CONFIG_FILE_OPTION, PASSWORD_ALM_OPTION,
            PASSWORD_ALM_FILE_OPTION, PASSWORD_OCTANE_OPTION, PASSWORD_OCTANE_FILE_OPTION, RUN_FILTER_ID_OPTION,
            RUN_FILTER_DATE_OPTION, RUN_FILTER_LIMIT_OPTION));

}

From source file:de.static_interface.sinkscripts.scriptengine.scriptcommand.ScriptCommandBase.java

/**
 * Lazily creates the CLI help formatter; on the first call the formatted
 * help text (with the {COMMAND} placeholder replaced by this command's
 * name) is printed to the supplied writer. Subsequent calls return the
 * cached instance without printing again.
 *
 * @param writer destination for the help text on first invocation
 * @return the cached {@link HelpFormatter}
 */
public HelpFormatter getCliHelpFormatter(Writer writer) {
    if (cliHelpFormatter != null) {
        return cliHelpFormatter;
    }
    HelpFormatter formatter = new HelpFormatter();
    formatter.setNewLine(System.lineSeparator());
    String syntax = getSyntax().replaceAll("\\{COMMAND\\}", getName());
    formatter.printHelp(new PrintWriter(writer), HelpFormatter.DEFAULT_WIDTH, syntax, null,
            options, HelpFormatter.DEFAULT_LEFT_PAD, HelpFormatter.DEFAULT_DESC_PAD, null);
    cliHelpFormatter = formatter;
    return cliHelpFormatter;
}

From source file:jmupen.MyListSelectionListener.java

/**
 * Removes the line at index {@code removeLine} (zero-based) from the given
 * text file, rewriting the file in place with platform line separators.
 *
 * @param removeLine zero-based index of the line to delete
 * @param text       the UTF-8 text file to modify
 * @throws IOException if the file cannot be read or written
 */
private void removeLines(int removeLine, File text) throws IOException {
    List<String> textLines = FileUtils.readLines(text, StandardCharsets.UTF_8);

    textLines.remove(removeLine);

    StringBuilder builder = new StringBuilder();

    // Each line - including the last - gets a trailing separator, matching
    // the original file layout produced by this writer.
    for (String line : textLines) {
        builder.append(line).append(System.lineSeparator());
    }

    // Write back with the same charset used for reading. The charset-less
    // writeStringToFile overload is deprecated and uses the platform default
    // encoding, which corrupted non-ASCII content on non-UTF-8 platforms.
    FileUtils.writeStringToFile(text, builder.toString(), StandardCharsets.UTF_8);

}

From source file:com.googlecode.jmxtrans.model.output.FileWriter.java

/**
 * Writes one formatted line per result value to a temporary file, then
 * atomically replaces the output file with it via rename.
 *
 * @param server  the server the results came from (unused here, part of the contract)
 * @param query   the query the results answer; used to build each line's key
 * @param results the results to write
 * @throws Exception if the temp file cannot be opened or written
 */
@Override
protected void internalWrite(Server server, Query query, ImmutableList<Result> results) throws Exception {
    try (PrintWriter outputTempPrintWriter = new PrintWriter(this.outputTempFile, "UTF-8")) {
        List<String> typeNames = this.getTypeNames();

        for (Result result : results) {
            log.debug(result.toString());

            for (Map.Entry<String, Object> values : result.getValues().entrySet()) {
                outputTempPrintWriter.printf(lineFormat + System.lineSeparator(),
                        KeyUtils.getKeyString(query, result, values, typeNames), values.getValue());
            }
        }
    }
    // The rename was previously wrapped in an `assert`, so with assertions
    // disabled (the JVM default) renameTo() was never even invoked and the
    // output file was never updated. Execute it unconditionally and fail
    // loudly if it does not succeed.
    if (!this.outputTempFile.renameTo(this.outputFile)) {
        throw new IllegalStateException(
                "Failed to rename " + this.outputTempFile + " to " + this.outputFile);
    }
}

From source file:com.netflix.genie.web.jobs.workflow.impl.JobTask.java

/**
 * {@inheritDoc}
 *
 * <p>Appends the job's run-script body to the writer in the context:
 * sources the optional setup file, stages configs/dependencies/attachments,
 * dumps the environment, launches the command in the background, waits on
 * its PID, and records the exit code in the done file. Timing is recorded
 * under {@code JOB_TASK_TIMER_NAME} whether the task succeeds or fails.
 */
@Override
public void executeTask(@NotNull final Map<String, Object> context) throws GenieException, IOException {
    final long start = System.nanoTime();
    final Set<Tag> tags = Sets.newHashSet();
    try {
        final JobExecutionEnvironment jobExecEnv = (JobExecutionEnvironment) context
                .get(JobConstants.JOB_EXECUTION_ENV_KEY);
        final String jobWorkingDirectory = jobExecEnv.getJobWorkingDir().getCanonicalPath();
        final Writer writer = (Writer) context.get(JobConstants.WRITER_KEY);
        final String jobId = jobExecEnv.getJobRequest().getId()
                .orElseThrow(() -> new GeniePreconditionException("No job id found. Unable to continue"));
        log.info("Starting Job Task for job {}", jobId);

        final Optional<String> setupFile = jobExecEnv.getJobRequest().getSetupFile();
        if (setupFile.isPresent()) {
            final String jobSetupFile = setupFile.get();
            if (StringUtils.isNotBlank(jobSetupFile)) {
                final String localPath = jobWorkingDirectory + JobConstants.FILE_PATH_DELIMITER + jobSetupFile
                        .substring(jobSetupFile.lastIndexOf(JobConstants.FILE_PATH_DELIMITER) + 1);

                fts.getFile(jobSetupFile, localPath);

                writer.write("# Sourcing setup file specified in job request" + System.lineSeparator());
                writer.write(
                        JobConstants.SOURCE
                                + localPath.replace(jobWorkingDirectory,
                                        "${" + JobConstants.GENIE_JOB_DIR_ENV_VAR + "}")
                                + System.lineSeparator());

                // Append new line
                writer.write(System.lineSeparator());
            }
        }

        // Iterate over and get all configs and dependencies
        final Collection<String> configsAndDependencies = Sets.newHashSet();
        configsAndDependencies.addAll(jobExecEnv.getJobRequest().getDependencies());
        configsAndDependencies.addAll(jobExecEnv.getJobRequest().getConfigs());
        for (final String dependentFile : configsAndDependencies) {
            if (StringUtils.isNotBlank(dependentFile)) {
                final String localPath = jobWorkingDirectory + JobConstants.FILE_PATH_DELIMITER + dependentFile
                        .substring(dependentFile.lastIndexOf(JobConstants.FILE_PATH_DELIMITER) + 1);

                fts.getFile(dependentFile, localPath);
            }
        }

        // Copy down the attachments if any to the current working directory
        this.attachmentService.copy(jobId, jobExecEnv.getJobWorkingDir());
        // Delete the files from the attachment service to save space on disk
        this.attachmentService.delete(jobId);

        // Print out the current environment to a env file before running the command.
        writer.write("# Dump the environment to a env.log file" + System.lineSeparator());
        writer.write("env | sort > " + "${" + JobConstants.GENIE_JOB_DIR_ENV_VAR + "}"
                + JobConstants.GENIE_ENV_PATH + System.lineSeparator());

        // Append new line
        writer.write(System.lineSeparator());

        writer.write("# Kick off the command in background mode and wait for it using its pid"
                + System.lineSeparator());

        writer.write(StringUtils.join(jobExecEnv.getCommand().getExecutable(), StringUtils.SPACE)
                + JobConstants.WHITE_SPACE + jobExecEnv.getJobRequest().getCommandArgs().orElse(EMPTY_STRING)
                + JobConstants.STDOUT_REDIRECT + "${" + JobConstants.GENIE_JOB_DIR_ENV_VAR + "}/"
                + JobConstants.STDOUT_LOG_FILE_NAME + JobConstants.STDERR_REDIRECT + "${"
                + JobConstants.GENIE_JOB_DIR_ENV_VAR + "}/" + JobConstants.STDERR_LOG_FILE_NAME + " &"
                + System.lineSeparator());

        // Save PID of children process, used in trap handlers to kill and verify termination
        writer.write(JobConstants.EXPORT + JobConstants.CHILDREN_PID_ENV_VAR + "=$!" + System.lineSeparator());
        // Wait for the above process started in background mode. Wait lets us get interrupted by kill signals.
        writer.write("wait ${" + JobConstants.CHILDREN_PID_ENV_VAR + "}" + System.lineSeparator());

        // Append new line
        writer.write(System.lineSeparator());

        // capture exit code and write to temporary genie.done
        writer.write("# Write the return code from the command in the done file." + System.lineSeparator());
        writer.write(JobConstants.GENIE_DONE_FILE_CONTENT_PREFIX + "${" + JobConstants.GENIE_JOB_DIR_ENV_VAR
                + "}" + "/" + JobConstants.GENIE_TEMPORARY_DONE_FILE_NAME + System.lineSeparator());

        // atomically swap temporary and actual genie.done file if one doesn't exist
        writer.write(
                "# Swapping done file, unless one exist created by trap handler." + System.lineSeparator());
        writer.write("mv -n " + "${" + JobConstants.GENIE_JOB_DIR_ENV_VAR + "}" + "/"
                + JobConstants.GENIE_TEMPORARY_DONE_FILE_NAME + " " + "${" + JobConstants.GENIE_JOB_DIR_ENV_VAR
                + "}" + "/" + JobConstants.GENIE_DONE_FILE_NAME + System.lineSeparator());

        // Print the timestamp once its done running. Use the platform line
        // separator here as well; this line previously hard-coded "\n" while
        // every other write in this method used System.lineSeparator().
        writer.write("echo End: `date '+%Y-%m-%d %H:%M:%S'`" + System.lineSeparator());

        log.info("Finished Job Task for job {}", jobId);
        MetricsUtils.addSuccessTags(tags);
    } catch (final Throwable t) {
        MetricsUtils.addFailureTagsWithException(tags, t);
        throw t;
    } finally {
        this.getRegistry().timer(JOB_TASK_TIMER_NAME, tags).record(System.nanoTime() - start,
                TimeUnit.NANOSECONDS);
    }
}

From source file:com.pearson.eidetic.driver.threads.subthreads.SnapshotVolumeSyncValidator.java

/**
 * Validates sync-snapshot configuration for each candidate volume, groups
 * the valid volumes by cluster name, and - for any cluster where at least
 * one volume is due for a snapshot - runs a SnapshotVolumeSync pass over
 * the whole cluster. Volumes with missing or malformed Eidetic tags are
 * logged and skipped. Sets {@code isFinished_} when the pass completes.
 */
@Override
public void run() {
    isFinished_ = false;
    AmazonEC2Client ec2Client = connect(region_, awsAccessKeyId_, awsSecretKey_);

    // Phase 1: parse each volume's Eidetic tag and bucket valid volumes
    // into validateCluster_ keyed by cluster name.
    for (Volume vol : VolumeSyncValidate_) {
        try {

            JSONParser parser = new JSONParser();

            // Raw JSON payload from the volume's tag; skip volumes without one.
            String inttagvalue = getIntTagValue(vol);
            if (inttagvalue == null) {
                continue;
            }

            JSONObject eideticParameters;
            try {
                Object obj = parser.parse(inttagvalue);
                eideticParameters = (JSONObject) obj;
            } catch (Exception e) {
                logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                        + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + vol.getVolumeId()
                        + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                        + StackTrace.getStringFromStackTrace(e) + "\"");
                continue;
            }
            if (eideticParameters == null) {
                continue;
            }

            //Same
            // Retention count; null means the tag is unusable for this volume.
            Integer keep = getKeep(eideticParameters, vol);
            if (keep == null) {
                continue;
            }

            JSONObject syncSnapshot = null;
            if (eideticParameters.containsKey("SyncSnapshot")) {
                syncSnapshot = (JSONObject) eideticParameters.get("SyncSnapshot");
            }
            if (syncSnapshot == null) {
                logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                        + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + vol.getVolumeId()
                        + "\"");
                continue;
            }

            JSONObject validateParameters;
            try {
                validateParameters = (JSONObject) syncSnapshot.get("Validate");
            } catch (Exception e) {
                logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                        + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + vol.getVolumeId()
                        + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                        + StackTrace.getStringFromStackTrace(e) + "\"");
                continue;
            }

            Integer createAfter = getCreateAfter(validateParameters, vol);

            String cluster = getCluster(validateParameters, vol);

            // Group (volume, createAfter, keep) triples by cluster name.
            if (validateCluster_.containsKey(cluster)) {
                validateCluster_.get(cluster).add(new MutableTriple(vol, createAfter, keep));
            } else {
                validateCluster_.put(cluster, new ArrayList<MutableTriple<Volume, Integer, Integer>>());
                validateCluster_.get(cluster).add(new MutableTriple(vol, createAfter, keep));
            }

        } catch (Exception e) {
            logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error in SnapshotVolumeSync workflow\", stacktrace=\""
                    + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
        }

    }

    // Phase 2: for each cluster, decide whether any member volume is due
    // for a snapshot; if so, snapshot every volume in the cluster together.
    for (String cluster : validateCluster_.keySet()) {

        try {
            ArrayList<MutableTriple<Volume, Integer, Integer>> myList = validateCluster_.get(cluster);

            // Use the smallest createAfter value across the cluster when
            // evaluating each volume's snapshot decision.
            Boolean snapshotCluster = false;
            Integer min = Integer.MAX_VALUE;
            for (MutableTriple trip : myList) {
                if (((Integer) trip.getMiddle()) < min) {
                    min = (Integer) trip.getMiddle();
                }
            }

            for (MutableTriple trip : myList) {
                if (snapshotDecision(ec2Client, (Volume) trip.getLeft(), min)) {
                    snapshotCluster = true;
                }
            }

            if (snapshotCluster) {
                ArrayList<Volume> vols = new ArrayList<>();
                for (MutableTriple trip : myList) {
                    vols.add((Volume) trip.getLeft());
                }

                SnapshotVolumeSync thread = new SnapshotVolumeSync(awsAccessKeyId_, awsSecretKey_,
                        uniqueAwsAccountIdentifier_, maxApiRequestsPerSecond_,
                        ApplicationConfiguration.getAwsCallRetryAttempts(), region_, vols, true);

                try {
                    // NOTE(review): run() is called directly, so this executes
                    // synchronously on the current thread; the wait loop below
                    // only matters if run() returns before isFinished() is set.
                    thread.run();
                } catch (Exception e) {
                    String responseMessage = "Error running cluster validator thread for cluster " + cluster;
                    // NOTE(review): unlike the other error logs in this method,
                    // this message appears to be missing a ", stacktrace=\""
                    // label before e.toString() - confirm intended log format.
                    logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_ + "\",Error=\""
                            + responseMessage + "\"" + e.toString() + System.lineSeparator()
                            + StackTrace.getStringFromStackTrace(e) + "\"");
                }

                Integer wait = 0;
                while (!(thread.isFinished()) && (wait <= 200)) {
                    Threads.sleepMilliseconds(250);
                    //break after 50 seconds
                    wait = wait + 1;
                }

            }

        } catch (Exception e) {
            logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=\"Error\", Error=\"error in SnapshotVolumeSync workflow\", stacktrace=\""
                    + e.toString() + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
        }

    }

    ec2Client.shutdown();
    isFinished_ = true;
}

From source file:com.gargoylesoftware.htmlunit.TestCaseTest.java

/**
 * Verifies that {@code lines} contains, for every element name in
 * {@code allElements}, a copy of {@code line} with {@code elementName}
 * replaced by that element; fails the test listing any missing lines.
 *
 * @param relativePath path of the file being checked, used in the failure message
 * @param line         the template line containing {@code elementName}
 * @param lines        the lines actually present in the file
 * @param elementName  the placeholder to substitute in the template
 * @param allElements  all element names that must be covered
 */
private void checkLines(final String relativePath, final String line, final List<String> lines,
        final String elementName, final List<String> allElements) {
    final List<String> allExpectedLines = new ArrayList<>(allElements.size());
    for (final String element : allElements) {
        allExpectedLines.add(line.replace(elementName, element));
    }
    allExpectedLines.removeAll(lines);
    if (!allExpectedLines.isEmpty()) {
        // String.join (stdlib) replaces commons-lang StringUtils.join with
        // identical output for non-null strings.
        fail("You must specify the following line in " + relativePath + ":\n"
                + String.join(System.lineSeparator(), allExpectedLines));
    }
}