Example usage for org.apache.commons.io FileUtils writeLines

List of usage examples for org.apache.commons.io FileUtils writeLines

Introduction

On this page you can find an example usage for org.apache.commons.io FileUtils writeLines.

Prototype

public static void writeLines(File file, Collection lines) throws IOException 

Source Link

Document

Writes the toString() value of each item in a collection to the specified File line by line.

Usage

From source file:io.seqware.pipeline.plugins.WorkflowSchedulerTest.java

@Test
public void testLeftToRightOverrideByIniFiles() throws IOException {
    // Three ini files with overlapping keys. The test schedules a workflow with
    // all three passed via --ini-files and asserts left-to-right override:
    // a key in a later file wins, keys unique to earlier files survive.
    String[] iniFileContents1 = { "min_qual_score=30", "min_percent_bases=90", "cat=dog" };
    String[] iniFileContents2 = { "min_qual_score=40", "min_percent_bases=100" };
    String[] iniFileContents3 = { "min_qual_score=50" };
    File ini1 = File.createTempFile("ini", "ini");
    File ini2 = File.createTempFile("ini", "ini");
    File ini3 = File.createTempFile("ini", "ini");
    ini1.deleteOnExit();
    ini2.deleteOnExit();
    ini3.deleteOnExit();
    FileUtils.writeLines(ini1, Arrays.asList(iniFileContents1));
    FileUtils.writeLines(ini2, Arrays.asList(iniFileContents2));
    FileUtils.writeLines(ini3, Arrays.asList(iniFileContents3));

    // Schedule the workflow; the comma-joined order ini1,ini2,ini3 defines the
    // override precedence being tested.
    launchPlugin("--workflow-accession", "2861", "--host", FileTools.getLocalhost(null).hostname, "--ini-files",
            ini1.getAbsolutePath() + "," + ini2.getAbsolutePath() + "," + ini3.getAbsolutePath());

    // The plugin prints the SWID of the newly scheduled run; extract it.
    String s = getOut();
    String firstWorkflowRun = getAndCheckSwid(s);

    // Verify directly against the database that the run was recorded as
    // "submitted" for workflow_id 16.
    BasicTestDatabaseCreator dbCreator = new BasicTestDatabaseCreator();
    List<Object[]> runQuery = dbCreator.runQuery(new ArrayListHandler(),
            "select r.status, r.workflow_id, r.ini_file from workflow_run r\n" + "WHERE \n"
                    + "r.sw_accession = ?\n" + "; ",
            Integer.valueOf(firstWorkflowRun));
    Assert.assertTrue(
            "schedule workflow is incorrect " + runQuery.get(0)[0].toString() + " "
                    + runQuery.get(0)[1].toString(),
            runQuery.get(0)[0].equals(WorkflowRunStatus.submitted.toString()) && runQuery.get(0)[1].equals(16));
    WorkflowRun workflowRun = metadata.getWorkflowRun(Integer.valueOf(firstWorkflowRun));
    // check that default keys are present
    Map<String, String> baseMap = MapTools.iniString2Map(workflowRun.getIniFile());
    // Keys from all three ini files plus the workflow's own defaults
    // (inputs_read_*, output_prefix, output_dir) must all be present.
    Assert.assertTrue("overridden map is missing variables",
            baseMap.containsKey("min_qual_score") && baseMap.containsKey("inputs_read_1")
                    && baseMap.containsKey("inputs_read_2") && baseMap.containsKey("cat")
                    && baseMap.containsKey("output_prefix") && baseMap.containsKey("output_dir")
                    && baseMap.containsKey("min_percent_bases"));
    // min_qual_score appears in all three files -> last file (50) wins;
    // min_percent_bases appears in files 1 and 2 -> file 2 (100) wins;
    // cat only appears in file 1 -> its value (dog) survives.
    Assert.assertTrue("overridden map has incorrect values",
            baseMap.get("min_qual_score").equals("50") && baseMap.get("cat").equals("dog")
                    && baseMap.get("min_percent_bases").equals("100")
                    && baseMap.get("output_dir").equals("results"));
}

From source file:com.tesora.dve.tools.DVEAnalyzerCLITest.java

/**
 * Creates a temporary file, optionally pre-populated with the given lines.
 *
 * @param name  fragment embedded in the temp-file name so failed runs are
 *              easy to attribute
 * @param lines lines to write to the file (one per line via
 *              {@code FileUtils.writeLines}); may be {@code null} to leave
 *              the file empty
 * @return the canonical path of the created file
 * @throws IOException if the file cannot be created or written
 */
private String getTempFile(final String name, final List<String> lines) throws IOException {
    final File tempFile = File.createTempFile("PEDBAnalyzerTest_" + name, ".tmp");
    // Schedule cleanup so repeated test runs do not accumulate temp files
    // (matches the deleteOnExit() convention used by sibling tests).
    tempFile.deleteOnExit();

    if (lines != null) {
        FileUtils.writeLines(tempFile, lines);
    }

    return tempFile.getCanonicalPath();
}

From source file:com.daphne.es.maintain.staticresource.web.controller.StaticResourceVersionController.java

/**
 * Rewrites the version suffix of one static-resource reference inside the
 * file at {@code fileRealPath}, then writes the file back.
 *
 * @param fileRealPath path of the file containing the resource reference
 * @param content      the exact (XML-escaped) line to locate and rewrite
 * @param newVersion   new version string, or null to strip the version
 * @return the rewritten line (or the unescaped input if no line matched)
 * @throws IOException if the file cannot be read or written
 */
private String versionedStaticResourceContent(String fileRealPath, String content, String newVersion)
        throws IOException {

    content = StringEscapeUtils.unescapeXml(content);
    // NOTE(review): the "?" query-separator is only prepended when
    // newVersion equals exactly "1" — presumably the first versioning of an
    // un-versioned resource. Verify this is intended and not meant to be a
    // more general "has no existing version" check.
    if (newVersion != null && newVersion.equals("1")) {
        newVersion = "?" + newVersion;
    }

    File file = new File(fileRealPath);

    List<String> contents = FileUtils.readLines(file);

    // Find the single line equal to the requested content and rewrite its
    // version using whichever pattern (script or link) matches.
    for (int i = 0, l = contents.size(); i < l; i++) {
        String fileContent = contents.get(i);
        if (content.equals(fileContent)) {
            Matcher matcher = scriptPattern.matcher(content);
            if (!matcher.matches()) {
                matcher = linkPattern.matcher(content);
            }
            // NOTE(review): if neither pattern matches, replaceAll below
            // still runs on the link matcher — confirm inputs always match
            // one of the two patterns.
            if (newVersion == null) { // strip the version suffix entirely
                content = matcher.replaceAll("$1$2$5");
            } else {
                content = matcher.replaceAll("$1$2$3" + newVersion + "$5");
            }
            contents.set(i, content);
            break;
        }
    }
    FileUtils.writeLines(file, contents);

    return content;
}

From source file:es.uvigo.ei.sing.adops.operations.running.ExecuteExperimentBySteps.java

/**
 * Rewrites {@code inputFile} in place, replacing "o" with "-" on the lines
 * selected by the given filter (delegates the substitution to
 * {@code Utils.replaceNames}).
 */
private void replaceOGaps(File inputFile, LinesFilter filter) throws IOException {
    FileUtils.writeLines(
            inputFile,
            Utils.replaceNames(
                    Collections.singletonMap("o", "-"),
                    FileUtils.readLines(inputFile),
                    filter));
}

From source file:de.baumann.hhsmoodle.activities.Activity_count.java

/**
 * Persists the current count items to their backing file. A write failure
 * is reported via the stack trace and otherwise ignored (best-effort save).
 */
private void writeItemsCount() {
    try {
        FileUtils.writeLines(newFileCount(), itemsCount);
    } catch (IOException ioException) {
        ioException.printStackTrace();
    }
}

From source file:de.unisb.cs.st.javalanche.mutation.runtime.testDriver.MutationTestDriver.java

/**
 * Runs the test suite without applying any mutations, executing it several
 * times in shuffled order. Used as a self-check that the driver itself is
 * deterministic: if the suite is order-independent, every permutation must
 * pass. Failing test names are written to
 * {@code <outputDir>/failing-tests-permuted.txt}.
 */
private void runPermutedTests() {
    logger.info("Running permuted tests for project " + configuration.getProjectPrefix());
    addListenersFromProperty();
    List<String> allTests = new ArrayList<String>(getAllTests());
    // Disable the per-test timeout: we only care about pass/fail here.
    timeout = Integer.MAX_VALUE;
    List<SingleTestResult> allFailingTests = new ArrayList<SingleTestResult>();
    // Warm-up pass so JIT/class-loading effects do not skew the real rounds.
    coldRun(allTests);
    testsStart();
    int permutations = configuration.getTestPermutations();
    for (int i = 0; i < permutations; i++) {
        logger.info("Shuffling tests. Round " + (i + 1));
        Collections.shuffle(allTests);
        List<SingleTestResult> failingTests = runNormalTests(allTests);
        allFailingTests.addAll(failingTests);
    }
    testsEnd();
    if (allFailingTests.isEmpty()) {
        String message = "All " + allTests.size() + " tests passed for " + permutations + " permutations.";
        System.out.println(message);
        logger.info(message);
    } else {
        logger.warn("Not all tests passed");
        // De-duplicate: the same test may fail in several permutations.
        Set<String> failingTests = new HashSet<String>();
        for (SingleTestResult str : allFailingTests) {
            String testCaseName = str.getTestMessage().getTestCaseName();
            logger.warn("Test Failed: " + testCaseName + ": " + str.getTestMessage());
            failingTests.add(testCaseName);
        }
        try {
            FileUtils.writeLines(new File(configuration.getOutputDir(), "/failing-tests-permuted.txt"),
                    failingTests);
        } catch (IOException e) {
            // A failure report we cannot write is a hard error for this
            // diagnostic mode; surface it rather than swallowing it.
            throw new RuntimeException(e);
        }
    }
}

From source file:de.baumann.hhsmoodle.activities.Activity_count.java

/**
 * Persists the current title items to their backing file. A write failure
 * is reported via the stack trace and otherwise ignored (best-effort save).
 */
private void writeItemsTitle() {
    try {
        FileUtils.writeLines(newFileTitle(), itemsTitle);
    } catch (IOException ioException) {
        ioException.printStackTrace();
    }
}

From source file:io.druid.indexer.IndexGeneratorJobTest.java

@Before
public void setUp() throws Exception {
    // Shared Hadoop-indexer mapper; register the shard-spec subtypes the
    // parameterized test cases refer to by name.
    mapper = HadoopDruidIndexerConfig.jsonMapper;
    mapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
    mapper.registerSubtypes(new NamedType(SingleDimensionShardSpec.class, "single"));

    dataFile = temporaryFolder.newFile();
    tmpDir = temporaryFolder.newFolder();

    HashMap<String, Object> inputSpec = new HashMap<String, Object>();
    inputSpec.put("paths", dataFile.getCanonicalPath());
    inputSpec.put("type", "static");
    if (inputFormatName != null) {
        inputSpec.put("inputFormat", inputFormatName);
    }

    // Sequence files need binary writing; every other input format gets the
    // test data written as plain text lines.
    if (SequenceFileInputFormat.class.getName().equals(inputFormatName)) {
        writeDataToLocalSequenceFile(dataFile, data);
    } else {
        FileUtils.writeLines(dataFile, data);
    }

    // Build the full indexer config: schema (aggregators + daily
    // granularity over this.interval), IO config pointing at inputSpec,
    // and tuning config rooted at tmpDir.
    config = new HadoopDruidIndexerConfig(new HadoopIngestionSpec(
            new DataSchema("website", mapper.convertValue(inputRowParser, Map.class),
                    new AggregatorFactory[] { new LongSumAggregatorFactory("visited_num", "visited_num"),
                            new HyperUniquesAggregatorFactory("unique_hosts", "host") },
                    new UniformGranularitySpec(Granularity.DAY, QueryGranularity.NONE,
                            ImmutableList.of(this.interval)),
                    mapper),
            new HadoopIOConfig(ImmutableMap.copyOf(inputSpec), null, tmpDir.getCanonicalPath()),
            new HadoopTuningConfig(tmpDir.getCanonicalPath(), null, null, null, null, null, false, false, false,
                    false, ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
                    false, false, false, null, null, useCombiner)));

    // Round-trip through the serialized schema, as production config
    // loading would, before installing the per-test shard specs.
    config.setShardSpecs(loadShardSpecs(partitionType, shardInfoForEachSegment));
    config = HadoopDruidIndexerConfig.fromSpec(config.getSchema());
}

From source file:es.uvigo.ei.sing.adops.operations.running.ExecuteExperimentBySteps.java

/**
 * Copies {@code inputFile} to {@code outputFile} with sequence names
 * substituted according to the experiment's name map. For tree files, the
 * characters "(", ")", ":" and "," are first replaced with "_" inside the
 * mapped names, since those characters carry structural meaning in tree
 * notation.
 */
private void replaceSequenceNames(File inputFile, File outputFile, boolean isTree) throws IOException {
    final Map<String, String> nameMap = this.experiment.getNames();

    if (isTree) {
        // NOTE(review): this sanitization mutates the map returned by
        // getNames() in place — confirm callers do not depend on the
        // unsanitized values afterwards.
        for (Map.Entry<String, String> entry : nameMap.entrySet()) {
            entry.setValue(entry.getValue().replaceAll("[():,]", "_"));
        }
    }

    FileUtils.writeLines(outputFile, Utils.replaceNames(nameMap, FileUtils.readLines(inputFile)));
}

From source file:it.drwolf.ridire.index.cwb.scripts.VRTFilesBuilder.java

/**
 * Converts a 3-column POS file (token, POS tag, extra column) into a
 * tab-separated VRT file named {@code <digest>.vrt} in {@code destDir},
 * wrapped in a header derived from the crawled resource and a closing
 * {@code </text>} tag.
 *
 * Lines that do not tokenize into exactly 3 fields are reported to stderr
 * and skipped; files containing "strange" characters are skipped entirely.
 *
 * @param posFileName  path of the POS file to convert
 * @param strTokenizer tokenizer reused across lines (reset per line)
 * @param cr           crawled resource supplying metadata for the header
 * @param destDir      directory receiving the generated .vrt file
 */
public void createVRTFile(String posFileName, StrTokenizer strTokenizer, CrawledResource cr, File destDir) {
    File posFile = new File(posFileName);
    if (posFile.exists() && posFile.canRead()) {
        try {
            List<String> posFileLines = FileUtils.readLines(posFile);
            if (this.haveStrangeChars(posFileLines)) {
                this.log.warn("File with strange chars {0}", posFileName);
                return;
            }
            List<String> newLines = new ArrayList<String>();
            for (String l : posFileLines) {
                strTokenizer.reset(l);
                String[] tokens = strTokenizer.getTokenArray();
                if (tokens.length != 3) {
                    System.err.println("File: " + posFileName + " Stringa malformed: " + l);
                    continue;
                }
                // token \t tag \t simplified-tag \t extra — colons are
                // stripped from the tag columns.
                String nl = tokens[0] + "\t"
                        + tokens[1].replaceAll(":", "") + "\t"
                        + this.getEasyPos(tokens[1]).replaceAll(":", "") + "\t"
                        + tokens[2];
                newLines.add(nl);
            }
            String functionalMetadatum = cr.getFunctionalMetadatum() != null
                    ? cr.getFunctionalMetadatum().getDescription()
                    : "";
            String semanticMetadatum = cr.getSemanticMetadatum() != null
                    ? cr.getSemanticMetadatum().getDescription()
                    : "";
            String url = cr.getUrl();
            if (url == null) {
                url = "";
            }
            // Header opens the <text> element; close it after the body.
            String header = this.getHeaderFromResource(cr.getJob().getName(), functionalMetadatum,
                    semanticMetadatum, url, posFile);
            newLines.add(0, header);
            newLines.add("</text>");
            File vrtFile = new File(destDir, cr.getDigest() + ".vrt");
            FileUtils.writeLines(vrtFile, newLines);
        } catch (IOException e) {
            // Best-effort conversion: report and continue with other files.
            e.printStackTrace();
        }
    } else {
        System.err.println("Warning - File " + posFileName + " doesn't exist.");
    }
}