Example usage for org.apache.commons.io FileUtils writeLines

Introduction

This page collects example usages of org.apache.commons.io FileUtils.writeLines, drawn from open-source projects.

Prototype

public static void writeLines(File file, Collection<?> lines) throws IOException

Document

Writes the toString() value of each item in a collection to the specified File, one item per line. The default VM encoding and the default line ending are used.
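
A minimal, self-contained sketch of the call (the file name and line contents here are illustrative):

import java.io.File;
import java.io.IOException;
import java.util.Arrays;

import org.apache.commons.io.FileUtils;

public class WriteLinesExample {
    public static void main(String[] args) throws IOException {
        // Each element's toString() value becomes one line in the file;
        // missing parent directories are created automatically.
        File out = new File("example-output.txt"); // illustrative path
        FileUtils.writeLines(out, Arrays.asList("first", "second", "third"));
    }
}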

Usage

From source file:org.datavec.api.records.reader.impl.LineReaderTest.java
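Writes three files of three lines each into a temporary directory, then checks that a LineRecordReader over a FileSplit of that directory yields nine single-Writable records.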

@Test
public void testLineReader() throws Exception {
    String tempDir = System.getProperty("java.io.tmpdir");
    File tmpdir = new File(tempDir, "tmpdir-testLineReader");
    if (tmpdir.exists())
        tmpdir.delete();
    tmpdir.mkdir();

    File tmp1 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp1.txt"));
    File tmp2 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp2.txt"));
    File tmp3 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp3.txt"));

    FileUtils.writeLines(tmp1, Arrays.asList("1", "2", "3"));
    FileUtils.writeLines(tmp2, Arrays.asList("4", "5", "6"));
    FileUtils.writeLines(tmp3, Arrays.asList("7", "8", "9"));

    InputSplit split = new FileSplit(tmpdir);

    RecordReader reader = new LineRecordReader();
    reader.initialize(split);

    int count = 0;
    List<List<Writable>> list = new ArrayList<>();
    while (reader.hasNext()) {
        List<Writable> l = reader.next();
        assertEquals(1, l.size());
        list.add(l);
        count++;
    }

    assertEquals(9, count);

    try {
        FileUtils.deleteDirectory(tmpdir);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.datavec.api.records.reader.impl.LineReaderTest.java
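Same setup as above, but also exercises record metadata: each record's URI must end with the expected file name, and loadFromMetaData must reproduce both the full record list and a chosen subset.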

@Test
public void testLineReaderMetaData() throws Exception {
    String tempDir = System.getProperty("java.io.tmpdir");
    File tmpdir = new File(tempDir, "tmpdir-testLineReader");
    if (tmpdir.exists())
        tmpdir.delete();
    tmpdir.mkdir();

    File tmp1 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp1.txt"));
    File tmp2 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp2.txt"));
    File tmp3 = new File(FilenameUtils.concat(tmpdir.getPath(), "tmp3.txt"));

    FileUtils.writeLines(tmp1, Arrays.asList("1", "2", "3"));
    FileUtils.writeLines(tmp2, Arrays.asList("4", "5", "6"));
    FileUtils.writeLines(tmp3, Arrays.asList("7", "8", "9"));

    InputSplit split = new FileSplit(tmpdir);

    RecordReader reader = new LineRecordReader();
    reader.initialize(split);

    List<List<Writable>> list = new ArrayList<>();
    while (reader.hasNext()) {
        list.add(reader.next());
    }
    assertEquals(9, list.size());

    List<List<Writable>> out2 = new ArrayList<>();
    List<Record> out3 = new ArrayList<>();
    List<RecordMetaData> meta = new ArrayList<>();
    reader.reset();
    int count = 0;
    while (reader.hasNext()) {
        Record r = reader.nextRecord();
        out2.add(r.getRecord());
        out3.add(r);
        meta.add(r.getMetaData());
        int fileIdx = count / 3 + 1;
        String uri = r.getMetaData().getURI().toString();
        assertTrue(uri.endsWith("tmp" + fileIdx + ".txt"));
        count++;
    }

    assertEquals(list, out2);

    List<Record> fromMeta = reader.loadFromMetaData(meta);
    assertEquals(out3, fromMeta);

    //try: second line of second and third files only...
    List<RecordMetaData> subsetMeta = new ArrayList<>();
    subsetMeta.add(meta.get(4));
    subsetMeta.add(meta.get(7));
    List<Record> subset = reader.loadFromMetaData(subsetMeta);
    assertEquals(2, subset.size());
    assertEquals(out3.get(4), subset.get(0));
    assertEquals(out3.get(7), subset.get(1));

    try {
        FileUtils.deleteDirectory(tmpdir);
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.datavec.spark.transform.utils.SparkExport.java
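Formats each record as a delimited line, collects the whole RDD to the driver, shuffles the lines with a seeded Random, and writes them to a single local file.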

public static void exportCSVLocal(File outputFile, String delimiter, JavaRDD<List<Writable>> data, int rngSeed)
        throws Exception {

    JavaRDD<String> lines = data.map(new WritablesToStringFunction(delimiter));
    List<String> linesList = lines.collect(); //Requires all data in memory
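    //collect() may return an immutable list; copy it so that it can be shuffled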
    if (!(linesList instanceof ArrayList))
        linesList = new ArrayList<>(linesList);
    Collections.shuffle(linesList, new Random(rngSeed));

    FileUtils.writeLines(outputFile, linesList);
}

From source file:org.datavec.spark.transform.utils.SparkExport.java
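Variant that splits the lines into numFiles roughly equal parts with randomSplit and writes each part, shuffled with a seeded Random, to its own numbered CSV file.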

public static void exportCSVLocal(String outputDir, String baseFileName, int numFiles, String delimiter,
        JavaRDD<List<Writable>> data, int rngSeed) throws Exception {

    JavaRDD<String> lines = data.map(new WritablesToStringFunction(delimiter));
    double[] split = new double[numFiles];
    for (int i = 0; i < split.length; i++)
        split[i] = 1.0 / numFiles;
    JavaRDD<String>[] splitData = lines.randomSplit(split);

    int count = 0;
    Random r = new Random(rngSeed);
    for (JavaRDD<String> subset : splitData) {
        String path = FilenameUtils.concat(outputDir, baseFileName + (count++) + ".csv");
        List<String> linesList = subset.collect();
        if (!(linesList instanceof ArrayList))
            linesList = new ArrayList<>(linesList);
        Collections.shuffle(linesList, r);
        FileUtils.writeLines(new File(path), linesList);
    }
}

From source file:org.datavec.spark.transform.utils.SparkExport.java
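Like the previous method, but writes each split to its numbered CSV file without shuffling.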

public static void exportCSVLocal(String outputDir, String baseFileName, int numFiles, String delimiter,
        JavaRDD<List<Writable>> data) throws Exception {

    JavaRDD<String> lines = data.map(new WritablesToStringFunction(delimiter));
    double[] split = new double[numFiles];
    for (int i = 0; i < split.length; i++)
        split[i] = 1.0 / numFiles;
    JavaRDD<String>[] splitData = lines.randomSplit(split);

    int count = 0;
    for (JavaRDD<String> subset : splitData) {
        String path = FilenameUtils.concat(outputDir, baseFileName + (count++) + ".csv");
        //            subset.saveAsTextFile(path);
        List<String> linesList = subset.collect();
        FileUtils.writeLines(new File(path), linesList);
    }
}

From source file:org.datavec.spark.transform.utils.SparkExport.java
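Collects an RDD of pre-formatted strings to the driver, shuffles them with a seeded Random, and writes them to a single local file.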

public static void exportStringLocal(File outputFile, JavaRDD<String> data, int rngSeed) throws Exception {
    List<String> linesList = data.collect(); //Requires all data in memory
    if (!(linesList instanceof ArrayList))
        linesList = new ArrayList<>(linesList);
    Collections.shuffle(linesList, new Random(rngSeed));

    FileUtils.writeLines(outputFile, linesList);
}

From source file:org.deeplearning4j.plot.NeuralNetPlotter.java
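Copies a plotting script (read from script.getInputStream()) into a temporary plot.py file and returns its absolute path.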

private static String loadIntoTmp() {
    setupDirectory(dataFilePath);
    setupDirectory(graphFilePath);
    printDataFilePath();
    printGraphFilePath();

    File plotPath = new File(graphPath, "plot.py");
    plotPath.deleteOnExit();
    if (!plotPath.exists()) {
        try {
            List<String> lines = IOUtils.readLines(script.getInputStream());
            FileUtils.writeLines(plotPath, lines);
        } catch (IOException e) {
            throw new IllegalStateException("Unable to load python file", e);
        }
    }
    return plotPath.getAbsolutePath();
}

From source file:org.deeplearning4j.text.sentenceiterator.SentenceIteratorTest.java
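A @Before fixture that seeds a single-file directory (dir/test.txt) and a two-file directory (multidir) with short lines for the sentence iterator tests.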

@Before
public void before() throws Exception {
    File test = new File("dir");
    test.mkdir();
    File testFile = new File(test, "test.txt");
    FileUtils.writeLines(testFile, Arrays.asList("Hello", "My", "Name"));

    File multiDir = new File("multidir");
    for (int i = 0; i < 2; i++) {
        File newTestFile = new File(multiDir, "testfile-" + i);
        FileUtils.writeLines(newTestFile, Arrays.asList("Sentence 1.", "Sentence 2.", "Sentence 3."));
    }
}

From source file:org.deeplearning4j.util.StringGrid.java
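One-line convenience wrapper that writes the grid's toLines() output to the given path.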

public void writeLinesTo(String path) throws IOException {
    FileUtils.writeLines(new File(path), toLines());
}

From source file:org.dkpro.tc.core.task.uima.ExtractFeaturesConnector.java
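Writes the feature store's feature names to the Constants.FILENAME_FEATURES file in the output directory, converting any IOException into an AnalysisEngineProcessException.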

private void writeFeatureNames() throws AnalysisEngineProcessException {
    try {
        FileUtils.writeLines(new File(outputDirectory, Constants.FILENAME_FEATURES),
                featureStore.getFeatureNames());
    } catch (IOException e) {
        throw new AnalysisEngineProcessException(e);
    }
}