Example usage for org.apache.commons.io FileUtils readLines

List of usage examples for org.apache.commons.io FileUtils readLines

Introduction

In this page you can find the example usage for org.apache.commons.io FileUtils readLines.

Prototype

public static List&lt;String&gt; readLines(File file) throws IOException

Source Link

Document

Reads the contents of a file line by line to a List of Strings using the default encoding for the VM.

Usage

From source file:de.unidue.ltl.flextag.core.reports.adapter.cv.CvAbstractAvgKnownUnknownAccuracyReport.java

/**
 * Reads the attributes file of a cross-validation experiment, extracts the
 * subtask ids listed on the "Subtask [...]" line and groups the relevant
 * ones into triples — one group of three task folders per single run.
 *
 * @param attributesTXT the attributes file listing all subtasks
 * @return a list of three-element groups of task folder names
 * @throws Exception if the file cannot be read or lists no subtasks
 */
private List<List<String>> getFoldersOfSingleRuns(File attributesTXT) throws Exception {
    List<String> readLines = FileUtils.readLines(attributesTXT);

    // Find the line that enumerates the subtasks, e.g. "Subtask [t1, t2, ...]".
    int idx = 0;
    for (String line : readLines) {
        if (line.startsWith("Subtask")) {
            break;
        }
        idx++;
    }
    // Fail fast with a descriptive message instead of the
    // IndexOutOfBoundsException the old code produced when no line matched.
    if (idx >= readLines.size()) {
        throw new IllegalStateException("No 'Subtask' line found in " + attributesTXT);
    }
    String line = readLines.get(idx);
    int start = line.indexOf("[") + 1;
    int end = line.indexOf("]");
    String subTasks = line.substring(start, end);

    String[] tasks = subTasks.split(",");

    List<List<String>> results = new ArrayList<>();

    List<String> t = new ArrayList<>();
    for (String task : tasks) {
        task = task.trim();
        // Only ML-adapter and feature-extraction (train/test) tasks are relevant.
        if (TcTaskTypeUtil.isMachineLearningAdapterTask(getContext().getStorageService(), task)
                || TcTaskTypeUtil.isFeatureExtractionTrainTask(getContext().getStorageService(), task)
                || TcTaskTypeUtil.isFeatureExtractionTestTask(getContext().getStorageService(), task)) {
            // already trimmed above; the old code trimmed a second time redundantly
            t.add(task);
        }
        // Every completed triple constitutes one single run.
        if (t.size() == 3) {
            results.add(t);
            t = new ArrayList<>();
        }
    }

    return results;
}

From source file:gov.nih.nci.firebird.selenium2.pages.sponsor.representative.protocol.ExportProtocolsTabHelper.java

/**
 * Triggers the CSV export via the tab's export button and returns the
 * contents of the exported file, one entry per line.
 *
 * @return the lines of the exported CSV file
 * @throws IOException if the exported file cannot be read
 */
private List<String> getCsvFileContents() throws IOException {
    final File exportedCsv = tab.clickExportButton();
    return FileUtils.readLines(exportedCsv);
}

From source file:de.tudarmstadt.ukp.dkpro.tc.core.task.uima.ExtractFeaturesConnector.java

/**
 * Finalizes feature extraction after all documents have been processed:
 * applies the configured feature-store filters, then either writes the
 * feature-name list (training mode) or adapts the test feature space to the
 * one seen during training (testing mode), and finally hands the feature
 * store to the configured data writer.
 *
 * @throws AnalysisEngineProcessException if a filter or the data writer
 *         cannot be instantiated, or if the feature-names file cannot be
 *         read or written
 */
@Override
public void collectionProcessComplete() throws AnalysisEngineProcessException {
    super.collectionProcessComplete();

    // apply filters that influence the whole feature store
    // filters are applied in the order that they appear as parameters
    for (String filterString : featureFilters) {
        FeatureStoreFilter filter;
        try {
            // Filters are configured by fully-qualified class name.
            filter = (FeatureStoreFilter) Class.forName(filterString).newInstance();
        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
            throw new AnalysisEngineProcessException(e);
        }

        // Each filter declares which phase(s) it participates in.
        if (filter.isApplicableForTraining() && !isTesting || filter.isApplicableForTesting() && isTesting) {
            filter.applyFilter(featureStore);
        }
    }

    // write feature names file if in training mode
    if (!isTesting) {
        try {
            FileUtils.writeLines(new File(outputDirectory, Constants.FILENAME_FEATURES),
                    featureStore.getFeatureNames());
        } catch (IOException e) {
            throw new AnalysisEngineProcessException(e);
        }
    }
    // apply the feature names filter
    else {
        // In testing mode, read back the feature names written during training.
        File featureNamesFile = new File(outputDirectory, Constants.FILENAME_FEATURES);
        TreeSet<String> trainFeatureNames;
        try {
            trainFeatureNames = new TreeSet<>(FileUtils.readLines(featureNamesFile));
        } catch (IOException e) {
            throw new AnalysisEngineProcessException(e);
        }

        AdaptTestToTrainingFeaturesFilter filter = new AdaptTestToTrainingFeaturesFilter();
        // if feature space from training set and test set differs, apply the filter
        // to keep only features seen during training
        if (!trainFeatureNames.equals(featureStore.getFeatureNames())) {
            filter.setFeatureNames(trainFeatureNames);
            filter.applyFilter(featureStore);
        }
    }

    // FIXME if the feature store now determines whether to use dense or sparse instances, 
    // we might get rid of the corresponding parameter here
    // addInstanceId requires dense instances
    try {
        // The data writer is also configured by fully-qualified class name.
        DataWriter writer = (DataWriter) Class.forName(dataWriterClass).newInstance();
        writer.write(outputDirectory, featureStore, true, learningMode, applyWeighting);
    } catch (Exception e) {
        throw new AnalysisEngineProcessException(e);
    }
}

From source file:edu.cornell.med.icb.goby.alignments.TestExportableAlignmentEntryData.java

/**
 * Parses a SAM file into a map from query index to the SAM fields of
 * interest (pair flags, target index, position, mapq, cigar, read and the
 * canonicalized MD:Z mismatch string). Header lines (starting with "@") and
 * unmapped reads (target "*") are skipped.
 *
 * @param genome used to translate target names into reference indices
 * @param filename path of the SAM file to read
 * @return map keyed by query index; each value maps field names to values
 * @throws IOException if the file cannot be read
 */
private Int2ObjectMap<Map<String, String>> readSamFileToMap(final RandomAccessSequenceCache genome,
        final String filename) throws IOException {
    final List<String> lines = FileUtils.readLines(new File(filename));
    final Int2ObjectMap<Map<String, String>> result = new Int2ObjectArrayMap<Map<String, String>>();
    for (final String line : lines) {
        // Skip SAM header lines.
        if (line.startsWith("@")) {
            continue;
        }
        final String[] parts = line.split("\t");
        final String target = parts[2];
        // "*" marks an unmapped read; nothing to record.
        if ("*".equals(target)) {
            continue;
        }
        // parseInt yields the primitive directly; valueOf would box needlessly.
        final int queryIndex = Integer.parseInt(parts[0]);
        final Map<String, String> entry = new HashMap<String, String>();
        result.put(queryIndex, entry);
        entry.put("pairFlags", parts[1]);
        entry.put("targetIndex", String.valueOf(genome.getReferenceIndex(target)));
        entry.put("position", parts[3]);
        entry.put("mapq", parts[4]);
        entry.put("cigar", parts[5]);
        entry.put("read", parts[9]);
        // Optional fields start at column 10; pick out the MD:Z mismatch tag.
        for (int i = 10; i < parts.length; i++) {
            if (parts[i].startsWith("MD:Z")) {
                entry.put("mismatches", SamHelper.canonicalMdz(parts[i].substring(5)));
                break;
            }
        }
    }
    return result;
}

From source file:de.tudarmstadt.ukp.dkpro.core.mallet.lda.io.MalletLdaTopicProportionsWriterTest.java

/**
 * Runs the reader → segmenter → LDA inferencer → proportions-writer pipeline
 * with multiple target files and document ids disabled, then checks that
 * each expected ".topics" file exists, has exactly one line, and that the
 * line is ten tab-separated proportions (no leading docid column).
 */
@Test
public void testMultipleTargetsNoDocids() throws IOException, UIMAException {
    File targetDir = testContext.getTestOutputFolder();
    File expectedFile0 = new File(targetDir, "dummy1.txt.topics");
    File expectedFile1 = new File(targetDir, "dummy2.txt.topics");
    File modelFile = new File(testContext.getTestOutputFolder(), "model");
    MalletLdaUtil.trainModel(modelFile);

    int expectedLines = 1;
    // Ten four-decimal proportions separated by tabs, e.g. "0.1234\t...".
    String expectedLine0Regex = "(0\\.[0-9]{4}\\t){9}0\\.[0-9]{4}";
    String expectedLine1Regex = "(0\\.[0-9]{4}\\t){9}0\\.[0-9]{4}";

    CollectionReaderDescription reader = createReaderDescription(TextReader.class,
            TextReader.PARAM_SOURCE_LOCATION, MalletLdaUtil.CAS_DIR, TextReader.PARAM_PATTERNS,
            MalletLdaUtil.CAS_FILE_PATTERN, TextReader.PARAM_LANGUAGE, MalletLdaUtil.LANGUAGE);
    AnalysisEngineDescription segmenter = createEngineDescription(BreakIteratorSegmenter.class);

    AnalysisEngineDescription inferencer = createEngineDescription(MalletLdaTopicModelInferencer.class,
            MalletLdaTopicModelInferencer.PARAM_MODEL_LOCATION, modelFile);

    AnalysisEngineDescription writer = createEngineDescription(MalletLdaTopicProportionsWriter.class,
            MalletLdaTopicProportionsWriter.PARAM_TARGET_LOCATION, targetDir,
            MalletLdaTopicProportionsWriter.PARAM_OVERWRITE, true,
            MalletLdaTopicProportionsWriter.PARAM_SINGULAR_TARGET, false,
            MalletLdaTopicProportionsWriter.PARAM_WRITE_DOCID, false);

    SimplePipeline.runPipeline(reader, segmenter, inferencer, writer);

    // One output file per input document is expected.
    assertTrue(expectedFile0.exists());
    assertTrue(expectedFile1.exists());

    List<String> lines = FileUtils.readLines(expectedFile0);
    assertTrue(lines.get(0).matches(expectedLine0Regex));
    assertEquals(expectedLines, lines.size());
    lines = FileUtils.readLines(expectedFile1);
    assertEquals(expectedLines, lines.size());
    assertTrue(lines.get(0).matches(expectedLine1Regex));
}

From source file:de.nbi.ontology.test.OntologyMatchTest.java

/**
 * Test, if terms are properly match to concept labels. The a list of terms
 * contains a term in each line./* w  w w .  j a  v a 2  s  .c  o  m*/
 * 
 * @param inFile
 *            a list of terms
 * @throws IOException
 */
@SuppressWarnings("unchecked")
@Test(dataProviderClass = TestFileProvider.class, dataProvider = "similarTestFiles", groups = { "functest" })
public void similar(File inFile) throws IOException {
    log.info("Processing " + inFile.getName());
    String basename = FilenameUtils.removeExtension(inFile.getAbsolutePath());
    File outFile = new File(basename + ".out");
    File resFile = new File(basename + ".res");

    List<String> terms = FileUtils.readLines(inFile);
    PrintWriter w = new PrintWriter(new FileWriter(outFile));
    for (String term : terms) {
        log.trace("** matching " + term);
        w.println(index.getSimilarMatches(term));
    }
    w.flush();
    w.close();

    Assert.assertTrue(FileUtils.contentEquals(outFile, resFile));
}

From source file:de.tudarmstadt.ukp.dkpro.core.io.ditop.DiTopWriterTest.java

/**
 * Runs the reader → segmenter → inferencer → DiTopWriter pipeline with an
 * exact-match collection-value filter and verifies that the expected DiTop
 * output files exist and contain the expected number of lines.
 */
@Test
public void testCollectionValuesExact() throws UIMAException, IOException {
    int expectedNDocuments = 2;
    // Exact matching compares against the full URI of the CAS directory.
    String exactName = new File(CAS_DIR).toURI().toString();
    String[] collectionValues = new String[] { exactName };
    boolean exactMatch = true;

    CollectionReaderDescription reader = createReaderDescription(TextReader.class,
            TextReader.PARAM_SOURCE_LOCATION, CAS_DIR, TextReader.PARAM_PATTERNS, CAS_FILE_PATTERN,
            TextReader.PARAM_LANGUAGE, LANGUAGE);
    AnalysisEngineDescription segmenter = createEngineDescription(BreakIteratorSegmenter.class);
    AnalysisEngineDescription inferencer = createEngineDescription(MalletTopicModelInferencer.class,
            MalletTopicModelInferencer.PARAM_MODEL_LOCATION, MODEL_FILE);
    AnalysisEngineDescription ditopwriter = createEngineDescription(DiTopWriter.class,
            DiTopWriter.PARAM_TARGET_LOCATION, TARGET_DITOP, DiTopWriter.PARAM_MODEL_LOCATION, MODEL_FILE,
            DiTopWriter.PARAM_CORPUS_NAME, DITOP_CORPUSNAME, DiTopWriter.PARAM_COLLECTION_VALUES,
            collectionValues, DiTopWriter.PARAM_COLLECTION_VALUES_EXACT_MATCH, exactMatch);

    SimplePipeline.runPipeline(reader, segmenter, inferencer, ditopwriter);

    /* test whether target files and dirs exist */
    File contentDir = new File(TARGET_DITOP, DITOP_CORPUSNAME + "_" + N_TOPICS);
    File topicsFile = new File(contentDir, "topics.csv");
    File topicTermT15File = new File(contentDir, "topicTerm-T15.txt");
    File topicTermFile = new File(contentDir, "topicTerm.txt");
    File topicTermMatrixFile = new File(contentDir, "topicTermMatrix.txt");

    assertTrue(new File(TARGET_DITOP, "config.all").exists());
    assertTrue(contentDir.isDirectory());
    assertTrue(topicTermT15File.exists());
    assertTrue(topicTermFile.exists());
    assertTrue(topicTermMatrixFile.exists());
    assertTrue(topicsFile.exists());

    /* check that file lengths are correct */
    // topics.csv carries a header line in addition to one line per document.
    assertEquals(expectedNDocuments + 1, FileUtils.readLines(topicsFile).size());
    assertEquals(N_TOPICS, FileUtils.readLines(topicTermT15File).size());
    assertEquals(N_TOPICS, FileUtils.readLines(topicTermFile).size());
    assertEquals(N_TOPICS, FileUtils.readLines(topicTermMatrixFile).size());
}

From source file:edu.isi.pfindr.learn.util.PairsFileIO.java

/**
 * Pairs the given user input with the fourth tab-separated column of every
 * line of the input file and writes one "userInput&lt;TAB&gt;phenotype&lt;TAB&gt;0"
 * line per input row to the output file.
 *
 * @param userInput      the term paired with every phenotype
 * @param inputFilePath  path to a tab-separated file whose 4th column holds the phenotype
 * @param outputFilePath path of the pair file to write
 */
public static void generatePairsFromStringAndFileContentWithNoClass(String userInput, String inputFilePath,
        String outputFilePath) {

    List<String> phenotypeList = new ArrayList<String>();
    try {
        // Default charset mirrors the previous FileUtils.readLines(File) behavior.
        phenotypeList = Files.readAllLines(new File(inputFilePath).toPath(), Charset.defaultCharset());
    } catch (IOException ioe) {
        // Best effort, as before: an unreadable input yields an empty output file.
        ioe.printStackTrace();
    }

    // try-with-resources replaces the previous error-prone manual close logic,
    // which closed the writer both inside a catch block and in finally.
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(outputFilePath))) {
        for (String row : phenotypeList) {
            String[] phenotype2 = row.split("\t");
            bw.append(String.format("%s\t%s\t%d\n", userInput, phenotype2[3], 0));
        }
    } catch (IOException io) {
        io.printStackTrace();
    } catch (Exception e) {
        // Preserve the original broad catch: e.g. a malformed row with fewer than
        // four columns surfaces here as ArrayIndexOutOfBoundsException.
        e.printStackTrace();
    }
}

From source file:it.drwolf.ridire.index.cwb.scripts.VRTFilesBuilder.java

/**
 * Converts every ".pos" file found (recursively) under the origin directory
 * into a ".vrt" file in the destination directory: strips colons from each
 * line, prepends a generated header and appends the closing "&lt;/text&gt;" tag.
 * Conversion is best-effort: a file that fails to convert is logged and
 * skipped, and progress is reported every 100 files.
 *
 * @param vrtFilesBuilderData carries the origin and destination directory paths
 */
@Asynchronous
public void buildFilesFromFiles(VRTFilesBuilderData vrtFilesBuilderData) {
    String origDir = vrtFilesBuilderData.getOrigDir();
    Collection<File> files = FileUtils.listFiles(new File(origDir), new String[] { "pos" }, true);
    System.out.println("Files to be processed: " + files.size());
    File destDir = new File(vrtFilesBuilderData.getDestDir());
    destDir.mkdir();
    int i = 0;
    for (File f : files) {
        ++i;
        try {
            List<String> lines = FileUtils.readLines(f);
            // Presize: one slot per original line plus header and closing tag.
            List<String> newLines = new ArrayList<String>(lines.size() + 2);
            for (String l : lines) {
                // Literal replacement; replaceAll would compile ":" as a regex.
                newLines.add(l.replace(":", ""));
            }
            String header = this.getHeaderFromFile(i, f);
            newLines.add(0, header);
            newLines.add("</text>");
            File vrtFile = new File(destDir, FilenameUtils.getBaseName(f.getName()) + ".vrt");
            FileUtils.writeLines(vrtFile, newLines);
        } catch (IOException e) {
            // Best-effort batch conversion: log and continue with the next file.
            e.printStackTrace();
        }
        if (i % 100 == 0) {
            System.out.println("Processed files: " + i);
        }
    }
}

From source file:FinalProject.Employee_Login.java

/**
 * Handles the submit button: reads the entered credentials, finds the line
 * in Username.txt containing the entered username, then compares the
 * credentials against that same line number of Username.txt/Password.txt.
 * On success the employee interface opens; otherwise the error message shows.
 *
 * NOTE(review): credentials are stored and compared as plain text files;
 * consider hashed storage and a constant-time comparison.
 */
private void Submit_ButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_Submit_ButtonActionPerformed
    String Password = Arrays.toString(Password_Input.getPassword());
    // Arrays.toString yields "[a, b, c]"; strip the brackets and separators.
    String password = Password.replaceAll("\\[", "").replaceAll("\\]", "").replaceAll("\\, ", "");
    String username = Employee_Input.getText();

    File user = new File("Username.txt");
    File pass = new File("Password.txt");

    // try-with-resources closes the scanner and reader, which were previously
    // leaked (two additional readers on the password file were opened and
    // never used at all; they have been removed).
    try (Scanner scanner = new Scanner(user);
            LineNumberReader u = new LineNumberReader(new FileReader(user))) {

        int linenumberU = 0;
        while (scanner.hasNextLine() && u.readLine() != null) {
            linenumberU++;
            String lineFromFile = scanner.nextLine();

            if (lineFromFile.contains(username)) // a match!
            {
                break;
            }
        }

        if (linenumberU == 0) {
            // Empty username file: previously this fell through to get(-1)
            // and crashed with an IndexOutOfBoundsException.
            Error_Message.setVisible(true);
            return;
        }

        String pssLine = (String) FileUtils.readLines(pass).get(linenumberU - 1);
        String usrLine = (String) FileUtils.readLines(user).get(linenumberU - 1);

        if (username.equals(usrLine) && password.equals(pssLine)) {
            this.setVisible(false);
            Employee_Interface f = new Employee_Interface();
            f.setVisible(true);
            f.ID_Number.setText(usrLine + "!");
        } else {
            Error_Message.setVisible(true);
        }

    } catch (FileNotFoundException ex) {
        // Previously swallowed silently; surface the failure and inform the user.
        ex.printStackTrace();
        Error_Message.setVisible(true);
    } catch (IOException ex) {
        ex.printStackTrace();
        Error_Message.setVisible(true);
    }
}