Example usage for org.apache.commons.io FileUtils readLines

List of usage examples for org.apache.commons.io FileUtils readLines

Introduction

In this page you can find the example usage for org.apache.commons.io FileUtils readLines.

Prototype

public static List&lt;String&gt; readLines(File file) throws IOException 

Source Link

Document

Reads the contents of a file line by line to a List of Strings using the default encoding for the VM.

Usage

From source file:net.refractions.udig.catalog.internal.wmt.ui.wizard.controls.JGrasstoolsTmsFolderControl.java

@Override
public IService getService() {
    // Widget may already be disposed if the wizard page was closed.
    if (txtUrl == null || txtUrl.isDisposed())
        return null;

    String urlText = txtUrl.getText().trim();
    File urlFile = new File(urlText);
    if (!urlFile.exists()) {
        return null;
    }

    String urlString = null;
    String zoomMin = "0";
    String zoomMax = "18";
    List<String> fileLines = null;
    try {
        // NOTE(review): reads with the platform default charset — confirm the
        // mapurl file is always ASCII/platform-encoded.
        fileLines = FileUtils.readLines(urlFile);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }

    // The file is a simple key=value properties-like format
    // (url, minzoom, maxzoom, type, ...).
    for (String line : fileLines) {
        line = line.trim();
        if (line.isEmpty()) {
            continue;
        }

        int split = line.indexOf('=');
        if (split == -1) {
            continue;
        }
        String value = line.substring(split + 1).trim();
        if (line.startsWith("url")) {
            int indexOfZ = value.indexOf("ZZZ"); //$NON-NLS-1$
            // Guard: a url entry without the ZZZ placeholder previously caused
            // substring(0, -1) to throw StringIndexOutOfBoundsException.
            if (indexOfZ != -1) {
                String folderName = value.substring(0, indexOfZ);
                urlString = urlFile.getParent() + "/" + folderName + "/{z}/{x}/{y}.png";
            }
        }
        if (line.startsWith("minzoom")) {
            zoomMin = value;
        }
        if (line.startsWith("maxzoom")) {
            zoomMax = value;
        }
        if (line.startsWith("type")) {
            // equalsIgnoreCase avoids the locale-sensitive toLowerCase()
            // comparison (e.g. Turkish dotless-i).
            if (value.equalsIgnoreCase("tms")) {
                isTMS = true;
            }
        }
    }

    URL url = WMTSource.getCustomServerServiceUrl(urlString, zoomMin, zoomMax, isTMS ? "TMS" : null);
    WMTService service = serviceExtension.createService(url, serviceExtension.createParams(url));
    return service;
}

From source file:de.tudarmstadt.ukp.dkpro.tc.svmhmm.random.SVMHMMRandomTestTask.java

@Override
protected void testModel(TaskContext taskContext, File testFile) throws Exception {
    // File to hold the (random) prediction results.
    File predictionsFile = new File(
            taskContext.getStorageLocation(TEST_TASK_OUTPUT_KEY, StorageService.AccessMode.READWRITE),
            new SVMHMMAdapter()
                    .getFrameworkFilename(TCMachineLearningAdapter.AdapterNameEntries.predictionsFile));

    // One random outcome is emitted per line (= per instance) of the test file.
    List<String> strings = FileUtils.readLines(testFile);
    int numberOfTestInstances = strings.size();

    // try-with-resources: the original leaked the writer if println() or
    // getRandomOutcome() threw before IOUtils.closeQuietly was reached.
    try (PrintWriter pw = new PrintWriter(new FileWriter(predictionsFile.getAbsolutePath()))) {
        for (int i = 0; i < numberOfTestInstances; i++) {
            pw.println(getRandomOutcome());
        }
    }
}

From source file:com.intuit.tank.service.impl.v1.report.FileReader.java

/**
 * @param f/*from  w  ww  . j  a  v  a  2s .  c om*/
 * @param l
 * @return
 * @throws IOException
 */
@SuppressWarnings({ "unchecked" })
private static long getStartChar(File f, long numLines, long total) throws IOException {
    List<String> lines = FileUtils.readLines(f);
    long count = 0;
    if (lines.size() > numLines) {
        Collections.reverse(lines);
        for (int i = 0; i < numLines; i++) {
            count += lines.get(i).length() + 1;
        }
        count = total - (count + 1);
    }
    return count;
}

From source file:com.gargoylesoftware.htmlunit.TestCaseTest.java

/**
 * Recursively scans {@code dir} for .java sources and checks any line that
 * mentions the "xmp" HTML tag or "ClientRect" against the known tag/class lists.
 */
private void generateTestForHtmlElements(final File dir) throws Exception {
    final File[] files = dir.listFiles();
    if (files == null) {
        return;
    }
    for (final File file : files) {
        if (file.isDirectory() && !".svn".equals(file.getName())) {
            generateTestForHtmlElements(file);
        } else if (file.getName().endsWith(".java")) {
            final List<String> lines = FileUtils.readLines(file);
            for (final String line : lines) {
                // relativePath() hoists the path computation that was
                // duplicated in both branches of the original.
                if (line.contains("(\"xmp\")")) {
                    checkLines(relativePath(file), line, lines, "xmp", HtmlPageTest.HTML_TAGS_);
                } else if (line.contains("(\"ClientRect\")")) {
                    checkLines(relativePath(file), line, lines, "ClientRect", allClassNames_);
                }
            }
        }
    }
}

/** Returns {@code file}'s path relative to the current working directory ("." kept as prefix). */
private static String relativePath(final File file) {
    return file.getAbsolutePath().substring(new File(".").getAbsolutePath().length() - 1);
}

From source file:de.tudarmstadt.ukp.similarity.experiments.coling2012.util.CharacterNGramIdfValuesGenerator.java

/**
 * Computes document-frequency-based idf scores for all character n-grams in
 * the given dataset and writes them as "ngram\tidf" lines to a cache file.
 * Skips the computation entirely if the cache file already exists.
 */
@SuppressWarnings("unchecked")
public static void computeIdfScores(Dataset dataset, int n) throws Exception {
    File outputFile = new File(UTILS_DIR + "/character-ngrams-idf/" + n + "/" + dataset.toString() + ".txt");

    System.out.println("Computing character " + n + "-grams");

    if (outputFile.exists()) {
        System.out.println(" - skipping, already exists");
        return;
    }
    System.out.println(" - this may take a while...");

    CollectionReader reader = ColingUtils.getCollectionReader(dataset);

    // Tokenization: segment both views of each text pair.
    AnalysisEngineDescription seg = createPrimitiveDescription(BreakIteratorSegmenter.class);
    AggregateBuilder builder = new AggregateBuilder();
    builder.add(seg, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_1);
    builder.add(seg, CombinationReader.INITIAL_VIEW, CombinationReader.VIEW_2);
    AnalysisEngine aggr_seg = builder.createAggregate();

    // Output Writer dumps the corpus as plain text, one document per line.
    AnalysisEngine writer = createPrimitive(CharacterNGramIdfValuesGeneratorWriter.class,
            CharacterNGramIdfValuesGeneratorWriter.PARAM_OUTPUT_FILE, outputFile.getAbsolutePath());

    SimplePipeline.runPipeline(reader, aggr_seg, writer);

    // We now have plain text format.
    List<String> lines = FileUtils.readLines(outputFile);

    Map<String, Double> idfValues = new HashMap<String, Double>();

    CharacterNGramMeasure measure = new CharacterNGramMeasure(n, new HashMap<String, Double>());

    // n-gram representation of each document.
    List<Set<String>> docs = new ArrayList<Set<String>>();
    for (String line : lines) {
        docs.add(measure.getNGrams(line));
    }

    // Union of all n-grams across the corpus.
    Set<String> allNGrams = new HashSet<String>();
    for (Set<String> doc : docs) {
        allNGrams.addAll(doc);
    }

    // Document frequency: number of documents each n-gram occurs in.
    for (String ngram : allNGrams) {
        double count = 0;
        for (Set<String> doc : docs) {
            if (doc.contains(ngram)) {
                count++;
            }
        }
        idfValues.put(ngram, count);
    }

    // Convert document frequencies into idf in place; entrySet iteration
    // avoids the keySet + repeated get() of the original, and replacing
    // values for existing keys is safe during iteration.
    for (Map.Entry<String, Double> entry : idfValues.entrySet()) {
        entry.setValue(Math.log10(lines.size() / entry.getValue()));
    }

    // Store persistently as "ngram<TAB>idf" lines.
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, Double> entry : idfValues.entrySet()) {
        sb.append(entry.getKey()).append("\t").append(entry.getValue()).append(LF);
    }
    FileUtils.writeStringToFile(outputFile, sb.toString());

    System.out.println(" - done");
}

From source file:es.uvigo.ei.sing.adops.operations.running.mrbayes.MrBayes3_2ProcessManager.java

/**
 * Moves the MrBayes 3.2 consensus tree (written with a ".tre" suffix) to its
 * expected location and rewrites the "con_50_majrule" tree line in parsed
 * Newick form before delegating to the parent summary builder.
 */
@Override
public void buildSummary(MrBayesOutput output) throws OperationException {
    try {
        FileUtils.moveFile(new File(output.getConFile().getAbsolutePath() + ".tre"), output.getConFile());

        final List<String> lines = FileUtils.readLines(output.getConFile());
        for (int i = 0; i < lines.size(); i++) {
            final String current = lines.get(i);
            if (!current.contains("tree con_50_majrule")) {
                continue;
            }
            // Replace the raw tree expression with its parsed Newick form.
            final String[] parts = current.split("=");
            final String rawTree = parts[1].trim();
            lines.set(i, parts[0] + "= " + Newick.parse(rawTree.trim()));
        }

        FileUtils.writeLines(output.getConFile(), lines);

        super.buildSummary(output);
    } catch (Exception e) {
        throw new OperationException("Error while working with consensus tree", e);
    }
}

From source file:com.pddstudio.simplerequestparser.FolderLoader.java

/**
 * Clears and repopulates {@code singleApplicationList} from the JSON-lines
 * config file (one {@code SingleApplication} object per line), optionally
 * logging each parsed entry.
 *
 * @param printOutput whether to log details of every parsed application
 */
public void parseConfig(boolean printOutput) {
    singleApplicationList.clear();
    if (configFile.exists() && configFile.isFile()) {
        try {
            List<String> configList = FileUtils.readLines(configFile);
            if (configList == null) {
                // Defensive: FileUtils.readLines does not normally return null.
                Logger.log(Logger.LogType.ERROR, "configList<> is null!", Logger.ERR);
            } else if (configList.isEmpty()) {
                Logger.log(Logger.LogType.WARNING, "configList<> is empty.");
            } else {
                Logger.log(Logger.LogType.INFO, "loaded " + configList.size() + " lines from config.");
                // Gson is stateless for this usage — hoisted out of the loop
                // instead of being re-constructed once per line.
                Gson gson = new Gson();
                for (String jsonLine : configList) {
                    SingleApplication singleApplication = gson.fromJson(jsonLine, SingleApplication.class);
                    if (singleApplication == null) {
                        Logger.log(Logger.LogType.ERROR, "unable to create POJO object from config line.",
                                Logger.ERR);
                    } else {
                        if (printOutput) {
                            System.out.println();
                            Logger.log(Logger.LogType.INFO,
                                    "Application Name: " + singleApplication.getApplicationName());
                            Logger.log(Logger.LogType.INFO,
                                    "Application PlayStore Url: " + singleApplication.getPlaystoreUrl());
                            Logger.log(Logger.LogType.INFO,
                                    "Application Package: " + singleApplication.getApplicationPackage());
                            Logger.log(Logger.LogType.INFO, "Application Launcher Activity: "
                                    + singleApplication.getApplicationLaunchIntent());
                            Logger.log(Logger.LogType.INFO, "Application Exported Icon Name: "
                                    + singleApplication.getApplicationExportedIconName()
                                    + " || Local Icon-Resource exist: "
                                    + (new File(workDir, singleApplication.getApplicationExportedIconName())
                                            .exists() ? "[FOUND FILE]" : "[NOT FOUND]"));
                            Logger.log(Logger.LogType.INFO,
                                    "Application Hash-Value : " + singleApplication.getApplicationHashValue());
                        }
                        singleApplicationList.add(singleApplication);
                    }
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

From source file:edu.cornell.med.icb.ant.TestConcatLogsTask.java

@Test
public void testConcatLogs() throws IOException {

    // Delete any previous output so the assertion below proves the task ran.
    File output = new File(CONCAT_LOGS_OUTPUT);
    output.delete();

    Project project = new Project();
    project.setBasedir(".");

    ConcatLogsTask task = new ConcatLogsTask();
    task.setProject(project);
    task.setOutput(CONCAT_LOGS_OUTPUT);

    // Select all *.log files in the test-data directory.
    FilenameSelector selector = new FilenameSelector();
    selector.setName("*.log");
    FileSet fileset = new FileSet();
    fileset.setDir(new File("test-data"));
    fileset.addFilename(selector);
    task.addFileset(fileset);

    // Write the new log file.
    task.execute();

    assertTrue(CONCAT_LOGS_OUTPUT + " should exist.", output.exists());

    // Parameterized List<String> instead of the raw List of the original.
    List<String> lines = FileUtils.readLines(output);
    assertEquals("Wrong number of lines", 31, lines.size());
}

From source file:com.datatorrent.demos.mroperator.WordCountMRApplicationTest.java

/**
 * Runs the word-count demo DAG locally and verifies the reduced
 * "word=count" output matches the expected counts.
 */
@Test
public void testSomeMethod() throws Exception {
    LocalMode lma = LocalMode.newInstance();
    Configuration conf = new Configuration(false);
    conf.set("dt.application.WordCountDemo.operator.Mapper.dirName", testMeta.testDir);
    conf.setInt("dt.application.WordCountDemo.operator.Mapper.partitionCount", 1);
    conf.set("dt.application.WordCountDemo.operator.Console.filePath", testMeta.testDir);
    conf.set("dt.application.WordCountDemo.operator.Console.outputFileName", "output.txt");
    lma.prepareDAG(new NewWordCountApplication(), conf);
    LocalMode.Controller lc = lma.getController();
    lc.setHeartbeatMonitoringEnabled(false);
    lc.run(5000);
    lc.shutdown();

    // Parse each "word=count" output line into a map.
    List<String> readLines = FileUtils.readLines(new File(testMeta.testDir + "/output.txt"));
    Map<String, Integer> readMap = Maps.newHashMap();
    for (String readLine : readLines) {
        String[] splits = readLine.split("=");
        readMap.put(splits[0], Integer.valueOf(splits[1]));
    }

    Map<String, Integer> expectedMap = Maps.newHashMap();
    expectedMap.put("1", 2);
    expectedMap.put("2", 2);
    expectedMap.put("3", 2);
    Assert.assertEquals("expected reduced data ", expectedMap, readMap);
    LOG.info("read lines {}", readLines);
}

From source file:dk.nsi.haiba.lprimporter.testdata.SQLStatementsFromCSVFiles.java

/**
 * Reads the T_ADM.csv test fixture (first row is column metadata) and prints
 * one INSERT statement per data row to stdout.
 *
 * @throws IOException if the fixture file cannot be read
 */
private void generateAdministrationData() throws IOException {
    File file = FileUtils.toFile(getClass().getClassLoader().getResource("data/T_ADM.csv"));
    List<String> lines = FileUtils.readLines(file);

    boolean first = true;
    for (String line : lines) {
        if (first) {
            // first row is column metadata
            first = false;
            continue;
        }

        String[] splits = line.split(",");
        String cpr = splits[0];
        String sygehus = splits[1];
        String afdeling = splits[2];
        String idate = splits[3];
        // Empty in/out times default to "0" (unquoted numeric in the SQL).
        String itime = splits[4].isEmpty() ? "0" : splits[4];
        String udate = splits[5];
        String utime = splits[6].isEmpty() ? "0" : splits[6];
        // splits[7] is an unused dummy column — intentionally skipped.
        String recnum = splits[8];

        // StringBuilder: single-threaded use, no need for StringBuffer's locking.
        StringBuilder sql = new StringBuilder();
        sql.append(
                "INSERT INTO T_ADM (v_RECNUM, C_SGH, C_AFD, V_CPR, D_INDDTO,D_UDDTO,V_INDTIME,V_UDTIME) VALUES (")
                .append(recnum).append(", '")
                .append(sygehus).append("', '")
                .append(afdeling).append("', '")
                .append(cpr).append("', '")
                .append(idate).append("', '")
                .append(udate).append("', ")
                .append(itime).append(", ")
                .append(utime).append(");");

        System.out.println(sql.toString());
    }
}