List of usage examples for org.apache.commons.io FileUtils readLines
public static List&lt;String&gt; readLines(File file) throws IOException
From source file:dk.nsi.haiba.lprimporter.testdata.SQLStatementsFromCSVFiles.java
private void generateDiagnosesData() throws IOException { File file = FileUtils.toFile(getClass().getClassLoader().getResource("data/T_DIAG.csv")); boolean first = true; List<String> lines = FileUtils.readLines(file); for (String line : lines) { if (first) { // first row is column metadata first = false;//from w w w . ja v a 2 s . co m continue; } String[] splits = line.split(","); String recnum = splits[0]; String code = splits[1]; String tillaeg = splits[2]; String type = "A"; StringBuffer sql = new StringBuffer(); sql.append("INSERT INTO T_DIAG (V_RECNUM, C_DIAG, C_TILDIAG, C_DIAGTYPE) VALUES ("); sql.append(recnum); sql.append(", '"); sql.append(code); sql.append("', '"); sql.append(tillaeg); sql.append("', '"); sql.append(type); sql.append("');"); System.out.println(sql.toString()); } }
From source file:com.bitplan.pdfindex.TestPdfindexer.java
/**
 * Runs the PDF indexer over the cajunfiles source list and verifies the
 * generated HTML index has the expected line/entry counts and contains the
 * expected page anchor.
 *
 * @throws IOException if the generated HTML output cannot be read
 */
@Test
public void testSorting() throws IOException {
    final String outputPath = "test/cajunfiles.html";
    final String[] indexerArgs = {
        "--sourceFileList", "test/cajunfiles.lst",
        "--idxfile", "test/indices/cajunfiles",
        "--keyWords", "Adobe,IBM,MS-DOS",
        "--outputfile", outputPath
    };
    this.testPdfIndexer(indexerArgs);
    final List<String> htmlLines = FileUtils.readLines(new File(outputPath));
    checkLines(htmlLines, 49, 16, "cajun.pdf#page=1");
}
From source file:edu.isi.pfindr.learn.util.PairsFileIO.java
public void generatePairsFromTwoDifferentFilesWithClass(String inputFilePath1, String inputFilePath2, String outputFilePath) {/*w ww .j a v a2 s. c o m*/ List<String> phenotypeList1 = new ArrayList<String>(); List<String> phenotypeList2 = new ArrayList<String>(); try { phenotypeList1 = FileUtils.readLines(new File(inputFilePath1)); phenotypeList2 = FileUtils.readLines(new File(inputFilePath2)); } catch (IOException ioe) { ioe.printStackTrace(); } String[] phenotype1, phenotype2; StringBuffer outputBuffer = new StringBuffer(); //List<String> resultList = new ArrayList<String>(); BufferedWriter bw = null; try { bw = new BufferedWriter(new FileWriter(outputFilePath)); int count = 0; for (int i = 0; i < phenotypeList1.size(); i++) { phenotype1 = phenotypeList1.get(i).split("\t"); for (int j = 0; j < phenotypeList2.size(); j++) { count++; phenotype2 = phenotypeList2.get(j).split("\t"); System.out.println("i " + i + "j " + j + " " + phenotype1[0] + " " + phenotype2[0]); if (phenotype1[1].equals(phenotype2[1])) { //if the classes are the same //if (phenotype1[1].equals(phenotype2[0])) { //if the classes are the same //resultList.add(String.format("%s\t%s\t%d", phenotype1[3], phenotype2[3], 1)); //resultList.add(String.format("%s\t%s\t%d", phenotype1[0], phenotype2[1], 1)); outputBuffer.append(String.format("%s\t%s\t%d", phenotype1[0], phenotype2[0], 1)) .append("\n"); //bw.write(String.format("%s\t%s\t%d", phenotype1[0], phenotype2[0], 1) + "\n"); } else { //resultList.add(String.format("%s\t%s\t%d", phenotype1[3], phenotype2[3], 0)); //resultList.add(String.format("%s\t%s\t%d", phenotype1[0], phenotype2[1], 0)); //bw.write(String.format("%s\t%s\t%d", phenotype1[0], phenotype2[0], 0) + "\n"); outputBuffer.append(String.format("%s\t%s\t%d", phenotype1[0], phenotype2[0], 0)) .append("\n"); } bw.append(outputBuffer.toString()); outputBuffer.setLength(0); } } bw.flush(); System.out.println("The count is: " + count); } catch (IOException io) { try { if (bw != null) bw.close(); 
io.printStackTrace(); } catch (IOException e) { System.out.println("Problem occured while closing output stream " + bw); e.printStackTrace(); } } catch (Exception e) { e.printStackTrace(); } finally { try { if (bw != null) bw.close(); } catch (IOException e) { System.out.println("Problem occured while closing output stream " + bw); e.printStackTrace(); } } }
From source file:es.uvigo.ei.sing.adops.operations.running.tcoffee.TCoffeeDefaultProcessManager.java
/**
 * Rewrites the given file in place, replacing every occurrence of the name
 * "o" with the gap symbol "-" (delegating the replacement to
 * Utils.replaceNames).
 *
 * @param inputFile file whose lines are rewritten in place
 * @throws OperationException if reading or writing the file fails
 */
private void replaceOWithGaps(File inputFile) throws OperationException {
    if (this.getLogger() != null) {
        this.getLogger().info("Command: Replacing o with gaps in: " + inputFile.getAbsolutePath());
    }
    try {
        final List<String> lines = FileUtils.readLines(inputFile);
        FileUtils.writeLines(inputFile, Utils.replaceNames(Collections.singletonMap("o", "-"), lines));
    } catch (IOException ioe) {
        throw new OperationException(ioe);
    }
}
From source file:dk.nsi.haiba.lprimporter.integrationtest.RulesEngineIT.java
@Test public void CheckErrorMessagesAreLoggedCorrectly() { op1 = null;// w ww .j a va2 s .c o m List<Administration> contacts = setupContacts(); rulesEngine.processRuleChain(contacts, Statistics.getInstance()); assertEquals("Expected 1 row", 1, jdbc.queryForInt("select count(*) from RegelFejlbeskeder")); assertEquals(recordNummer, jdbc.queryForObject("select LPR_recordnummer from RegelFejlbeskeder", String.class)); assertEquals(dbId, jdbc.queryForInt("select LPR_dbId from RegelFejlbeskeder")); File file = FileUtils.getFile("forretningsregel-fejl.log"); assertNotNull(file); try { List<String> lines = FileUtils.readLines(file); assertEquals(1, lines.size()); assertTrue(lines.get(0).contains("Proceduredato findes ikke")); } catch (IOException e) { fail("Lines are expected"); } }
From source file:edu.ku.brc.specify.tools.schemalocale.LocalizerApp.java
/**
 * Cross-checks every key in the current locale's resources_*.properties file
 * against the Lucene index and reports keys that produce no hits.
 *
 * For each key with zero hits, progressively shorter sub-keys (after the
 * first '.', then after the last '.') are retried before the key is declared
 * not found. Misses are printed to stdout and written to a
 * "resources.html" report via ConversionLogger/TableWriter.
 */
public void processProperties() {
    initLucene();
    try {
        Locale currLocale = Locale.getDefault();
        Vector<String> terms = new Vector<String>();
        File resFile = new File("src/resources_" + currLocale.getLanguage() + ".properties");
        List<?> lines = FileUtils.readLines(resFile);
        // Collect the property keys (text before '='); '#' lines are comments.
        for (String line : (List<String>) lines) {
            if (!line.startsWith("#")) {
                int inx = line.indexOf("=");
                if (inx > -1) {
                    String[] toks = StringUtils.split(line, "=");
                    if (toks.length > 1) {
                        terms.add(toks[0]);
                    }
                }
            }
        }
        Vector<Pair<String, String>> notFoundList = new Vector<Pair<String, String>>();
        String field = "contents";
        QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, field, analyzer);
        for (String term : terms) {
            Query query;
            try {
                // "AND"/"OR" are Lucene query operators and cannot be parsed as terms.
                if (term.equals("AND") || term.equals("OR"))
                    continue;
                query = parser.parse(term);
                String subTerm = null;
                int hits = getTotalHits(query, 10);
                if (hits == 0) {
                    // Fallback 1: search the key with its first segment stripped.
                    int inx = term.indexOf('.');
                    if (inx > -1) {
                        subTerm = term.substring(inx + 1);
                        hits = getTotalHits(parser.parse(subTerm), 10);
                        if (hits == 0) {
                            // Fallback 2: search only the last segment (when
                            // distinct from fallback 1).
                            int lastInx = term.lastIndexOf('.');
                            if (lastInx > -1 && lastInx != inx) {
                                subTerm = term.substring(lastInx + 1);
                                hits = getTotalHits(parser.parse(subTerm), 10);
                            }
                        }
                    }
                }
                if (hits == 0) {
                    notFoundList.add(new Pair<String, String>(term, subTerm));
                    System.out.println("'" + term + "' was not found "
                            + (subTerm != null ? ("SubTerm[" + subTerm + "]") : ""));
                }
            } catch (ParseException e) {
                e.printStackTrace();
            }
        }
        // Emit the misses as an HTML table: Id | Full Key | Sub Key.
        ConversionLogger convLogger = new ConversionLogger();
        convLogger.initialize("resources", "Resources");
        TableWriter tblWriter = convLogger.getWriter("resources.html", "Resources");
        tblWriter.startTable();
        tblWriter.logHdr("Id", "Full Key", "Sub Key");
        int cnt = 1;
        for (Pair<String, String> pair : notFoundList) {
            tblWriter.log(Integer.toString(cnt++), pair.first, pair.second != null ?
                    pair.second : " ");
        }
        tblWriter.endTable();
        convLogger.closeAll();
    } catch (IOException ex) {
        edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount();
        edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(LocalizerApp.class, ex);
        ex.printStackTrace();
    }
}
From source file:com.gargoylesoftware.htmlunit.BrowserVersionFeaturesTest.java
/**
 * Recursively scans {@code dir} for .java sources (skipping .svn
 * directories) and removes from {@code unusedFeatures} every feature name
 * that appears as {@code BrowserVersionFeatures.<name>} on some source line.
 *
 * @param dir            directory to scan recursively
 * @param unusedFeatures candidate feature names; found entries are removed in place
 * @throws IOException if a source file cannot be read
 */
private void unusedCheck(final File dir, final List<String> unusedFeatures) throws IOException {
    final String featurePrefix = BrowserVersionFeatures.class.getSimpleName() + '.';
    for (final File entry : dir.listFiles()) {
        if (entry.isDirectory()) {
            if (!".svn".equals(entry.getName())) {
                unusedCheck(entry, unusedFeatures);
            }
        } else if (entry.getName().endsWith(".java")) {
            for (final String sourceLine : FileUtils.readLines(entry)) {
                final Iterator<String> candidates = unusedFeatures.iterator();
                while (candidates.hasNext()) {
                    if (sourceLine.contains(featurePrefix + candidates.next())) {
                        candidates.remove();
                    }
                }
            }
        }
    }
}
From source file:dk.nsi.haiba.lprimporter.testdata.SQLStatementsFromCPR83174CSV.java
private void generateDiagnosesData() throws IOException { File file = FileUtils.toFile(getClass().getClassLoader().getResource("data/cpr83174DIAG.csv")); boolean first = true; List<String> lines = FileUtils.readLines(file); for (String line : lines) { if (first) { // first row is column metadata first = false;// w w w . ja v a 2 s.c om continue; } // V_RECNUM;C_DIAG;C_TILDIAG String[] splits = line.split(";"); String recnum = splits[0]; String code = splits[1]; String tillaeg = splits[2]; String type = "A"; StringBuffer sql = new StringBuffer(); sql.append("INSERT INTO T_DIAG (V_RECNUM, C_DIAG, C_TILDIAG, C_DIAGTYPE) VALUES ("); sql.append(recnum); sql.append(", '"); sql.append(code); sql.append("', '"); sql.append(tillaeg); sql.append("', '"); sql.append(type); sql.append("');"); System.out.println(sql.toString()); } }
From source file:com.denimgroup.threadfix.importer.cli.CommandLineMigration.java
private static void convert(String inputScript, String outputScript) { File file = new File(inputScript); LOGGER.info("Converting threadfix script to mySql script " + outputScript + " ..."); File outputFile = new File(outputScript); FileOutputStream fos = null;//w w w . j a va 2 s . c om try { fos = new FileOutputStream(outputFile); OutputStreamWriter osw = new OutputStreamWriter(fos); List<String> lines = FileUtils.readLines(file); osw.write("SET FOREIGN_KEY_CHECKS=0;\n"); String table; for (String line : lines) { if (line != null && line.toUpperCase().startsWith("CREATE MEMORY TABLE ")) { table = RegexUtils.getRegexResult(line, TABLE_PATTERN); System.out.println("Create new table:" + table); String[] tableName = table.split("\\(", 2); if (tableName.length == 2) { List<String> fieldList = list(); String[] fields = tableName[1].trim().replace("(", "").replace(")", "").split(","); for (int i = 0; i < fields.length; i++) { if (!"CONSTRAINT".equalsIgnoreCase(fields[i].trim().split(" ")[0])) { String field = fields[i].trim().split(" ")[0].replace("\"", ""); if (!fieldList.contains(field)) fieldList.add(field); } } String fieldsStr = org.apache.commons.lang3.StringUtils.join(fieldList, ","); tableMap.put(tableName[0].toUpperCase(), "(" + fieldsStr + ")"); } } else if (line != null && line.toUpperCase().startsWith("INSERT INTO ")) { table = RegexUtils.getRegexResult(line, INSERT_PATTERN).toUpperCase(); if (tableMap.get(table) != null) { line = line.replaceFirst(" " + table + " ", " " + table + tableMap.get(table) + " "); if (line.contains(ACUNETIX_ESCAPE)) { line = line.replace(ACUNETIX_ESCAPE, ACUNETIX_ESCAPE_REPLACE); } line = escapeString(line) + ";\n"; osw.write(line); } } } osw.write("SET FOREIGN_KEY_CHECKS=1;\n"); osw.close(); } catch (Exception e) { e.printStackTrace(); } }
From source file:it.drwolf.ridire.session.async.Mapper.java
@SuppressWarnings("unchecked") public static Integer countWordsFromPoSTagResource(String posTagResourceFileName) throws IOException { List<String> lines = FileUtils.readLines(new File(posTagResourceFileName)); Integer count = 0;//ww w. j a v a 2 s . c o m StrTokenizer tokenizer = StrTokenizer.getTSVInstance(); for (String l : lines) { tokenizer.reset(l); String[] tokens = tokenizer.getTokenArray(); if (tokens.length == 3) { if (Mapper.isValidPos(tokens[1].trim())) { ++count; } } } return count; }