List of usage examples for org.apache.commons.io IOUtils readLines
public static List<String> readLines(Reader input) throws IOException
Gets the contents of a Reader as a list of Strings, one entry per line. From source file:com.sap.research.connectivity.gw.parsers.JavaSourceFileEditor.java
private List<String> getFileLines() { InputStream inputStream = sourceFile.getInputStream(); List<String> lines = null; try {//from www.j a v a 2 s. com lines = IOUtils.readLines(inputStream); } catch (IOException e) { System.out.println("Could not read the lines of the class file"); e.printStackTrace(); } finally { IOUtils.closeQuietly(inputStream); } return lines; }
From source file:io.anserini.rerank.lib.AxiomReranker.java
/** * If the result is deterministic we can cache all the external docids by reading them from a file *///from w w w .j a va 2 s. co m private List<String> buildExternalDocidsCache(SearchArgs args) throws IOException { InputStream in = getReadFileStream(args.axiom_docids); BufferedReader bRdr = new BufferedReader(new InputStreamReader(in)); return IOUtils.readLines(bRdr); }
From source file:com.redhat.red.offliner.PomArtifactListReaderTest.java
/**
 * Checks if type of a dependency is mapped correctly, if its mapping to extension-classifier is defined by an
 * external properties file. First it runs with an empty mapping file to ensure the mapping is not applied. Then the
 * test simply stores the default properties contents into a temporary file and runs checks if mapping was applied.
 */
@Test
public void readPathsMapTypeWithExternalMapping() throws Exception {
    // create empty mapping properties file
    File mappingFile = getFile("test.properties");
    try (OutputStream os = new FileOutputStream(mappingFile)) {
        // nothing to do, just create an empty file
    }

    // create reader with the empty mapping
    PomArtifactListReader artifactListReader = new PomArtifactListReader(null, mappingFile.getPath(),
            new BasicCredentialsProvider());

    ArtifactList artList = artifactListReader.readPaths(getFile("repo.pom"));
    List<String> paths = artList.getPaths();
    // with an empty mapping the raw "maven-plugin"/"test-jar" types must survive unmapped
    checkPath(paths, "org/apache/maven/plugins/maven-assembly-plugin/2.5.5/maven-assembly-plugin-2.5.5.pom");
    checkPath(paths, "org/apache/maven/plugins/maven-assembly-plugin/2.5.5/maven-assembly-plugin-2.5.5.jar", false);
    checkPath(paths, "org/apache/maven/plugins/maven-assembly-plugin/2.5.5/maven-assembly-plugin-2.5.5.maven-plugin");
    checkPath(paths, "org/apache/ant/ant/1.8.0/ant-1.8.0-tests.jar", false);
    checkPath(paths, "org/apache/ant/ant/1.8.0/ant-1.8.0.test-jar");

    // read the contents of the internal properties file.
    // ISO-8859-1 round-trips every byte and is the java.util.Properties encoding,
    // so the copy below is faithful (the original used the platform default charset)
    List<String> contents;
    try (InputStream is = getClass().getClassLoader()
            .getResourceAsStream(PomArtifactListReader.DEFAULT_TYPE_MAPPING_RES)) {
        contents = IOUtils.readLines(is, java.nio.charset.StandardCharsets.ISO_8859_1);
    }

    // write the mapping into the external file (null line ending = platform default)
    try (OutputStream os = new FileOutputStream(mappingFile)) {
        IOUtils.writeLines(contents, null, os, java.nio.charset.StandardCharsets.ISO_8859_1);
    }

    // create reader with the copied mapping
    artifactListReader = new PomArtifactListReader(null, mappingFile.getPath(), new BasicCredentialsProvider());

    artList = artifactListReader.readPaths(getFile("repo.pom"));
    paths = artList.getPaths();
    // now the type mapping must have been applied
    checkPath(paths, "org/apache/maven/plugins/maven-assembly-plugin/2.5.5/maven-assembly-plugin-2.5.5.pom");
    checkPath(paths, "org/apache/maven/plugins/maven-assembly-plugin/2.5.5/maven-assembly-plugin-2.5.5.jar");
    checkPath(paths, "org/apache/ant/ant/1.8.0/ant-1.8.0-tests.jar");
}
From source file:gov.nih.nci.nbia.servlet.DownloadServletV2.java
/**
 * Streams the named manifest file back to the client as a plain-text attachment.
 *
 * @param fileName path of the manifest file on the server
 * @param response servlet response the file content is written to
 * @param userId   currently unused by this method — TODO confirm against callers
 * @param password currently unused by this method — TODO confirm against callers
 */
private void downloadManifestFile(String fileName, HttpServletResponse response, String userId,
        String password) {
    logger.info("looking for manifest file name ..." + fileName);
    response.setContentType("text/plain");
    response.setHeader("Content-Disposition", "attachment;filename=downloadname.txt");
    // try-with-resources: the original never closed the FileReader, and skipped
    // closing the output stream when writeLines threw.
    // Explicit charset instead of FileReader's platform default;
    // assumes the manifest file is UTF-8 — TODO confirm
    try (Reader reader = new InputStreamReader(new FileInputStream(fileName),
            java.nio.charset.StandardCharsets.UTF_8);
         OutputStream os = response.getOutputStream()) {
        List<String> readLines = IOUtils.readLines(reader);
        IOUtils.writeLines(readLines, System.getProperty("line.separator"), os);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:edu.virginia.speclab.juxta.author.model.JuxtaXMLParser.java
/** * read doc into lines. determine character offset map for each line. * collapse lines into one raw xml string with new lines. * @throws ReportedException/* w ww. ja v a2 s. c o m*/ */ private void firstPassReadFile() throws ReportedException { try { FileInputStream fis = new FileInputStream(this.file); InputStreamReader is = new InputStreamReader(stripUtf8Bom(fis), this.decoder); List<String> lines = IOUtils.readLines(is); StringBuffer rawXMLTextBuffer = new StringBuffer(); int offset = 0; lineToOffsetMap.add(new Integer(-1)); for (String line : lines) { // Strip out marcon decorators; they break the text components line = line.replaceAll("[~\u0304]", ""); int length = line.length(); lineToOffsetMap.add(new Integer(offset + length)); offset += length + 1; rawXMLTextBuffer.append(line).append("\n"); } rawXMLText = rawXMLTextBuffer.toString(); } catch (IOException e) { throw new ReportedException(e, "Problem with I/O on file: " + file); } }
From source file:com.opengamma.examples.bloomberg.loader.ExampleEquityPortfolioLoader.java
protected Collection<ExternalId> readEquityTickers() { final Collection<ExternalId> result = new ArrayList<>(); final InputStream inputStream = ExampleEquityPortfolioLoader.class .getResourceAsStream("example-equity.csv"); try {//from w ww .j av a 2 s . c o m if (inputStream != null) { final List<String> equityTickers = IOUtils.readLines(inputStream); for (String idStr : equityTickers) { idStr = StringUtils.trimToNull(idStr); if (idStr != null && !idStr.startsWith("#")) { result.add(ExternalSchemes.bloombergTickerSecurityId(idStr)); } } } else { throw new OpenGammaRuntimeException("File '" + EXAMPLE_EQUITY_FILE + "' could not be found"); } } catch (final IOException ex) { throw new OpenGammaRuntimeException( "An error occurred while reading file '" + EXAMPLE_EQUITY_FILE + "'"); } finally { IOUtils.closeQuietly(inputStream); } final StringBuilder sb = new StringBuilder(); sb.append("Parsed ").append(result.size()).append(" equities:\n"); for (final ExternalId equityId : result) { sb.append("\t").append(equityId.getValue()).append("\n"); } s_logger.info(sb.toString()); return result; }
From source file:com.opengamma.bloombergexample.loader.ExampleEquityPortfolioLoader.java
protected Collection<ExternalId> readEquityTickers() { Collection<ExternalId> result = new ArrayList<ExternalId>(); InputStream inputStream = ExampleEquityPortfolioLoader.class.getResourceAsStream("example-equity.csv"); try {//from w ww. j a v a2 s .co m if (inputStream != null) { List<String> equityTickers = IOUtils.readLines(inputStream); for (String idStr : equityTickers) { idStr = StringUtils.trimToNull(idStr); if (idStr != null && !idStr.startsWith("#")) { result.add(ExternalSchemes.bloombergTickerSecurityId(idStr)); } } } else { throw new OpenGammaRuntimeException("File '" + EXAMPLE_EQUITY_FILE + "' could not be found"); } } catch (IOException ex) { throw new OpenGammaRuntimeException( "An error occurred while reading file '" + EXAMPLE_EQUITY_FILE + "'"); } finally { IOUtils.closeQuietly(inputStream); } StringBuilder sb = new StringBuilder(); sb.append("Parsed ").append(result.size()).append(" equities:\n"); for (ExternalId equityId : result) { sb.append("\t").append(equityId.getValue()).append("\n"); } s_logger.info(sb.toString()); return result; }
From source file:com.linkedin.pinot.tools.perf.QueryRunner.java
/**
 * Use single thread to run queries as fast as possible.
 * <p>Use a single thread to send queries back to back and log statistic information periodically.
 * <p>Queries are picked sequentially from the query file.
 * <p>Query runner will stop when all queries in the query file has been executed number of times configured.
 *
 * @param conf perf benchmark driver config.
 * @param queryFile query file.
 * @param numTimesToRunQueries number of times to run all queries in the query file, 0 means infinite times.
 * @param reportIntervalMs report interval in milliseconds.
 * @param numIntervalsToReportAndClearStatistics number of report intervals to report detailed statistics and clear
 *                                               them, 0 means never.
 * @throws Exception
 */
public static void singleThreadedQueryRunner(PerfBenchmarkDriverConf conf, String queryFile,
        int numTimesToRunQueries, int reportIntervalMs, int numIntervalsToReportAndClearStatistics)
        throws Exception {
    List<String> queries;
    try (FileInputStream input = new FileInputStream(new File(queryFile))) {
        // explicit charset: the charset-less overload uses the platform default;
        // assumes the query file is UTF-8 — TODO confirm
        queries = IOUtils.readLines(input, java.nio.charset.StandardCharsets.UTF_8);
    }

    PerfBenchmarkDriver driver = new PerfBenchmarkDriver(conf);
    int numQueriesExecuted = 0;
    long totalBrokerTime = 0L;
    long totalClientTime = 0L;
    List<Statistics> statisticsList = Collections.singletonList(new Statistics(CLIENT_TIME_STATISTICS));

    long startTime = System.currentTimeMillis();
    long reportStartTime = startTime;
    int numReportIntervals = 0;
    int numTimesExecuted = 0;
    while (numTimesToRunQueries == 0 || numTimesExecuted < numTimesToRunQueries) {
        for (String query : queries) {
            JSONObject response = driver.postQuery(query);
            numQueriesExecuted++;
            long brokerTime = response.getLong("timeUsedMs");
            totalBrokerTime += brokerTime;
            long clientTime = response.getLong("totalTime");
            totalClientTime += clientTime;
            statisticsList.get(0).addValue(clientTime);

            long currentTime = System.currentTimeMillis();
            if (currentTime - reportStartTime >= reportIntervalMs) {
                long timePassed = currentTime - startTime;
                logAverages(timePassed, numQueriesExecuted, totalBrokerTime, totalClientTime);
                reportStartTime = currentTime;
                numReportIntervals++;
                if ((numIntervalsToReportAndClearStatistics != 0)
                        && (numReportIntervals == numIntervalsToReportAndClearStatistics)) {
                    // reset the rolling window so subsequent intervals report fresh numbers
                    numReportIntervals = 0;
                    startTime = currentTime;
                    numQueriesExecuted = 0;
                    totalBrokerTime = 0L;
                    totalClientTime = 0L;
                    for (Statistics statistics : statisticsList) {
                        statistics.report();
                        statistics.clear();
                    }
                }
            }
        }
        numTimesExecuted++;
    }

    long timePassed = System.currentTimeMillis() - startTime;
    LOGGER.info("--------------------------------------------------------------------------------");
    LOGGER.info("FINAL REPORT:");
    logAverages(timePassed, numQueriesExecuted, totalBrokerTime, totalClientTime);
    for (Statistics statistics : statisticsList) {
        statistics.report();
    }
}

/**
 * Logs elapsed time, query count, average QPS and average broker/client latency.
 * Extracted because the identical log statement appeared twice in the runner.
 */
private static void logAverages(long timePassed, int numQueriesExecuted, long totalBrokerTime,
        long totalClientTime) {
    LOGGER.info(
            "Time Passed: {}ms, Queries Executed: {}, Average QPS: {}, Average Broker Time: {}ms, "
                    + "Average Client Time: {}ms.",
            timePassed, numQueriesExecuted,
            numQueriesExecuted / ((double) timePassed / MILLIS_PER_SECOND),
            totalBrokerTime / (double) numQueriesExecuted, totalClientTime / (double) numQueriesExecuted);
}
From source file:net.logstash.logback.encoder.LogstashEncoderTest.java
@Test public void testEncoderConfiguration() throws Exception { // Empty the log file PrintWriter writer = new PrintWriter(System.getProperty("java.io.tmpdir") + "/test.log"); writer.print(""); writer.close();/* ww w. jav a2 s. c o m*/ LOG.info("Testing info logging."); InputStream is = new FileInputStream(System.getProperty("java.io.tmpdir") + "/test.log"); List<String> lines = IOUtils.readLines(is); JsonNode node = MAPPER.readTree(lines.get(0).getBytes("UTF-8")); assertThat(node.get("appname").textValue(), is("damnGodWebservice")); Assert.assertTrue( node.get("roles").equals(LogstashFormatter.parseCustomFields("[\"customerorder\", \"auth\"]"))); Assert.assertTrue(node.get("buildinfo").equals(LogstashFormatter.parseCustomFields( "{ \"version\" : \"Version 0.1.0-SNAPSHOT\", \"lastcommit\" : \"75473700d5befa953c45f630c6d9105413c16fe1\"}"))); }
From source file:de.openknowledge.jaxrs.versioning.AddressResourceTest.java
private InputStream send(URL url, String method, String resource) throws IOException { HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.setDoOutput(true);//w w w . j a v a 2s. c om connection.setRequestMethod(method); connection.setRequestProperty("Content-Type", "application/json"); PrintWriter writer = new PrintWriter(connection.getOutputStream()); for (String line : IOUtils.readLines(AddressV1.class.getResourceAsStream(resource))) { writer.write(line); } writer.close(); return connection.getInputStream(); }