List of usage examples for org.apache.commons.io IOUtils writeLines
public static void writeLines(Collection lines, String lineEnding, Writer writer) throws IOException
Writes the toString() value of each item in a collection to a Writer, line by line, using the specified line ending. From source file:org.openhab.io.habmin.services.sitemap.SitemapConfigResource.java
private Collection<SitemapBean> createSitemap(String sitemapname, String copyname, URI uri) { String fname = new String("configurations/sitemaps/" + sitemapname + SITEMAP_FILEEXT); try {// w w w .ja va 2 s.c o m List<String> sitemapData; if (copyname != null && !copyname.isEmpty()) { String fcopyname = new String("configurations/sitemaps/" + copyname + SITEMAP_FILEEXT); sitemapData = IOUtils.readLines(new FileInputStream(fcopyname)); // Now find the sitemap name and replace it! for (int cnt = 0; cnt < sitemapData.size(); cnt++) { Matcher matcher = SITEMAP_DEFINITION.matcher(sitemapData.get(cnt)); if (matcher.matches()) { sitemapData.set(cnt, "sitemap " + sitemapname + " label=\"" + matcher.group(2) + "\""); break; } } } else { // Default to a new file sitemapData = new ArrayList<String>(); sitemapData.add("sitemap " + sitemapname + " label=\"Main Menu\""); sitemapData.add("{"); sitemapData.add("}"); } // Check if the file exists File file = new File(fname); if (!file.exists()) { // Create the new sitemap file.createNewFile(); BufferedWriter out = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(file), "UTF-8")); IOUtils.writeLines(sitemapData, "\r\n", out); out.close(); // Update the model repository ModelRepository repo = HABminApplication.getModelRepository(); if (repo != null) { InputStream inFile; try { inFile = new FileInputStream(fname); repo.addOrRefreshModel(sitemapname + SITEMAP_FILEEXT, inFile); } catch (FileNotFoundException e) { logger.debug("Error refreshing new sitemap " + sitemapname + ":", e); } } } } catch (IOException e) { logger.debug("Error writing to sitemap file " + sitemapname + ":", e); } // Now return the sitemap list return getSitemapList(uri); }
From source file:org.openhab.io.hueemulation.internal.HueEmulationServlet.java
/**
 * Adds a username to the persisted user file, if it is not already known.
 * <p>
 * The in-memory {@code userNames} list is updated and the whole list is
 * rewritten to {@code USER_FILE}, one name per line.
 *
 * @param userName the user name to register
 * @throws IOException if the user file cannot be written
 */
private synchronized void addUser(String userName) throws IOException {
    if (userNames.contains(userName)) {
        // Already registered; nothing to persist
        return;
    }
    userNames.add(userName);
    // Ensure the parent directory exists before opening the file
    USER_FILE.getParentFile().mkdirs();
    FileOutputStream out = null;
    try {
        out = new FileOutputStream(USER_FILE);
        // null line ending means the platform default line separator
        IOUtils.writeLines(userNames, null, out);
    } finally {
        IOUtils.closeQuietly(out);
    }
}
From source file:org.openiot.gsn.http.restapi.VSManagerService.java
/**
 * Registers RDF (Turtle) metadata for a virtual sensor.
 * <p>
 * The metadata is loaded into a {@link SensorMetadata}, pushed to LSM via
 * {@link SensorAnnotator}, and appended to the sensor's {@code .ttl} file.
 *
 * @param metadata RDF metadata as a character stream
 * @param vsname   virtual sensor name
 * @return HTTP 200 on success
 * @throws VSensorConfigException if the metadata cannot be loaded
 */
@POST
@Path("/{vsname}/registerRdf")
public Response registerRdfVS(Reader metadata, @PathParam("vsname") String vsname) {
    SensorMetadata meta = new SensorMetadata();
    // Store the RDF metadata next to the sensor's XML configuration, as .ttl
    String filePath = VSensorLoader.getVSConfigurationFilePath(vsname).replace(".xml", ".ttl");
    try {
        List<String> lines = IOUtils.readLines(metadata);
        // Join the lines without separators (as the original did), but in
        // O(n) with a StringBuilder instead of O(n^2) string concatenation
        StringBuilder concat = new StringBuilder();
        for (String line : lines) {
            concat.append(line);
        }
        // NOTE(review): getBytes() uses the platform charset -- confirm the
        // metadata is ASCII/UTF-8 or pass an explicit charset
        InputStream is = new ByteArrayInputStream(concat.toString().getBytes());
        meta.load(is);
        SensorAnnotator.addRdfMetadatatoLSM(meta);
        // Append the raw lines to the .ttl file; close in finally so a
        // write failure does not leak the writer (original leaked it)
        FileWriter fw = new FileWriter(filePath, true);
        try {
            IOUtils.writeLines(lines, "\n", fw);
        } finally {
            IOUtils.closeQuietly(fw);
        }
    } catch (Exception e) {
        logger.error("Unable to load RDF metadata for sensor.", e);
        throw new VSensorConfigException("Unable to load RDF metadata for sensor.", e);
    }
    return Response.ok().build();
}
From source file:org.openiot.gsn.http.restapi.VSManagerService.java
@POST @Path("/{vsname}/register") public Response registerVS(InputStream metadata, @PathParam("vsname") String vsname) { String sensorId;// ww w . j a va 2 s . com String sensorIdOld = null; String filePath = VSensorLoader.getVSConfigurationFilePath(vsname).replace(".xml", ".metadata"); try { List<String> lines = IOUtils.readLines(metadata); FileWriter fw = new FileWriter(filePath, false); IOUtils.writeLines(lines, "\n", fw); fw.close(); LSMSensorMetaData lsmmd = new LSMSensorMetaData(); Config configData = ConfigFactory.parseFile(new File(filePath)); if (configData.hasPath(LSMSensorMetaData.KEY_SENSOR_ID)) { sensorIdOld = configData.getString(LSMSensorMetaData.KEY_SENSOR_ID); } lsmmd.init(configData, true); sensorId = SensorAnnotator.addSensorToLSM(lsmmd); if (sensorIdOld == null || sensorId.compareTo(sensorIdOld) != 0) { logger.info("SensorId has changed from {} to {}.", sensorIdOld, sensorId); lsmmd.setSensorID(sensorId); Config configDataNew = configData.withValue(LSMSensorMetaData.KEY_SENSOR_ID, ConfigValueFactory.fromAnyRef(sensorId)); configDataNew = configDataNew.withValue(LSMSensorMetaData.KEY_SENSOR_ID, ConfigValueFactory.fromAnyRef(sensorId)); String metadataNew = configDataNew.root() .render(ConfigRenderOptions.defaults().setJson(false).setOriginComments(false)); fw = new FileWriter(filePath, false); IOUtils.write(metadataNew, fw); fw.close(); } } catch (Exception e) { logger.error("Unable to load metadata for sensor", e); throw new VSensorConfigException("Unable to load metadata for sensor.", e); } return Response.ok(sensorId).build(); }
From source file:org.openrdf.rio.RDFWriterTest.java
private void testPerformanceInternal(boolean storeParsedStatements) throws Exception { Model model = new LinkedHashModel(); for (int i = 0; i < 100000; i++) { Value obj = potentialObjects.get(prng.nextInt(potentialObjects.size())); if (obj == litBigPlaceholder) { StringBuffer big = new StringBuffer(); int len = 25000 + prng.nextInt(5000); for (int j = 0; j < len; j++) { big.append(((char) (32 + prng.nextInt(90)))); }//ww w. j a v a 2s.c om obj = vf.createLiteral(big.toString()); } model.add(potentialSubjects.get(prng.nextInt(potentialSubjects.size())), potentialPredicates.get(prng.nextInt(potentialPredicates.size())), obj); } System.out.println("Test class: " + this.getClass().getName()); System.out.println("Test statements size: " + model.size() + " (" + rdfWriterFactory.getRDFFormat() + ")"); assertFalse("Did not generate any test statements", model.isEmpty()); File testFile = tempDir .newFile("performancetest." + rdfWriterFactory.getRDFFormat().getDefaultFileExtension()); FileOutputStream out = new FileOutputStream(testFile); try { long startWrite = System.currentTimeMillis(); RDFWriter rdfWriter = rdfWriterFactory.getWriter(out); setupWriterConfig(rdfWriter.getWriterConfig()); // Test prefixed URIs for only some of the URIs available rdfWriter.handleNamespace(RDF.PREFIX, RDF.NAMESPACE); rdfWriter.handleNamespace(SKOS.PREFIX, SKOS.NAMESPACE); rdfWriter.handleNamespace(FOAF.PREFIX, FOAF.NAMESPACE); rdfWriter.handleNamespace(EARL.PREFIX, EARL.NAMESPACE); rdfWriter.handleNamespace("ex", exNs); rdfWriter.startRDF(); for (Statement nextSt : model) { rdfWriter.handleStatement(nextSt); } rdfWriter.endRDF(); long endWrite = System.currentTimeMillis(); System.out.println( "Write took: " + (endWrite - startWrite) + " ms (" + rdfWriterFactory.getRDFFormat() + ")"); System.out.println("File size (bytes): " + testFile.length()); } finally { out.close(); } FileInputStream in = new FileInputStream(testFile); try { RDFParser rdfParser = rdfParserFactory.getParser(); 
setupParserConfig(rdfParser.getParserConfig()); rdfParser.setValueFactory(vf); Model parsedModel = new LinkedHashModel(); if (storeParsedStatements) { rdfParser.setRDFHandler(new StatementCollector(parsedModel)); } long startParse = System.currentTimeMillis(); rdfParser.parse(in, "foo:bar"); long endParse = System.currentTimeMillis(); System.out.println( "Parse took: " + (endParse - startParse) + " ms (" + rdfParserFactory.getRDFFormat() + ")"); if (storeParsedStatements) { if (model.size() != parsedModel.size()) { if (model.size() < 1000) { boolean originalIsSubset = Models.isSubset(model, parsedModel); boolean parsedIsSubset = Models.isSubset(parsedModel, model); System.out.println("originalIsSubset=" + originalIsSubset); System.out.println("parsedIsSubset=" + parsedIsSubset); System.out.println("Written statements=>"); IOUtils.writeLines(IOUtils.readLines(new FileInputStream(testFile)), "\n", System.out); System.out.println("Parsed statements=>"); Rio.write(parsedModel, System.out, RDFFormat.NQUADS); } } assertEquals("Unexpected number of statements, expected " + model.size() + " found " + parsedModel.size(), model.size(), parsedModel.size()); if (rdfParser.getRDFFormat().supportsNamespaces()) { assertTrue("Expected at least 5 namespaces, found " + parsedModel.getNamespaces().size(), parsedModel.getNamespaces().size() >= 5); assertEquals(exNs, parsedModel.getNamespace("ex").get().getName()); } } } finally { in.close(); } }
From source file:org.opoo.press.source.CachedSource.java
CachedSource(SourceEntry sourceEntry, Map<String, Object> frontMatter, List<String> contentLines) { super();//from www. ja v a 2 s . c o m this.frontMatter = frontMatter; this.sourceEntry = sourceEntry; FileOutputStream stream = null; try { this.contentFile = File.createTempFile("PageSourceContent", ".bin"); stream = new FileOutputStream(contentFile); IOUtils.writeLines(contentLines, null, stream); } catch (IOException e) { throw new RuntimeException(e); } finally { IOUtils.closeQuietly(stream); } }
From source file:org.opoo.press.source.SourceParserImpl.java
@Override public Source parse(SourceEntry sourceEntry) throws NoFrontMatterException { List<String> metaLines = new ArrayList<String>(); List<String> contentLines = new ArrayList<String>(); InputStream stream = null;//from ww w . j a va 2 s . c om List<String> currentList = metaLines; try { stream = new FileInputStream(sourceEntry.getFile()); LineIterator iterator = IOUtils.lineIterator(stream, "UTF-8"); if (!iterator.hasNext()) { throw new RuntimeException("File not content: " + sourceEntry.getFile()); } String line = iterator.next(); if (!isFrontMatterStartLine(line, sourceEntry)) { log.debug("Maybe a static file: " + sourceEntry.getFile()); throw new NoFrontMatterException(sourceEntry); } boolean hasFrontMatterEndLine = false; //process headers while (iterator.hasNext()) { line = iterator.next(); if (isFrontMatterEndLine(line)) { hasFrontMatterEndLine = true; currentList = contentLines; continue; } currentList.add(line); } if (!hasFrontMatterEndLine) { log.debug("Maybe a static file: " + sourceEntry.getFile()); throw new NoFrontMatterException(sourceEntry); } } catch (IOException e) { throw new RuntimeException(e); } finally { IOUtils.closeQuietly(stream); } StringWriter metaWriter = new StringWriter(); StringWriter contentWriter = new StringWriter(); try { IOUtils.writeLines(metaLines, null, metaWriter); IOUtils.writeLines(contentLines, null, contentWriter); } catch (IOException e) { throw new RuntimeException(e); } finally { IOUtils.closeQuietly(contentWriter); IOUtils.closeQuietly(metaWriter); } @SuppressWarnings("unchecked") Map<String, Object> map = (Map<String, Object>) yaml.load(metaWriter.toString()); String content = contentWriter.toString(); return new SimpleSource(sourceEntry, map, content); }
From source file:org.ow2.proactive.scheduler.authentication.ManageUsers.java
/** * Stores the logins into login.cfg/*w w w . ja v a 2 s . co m*/ */ private static void storeLoginFile(String loginFilePath, Properties props) throws IOException { try (BufferedWriter writer = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(loginFilePath)))) { props.store(writer, null); } List<String> lines = null; try (FileInputStream stream = new FileInputStream(loginFilePath)) { lines = IOUtils.readLines(stream); } TreeMap<String, String> sortedUsers = new TreeMap<>(); for (String line : lines) { if (!(line.isEmpty() || line.startsWith("#"))) { String[] loginAndPwd = line.split("=", 2); sortedUsers.put(loginAndPwd[0], loginAndPwd[1]); } } List<String> modifiedLines = new ArrayList<>(sortedUsers.size()); for (Map.Entry entry : sortedUsers.entrySet()) { modifiedLines.add(entry.getKey() + ":" + entry.getValue()); } try (BufferedWriter writer = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(loginFilePath)))) { IOUtils.writeLines(modifiedLines, System.getProperty("line.separator"), writer); } System.out.println("Stored login file in " + loginFilePath); }
From source file:org.silverpeas.tools.dbBuilder.wysiwyg.adjustment.LogRewriter.java
/** * Executing treatments//from www . ja v a 2 s. c o m */ private LogRewriter execute() throws Exception { dataWiring = DataWiring.execute(dbBuilderLogs); BufferedReader dbBuilderLogsReader = IOUtils.toBufferedReader(new FileReader(dbBuilderLogs)); try { FileOutputStream dbBuilderLogsOS = FileUtils.openOutputStream(rewrittenDbBuilderLogs); try { dataWiring.writeStatistics(dbBuilderLogsOS); if (Boolean.valueOf(System.getProperty("statsOnly"))) { return this; } String line; do { // A line line = dbBuilderLogsReader.readLine(); if (line == null) { break; } String componentIdEnding = null; // New component ? Matcher matcher = DataWiring.REGEXP_NEW_COMPONENT_DETECTOR.matcher(line); if (matcher.find()) { String componentId = matcher.group(1); if (currents.containsKey(componentId)) { throw new IllegalStateException( "The componentId " + componentId + " has already been started !!!"); } currents.put(componentId, new ComponentLogs(componentId)); } // End component ? matcher = DataWiring.REGEXP_END_COMPONENT_DETECTOR.matcher(line); if (matcher.find()) { String componentId = matcher.group(1); if (!currents.containsKey(componentId)) { throw new IllegalStateException( "The componentId " + componentId + " has already been ending !!!"); } componentIdEnding = componentId; } if (!currents.isEmpty()) { String componentId = dataWiring.getComponentIdFromLine(line); if (componentId == null) { // System.out.println("No component found in the line : " + line); continue; } ComponentLogs componentLogs = currents.get(componentId); if (componentLogs == null) { System.out.println("No component logs found the line : " + line); continue; } componentLogs.addLine(line); } if (componentIdEnding != null) { ComponentLogs componentLogs = currents.remove(componentIdEnding); dataWiring.clearComponentId(componentIdEnding); IOUtils.writeLines(componentLogs.getLines(), "\n", dbBuilderLogsOS); } } while (true); } finally { IOUtils.closeQuietly(dbBuilderLogsOS); } } finally { 
IOUtils.closeQuietly(dbBuilderLogsReader); } return this; }
From source file:org.silverpeas.tools.dbBuilder.wysiwyg.purge.LogRewriter.java
/** * Executing treatments/*from ww w .jav a 2 s .c om*/ */ private LogRewriter execute() throws Exception { dataWiring = DataWiring.execute(dbBuilderLogs); BufferedReader dbBuilderLogsReader = IOUtils.toBufferedReader(new FileReader(dbBuilderLogs)); try { FileOutputStream dbBuilderLogsOS = FileUtils.openOutputStream(rewrittenDbBuilderLogs); try { if (Boolean.valueOf(System.getProperty("statsOnly"))) { return this; } String line; do { // A line line = dbBuilderLogsReader.readLine(); if (line == null) { break; } String componentIdEnding = null; // New component ? Matcher matcher = DataWiring.REGEXP_NEW_COMPONENT_DETECTOR.matcher(line); if (matcher.find()) { String componentId = matcher.group(1); if (currents.containsKey(componentId)) { throw new IllegalStateException( "The componentId " + componentId + " has already been started !!!"); } currents.put(componentId, new ComponentLogs(componentId)); } // End component ? matcher = DataWiring.REGEXP_END_COMPONENT_DETECTOR.matcher(line); if (matcher.find()) { String componentId = matcher.group(1); if (!currents.containsKey(componentId)) { throw new IllegalStateException( "The componentId " + componentId + " has already been ending !!!"); } componentIdEnding = componentId; } if (!currents.isEmpty()) { String componentId = dataWiring.getComponentIdFromLine(line); if (componentId == null) { // System.out.println("No component found in the line : " + line); continue; } ComponentLogs componentLogs = currents.get(componentId); if (componentLogs == null) { System.out.println("No component logs found the line : " + line); continue; } componentLogs.addLine(line); } if (componentIdEnding != null) { ComponentLogs componentLogs = currents.remove(componentIdEnding); dataWiring.clearComponentId(componentIdEnding); IOUtils.writeLines(componentLogs.getLines(), "\n", dbBuilderLogsOS); } } while (true); } finally { IOUtils.closeQuietly(dbBuilderLogsOS); } } finally { IOUtils.closeQuietly(dbBuilderLogsReader); } return this; }