List of usage examples for org.apache.commons.io LineIterator nextLine
Method signature: public String nextLine() — returns the next line read from the wrapped Reader.
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
private Set<String> collectCollaborators() throws Exception { Set<String> ids = Sets.newHashSet(); LineIterator iterator = getLineIterator("relations.json"); String line = ""; try {/*ww w . j a v a 2 s . c om*/ while (iterator.hasNext()) { line = preprocessJson(iterator.nextLine()); if (!line.isEmpty()) { XRelation object = objectMapper.readValue(line, XRelation.class); String relationType = filterField(object.relation_type); if ("collaborated_with".equals(relationType)) { String leftObject = verifyNonEmptyField(line, "leftObject", filterField(object.leftObject)); String leftId = verifyNonEmptyField(line, "leftId", filterField(object.leftId)); if ("Person".equals(leftObject)) { ids.add(leftId); } } } } } catch (Exception e) { throw e; } finally { LineIterator.closeQuietly(iterator); } return ids; }
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
private void importRelations() throws Exception { relationTypeConcordance = getRelationTypeConcordance(); LineIterator iterator = getLineIterator("relations.json"); String line = ""; try {//from w ww. j av a 2s .c o m while (iterator.hasNext()) { line = preprocessJson(iterator.nextLine()); if (!line.isEmpty()) { handleRelation(line); } } } catch (Exception e) { System.out.println(line); throw e; } finally { LineIterator.closeQuietly(iterator); relationTypeConcordance = null; } }
From source file:nl.opengeogroep.safetymaps.server.admin.stripes.LayerActionBean.java
@Before(stages = LifecycleStage.BindingAndValidation) private void findMapfiles() { try {/*from www. j a va 2 s. c o m*/ JSONArray a = new JSONArray(); File search = Cfg.getPath("static_mapserver_searchdirs"); if (search != null) { for (File f : FileUtils.listFiles(search, new String[] { "map" }, true)) { JSONObject m = new JSONObject(); m.put("path", f.getPath().substring(search.getPath().length() + 1)); a.put(m); // Naive mapfile parser. Perhaps replace by JavaScript client-side // GetCap parsing JSONArray l = new JSONArray(); m.put("layers", l); LineIterator it = FileUtils.lineIterator(f, "US-ASCII"); try { while (it.hasNext()) { String line = it.nextLine().trim(); if (line.equals("LAYER")) { String n = it.nextLine().trim(); n = n.substring(6, n.length() - 1); l.put(n); } } } finally { it.close(); } } } mapFilesJson = a.toString(4); } catch (Exception e) { } }
From source file:nl.ru.cmbi.vase.tools.util.ToolBox.java
public static StringBuilder getStringBuilderFromFile(final String fileName) throws IOException { // int STRINGBUILDER_SIZE = 403228383; // big! File file = new File(fileName); LineIterator it = FileUtils.lineIterator(file); StringBuilder sb = new StringBuilder(); // STRINGBUILDER_SIZE while (it.hasNext()) { sb.append(it.nextLine()); sb.append("\n"); }/*from w ww. ja v a 2 s . c o m*/ it.close(); return sb; }
From source file:org.adf.emg.sonar.ojaudit.JavaMetricsDecorator.java
@Override public void decorate(Resource resource, DecoratorContext context) { if (!(Qualifiers.isFile(resource) && resource.getName().endsWith(".java"))) { // only process .java files return;//from w w w. j av a2 s.c o m } ProjectFileSystem fileSystem = context.getProject().getFileSystem(); File file = lookup(resource, fileSystem); LineIterator iterator = null; int numLines = 0; int numBlankLines = 0; int numCommentLines = 0; try { Charset charset = fileSystem.getSourceCharset(); iterator = charset == null ? FileUtils.lineIterator(file) : FileUtils.lineIterator(file, charset.name()); boolean inComment = false; while (iterator.hasNext()) { String trimmedLine = iterator.nextLine().trim(); numLines++; boolean lineHasCode = false; boolean lineHasComment = false; while (!trimmedLine.isEmpty()) { if (inComment) { // in a comment. try to find end marker int endIndex = trimmedLine.indexOf(END_COMMENT); if (endIndex == -1) { // (rest of) line is comment lineHasComment = true; trimmedLine = ""; // remove comment } else { // remove comment to see if there is code after it lineHasComment = true; trimmedLine = trimmedLine.substring(endIndex + END_COMMENT.length()); inComment = false; } } else { // not in a comment if (trimmedLine.startsWith("//")) { trimmedLine = ""; continue; } // try to find begin comment marker int startIndex = trimmedLine.indexOf(START_COMMENT); if (startIndex == -1) { // (rest of) line is non-comment lineHasCode = true; trimmedLine = ""; // remove non-comment } else if (startIndex == 0) { // line starts with start marker inComment = true; trimmedLine = trimmedLine.substring(startIndex + START_COMMENT.length()); } else { // line contains start marker lineHasCode = true; inComment = true; trimmedLine = trimmedLine.substring(startIndex + START_COMMENT.length()); } } trimmedLine = trimmedLine.trim(); } if (!lineHasCode) { if (lineHasComment) { numCommentLines++; } else { numBlankLines++; } } } } catch (IOException e) { LOG.warn("error reading " + file + " to 
collect metrics", e); } finally { LineIterator.closeQuietly(iterator); } context.saveMeasure(CoreMetrics.LINES, (double) numLines); // Lines context.saveMeasure(CoreMetrics.COMMENT_LINES, (double) numCommentLines); // Non Commenting Lines of Code context.saveMeasure(CoreMetrics.NCLOC, (double) numLines - numBlankLines - numCommentLines); // Comment Lines }
From source file:org.adf.emg.sonar.ojaudit.XmlMetricsDecorator.java
@Override public void decorate(Resource resource, DecoratorContext context) { if (!Qualifiers.isFile(resource)) { return;//from www . j a v a 2s . co m } ProjectFileSystem fileSystem = context.getProject().getFileSystem(); File file = lookup(resource, fileSystem); try { if (readFirstByte(file) != '<') { return; } } catch (IOException e) { throw new SonarException(e); } int numCommentLines; CountCommentParser commentCounter = new CountCommentParser(); try { numCommentLines = commentCounter.countLinesOfComment(FileUtils.openInputStream(file)); if (numCommentLines == -1) { return; } } catch (IOException e) { throw new SonarException(e); } LineIterator iterator = null; int numLines = 0; int numBlankLines = 0; try { Charset charset = fileSystem.getSourceCharset(); iterator = charset == null ? FileUtils.lineIterator(file) : FileUtils.lineIterator(file, charset.name()); while (iterator.hasNext()) { String line = iterator.nextLine(); numLines++; if (line.trim().isEmpty()) { numBlankLines++; } } } catch (IOException e) { LOG.warn("error reading " + file + " to collect metrics", e); } finally { LineIterator.closeQuietly(iterator); } context.saveMeasure(CoreMetrics.LINES, (double) numLines); // Lines context.saveMeasure(CoreMetrics.COMMENT_LINES, (double) numCommentLines); // Non Commenting Lines of Code context.saveMeasure(CoreMetrics.NCLOC, (double) numLines - numBlankLines - numCommentLines); // Comment Lines }
From source file:org.apache.accumulo.test.AuditMessageIT.java
/** * Returns a List of Audit messages that have been grep'd out of the MiniAccumuloCluster output. * * @param stepName/*w w w . j a v a 2 s .co m*/ * A unique name for the test being executed, to identify the System.out messages. * @return A List of the Audit messages, sorted (so in chronological order). */ private ArrayList<String> getAuditMessages(String stepName) throws IOException { // ACCUMULO-3144 Make sure we give the processes enough time to flush the write buffer try { Thread.sleep(2000); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IOException("Interrupted waiting for data to be flushed to output streams"); } for (MiniAccumuloClusterImpl.LogWriter lw : getCluster().getLogWriters()) { lw.flush(); } // Grab the audit messages System.out.println("Start of captured audit messages for step " + stepName); ArrayList<String> result = new ArrayList<String>(); File[] files = getCluster().getConfig().getLogDir().listFiles(); assertNotNull(files); for (File file : files) { // We want to grab the files called .out if (file.getName().contains(".out") && file.isFile() && file.canRead()) { LineIterator it = FileUtils.lineIterator(file, UTF_8.name()); try { while (it.hasNext()) { String line = it.nextLine(); if (line.matches(".* \\[" + AuditedSecurityOperation.AUDITLOG + "\\s*\\].*")) { // Only include the message if startTimestamp is null. or the message occurred after the startTimestamp value if ((lastAuditTimestamp == null) || (line.substring(0, 23).compareTo(lastAuditTimestamp) > 0)) result.add(line); } } } finally { LineIterator.closeQuietly(it); } } } Collections.sort(result); for (String s : result) { System.out.println(s); } System.out.println("End of captured audit messages for step " + stepName); if (result.size() > 0) lastAuditTimestamp = (result.get(result.size() - 1)).substring(0, 23); return result; }
From source file:org.apache.accumulo.test.AuditMessageIT.java
@Test public void testImportExportOperationsAudits() throws AccumuloSecurityException, AccumuloException, TableExistsException, TableNotFoundException, IOException, InterruptedException { conn.securityOperations().createLocalUser(AUDIT_USER_1, new PasswordToken(PASSWORD)); conn.securityOperations().grantSystemPermission(AUDIT_USER_1, SystemPermission.SYSTEM); conn.securityOperations().changeUserAuthorizations(AUDIT_USER_1, auths); grantEverySystemPriv(conn, AUDIT_USER_1); // Connect as Audit User and do a bunch of stuff. // Start testing activities here auditConnector = getCluster().getConnector(AUDIT_USER_1, new PasswordToken(PASSWORD)); auditConnector.tableOperations().create(OLD_TEST_TABLE_NAME); // Insert some play data BatchWriter bw = auditConnector.createBatchWriter(OLD_TEST_TABLE_NAME, new BatchWriterConfig()); Mutation m = new Mutation("myRow"); m.put("cf1", "cq1", "v1"); m.put("cf1", "cq2", "v3"); bw.addMutation(m);/* ww w .j a va 2s . c o m*/ bw.close(); // Prepare to export the table File exportDir = new File(getCluster().getConfig().getDir().toString() + "/export"); auditConnector.tableOperations().offline(OLD_TEST_TABLE_NAME); auditConnector.tableOperations().exportTable(OLD_TEST_TABLE_NAME, exportDir.toString()); // We've exported the table metadata to the MiniAccumuloCluster root dir. Grab the .rf file path to re-import it File distCpTxt = new File(exportDir.toString() + "/distcp.txt"); File importFile = null; LineIterator it = FileUtils.lineIterator(distCpTxt, UTF_8.name()); // Just grab the first rf file, it will do for now. 
String filePrefix = "file:"; try { while (it.hasNext() && importFile == null) { String line = it.nextLine(); if (line.matches(".*\\.rf")) { importFile = new File(line.replaceFirst(filePrefix, "")); } } } finally { LineIterator.closeQuietly(it); } FileUtils.copyFileToDirectory(importFile, exportDir); auditConnector.tableOperations().importTable(NEW_TEST_TABLE_NAME, exportDir.toString()); // Now do a Directory (bulk) import of the same data. auditConnector.tableOperations().create(THIRD_TEST_TABLE_NAME); File failDir = new File(exportDir + "/tmp"); assertTrue(failDir.mkdirs() || failDir.isDirectory()); auditConnector.tableOperations().importDirectory(THIRD_TEST_TABLE_NAME, exportDir.toString(), failDir.toString(), false); auditConnector.tableOperations().online(OLD_TEST_TABLE_NAME); // Stop testing activities here ArrayList<String> auditMessages = getAuditMessages("testImportExportOperationsAudits"); assertEquals(1, findAuditMessage(auditMessages, String .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME)) .size()); assertEquals(1, findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE, "offlineTable", OLD_TEST_TABLE_NAME)).size()); assertEquals(1, findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_EXPORT_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME, exportDir.toString())).size()); assertEquals(1, findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_IMPORT_AUDIT_TEMPLATE, NEW_TEST_TABLE_NAME, filePrefix + exportDir.toString())).size()); assertEquals(1, findAuditMessage(auditMessages, String .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME)) .size()); assertEquals(1, findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_BULK_IMPORT_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME, filePrefix + exportDir.toString(), filePrefix + failDir.toString())).size()); assertEquals(1, 
findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE, "onlineTable", OLD_TEST_TABLE_NAME)).size()); }
From source file:org.apache.accumulo.test.AuditMessageTest.java
/** * Returns a List of Audit messages that have been grep'd out of the MiniAccumuloCluster output. * /*from www .j a va 2s . c o m*/ * @param stepName * A unique name for the test being executed, to identify the System.out messages. * @return A List of the Audit messages, sorted (so in chronological order). */ private ArrayList<String> getAuditMessages(String stepName) throws IOException { for (MiniAccumuloCluster.LogWriter lw : logWriters) { lw.flush(); } // Grab the audit messages System.out.println("Start of captured audit messages for step " + stepName); ArrayList<String> result = new ArrayList<String>(); for (File file : logDir.listFiles()) { // We want to grab the files called .out if (file.getName().contains(".out") && file.isFile() && file.canRead()) { LineIterator it = FileUtils.lineIterator(file, Constants.UTF8.name()); try { while (it.hasNext()) { String line = it.nextLine(); if (line.matches(".* \\[" + AuditedSecurityOperation.AUDITLOG + "\\s*\\].*")) { // Only include the message if startTimestamp is null. or the message occurred after the startTimestamp value if ((lastAuditTimestamp == null) || (line.substring(0, 23).compareTo(lastAuditTimestamp) > 0)) result.add(line); } } } finally { LineIterator.closeQuietly(it); } } } Collections.sort(result); for (String s : result) { System.out.println(s); } System.out.println("End of captured audit messages for step " + stepName); if (result.size() > 0) lastAuditTimestamp = (result.get(result.size() - 1)).substring(0, 23); return result; }
From source file:org.apache.accumulo.test.AuditMessageTest.java
@Test(timeout = 60 * 1000) public void testImportExportOperationsAudits() throws AccumuloSecurityException, AccumuloException, TableExistsException, TableNotFoundException, IOException, InterruptedException { conn.securityOperations().createLocalUser(AUDIT_USER_1, new PasswordToken(PASSWORD)); conn.securityOperations().grantSystemPermission(AUDIT_USER_1, SystemPermission.SYSTEM); conn.securityOperations().changeUserAuthorizations(AUDIT_USER_1, auths); grantEverySystemPriv(conn, AUDIT_USER_1); // Connect as Audit User and do a bunch of stuff. // Start testing activities here auditConnector = accumulo.getConnector(AUDIT_USER_1, PASSWORD); auditConnector.tableOperations().create(OLD_TEST_TABLE_NAME); // Insert some play data BatchWriter bw = auditConnector.createBatchWriter(OLD_TEST_TABLE_NAME, new BatchWriterConfig()); Mutation m = new Mutation("myRow"); m.put("cf1", "cq1", "v1"); m.put("cf1", "cq2", "v3"); bw.addMutation(m);/*w w w . j a v a 2s.co m*/ bw.close(); // Prepare to export the table File exportDir = new File(accumulo.getConfig().getDir().toString() + "/export"); auditConnector.tableOperations().offline(OLD_TEST_TABLE_NAME); auditConnector.tableOperations().exportTable(OLD_TEST_TABLE_NAME, exportDir.toString()); // We've exported the table metadata to the MiniAccumuloCluster root dir. Grab the .rf file path to re-import it File distCpTxt = new File(exportDir.toString() + "/distcp.txt"); File importFile = null; LineIterator it = FileUtils.lineIterator(distCpTxt, Constants.UTF8.name()); // Just grab the first rf file, it will do for now. 
String filePrefix = "file:"; try { while (it.hasNext() && importFile == null) { String line = it.nextLine(); if (line.matches(".*\\.rf")) { importFile = new File(line.replaceFirst(filePrefix, "")); } } } finally { LineIterator.closeQuietly(it); } FileUtils.copyFileToDirectory(importFile, exportDir); auditConnector.tableOperations().importTable(NEW_TEST_TABLE_NAME, exportDir.toString()); // Now do a Directory (bulk) import of the same data. auditConnector.tableOperations().create(THIRD_TEST_TABLE_NAME); File failDir = new File(exportDir + "/tmp"); failDir.mkdirs(); auditConnector.tableOperations().importDirectory(THIRD_TEST_TABLE_NAME, exportDir.toString(), failDir.toString(), false); auditConnector.tableOperations().online(OLD_TEST_TABLE_NAME); // Stop testing activities here ArrayList<String> auditMessages = getAuditMessages("testImportExportOperationsAudits"); assertEquals(1, findAuditMessage(auditMessages, String .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME)) .size()); assertEquals(1, findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE, "offlineTable", OLD_TEST_TABLE_NAME)).size()); assertEquals(1, findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_EXPORT_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME, exportDir.toString())).size()); assertEquals(1, findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_IMPORT_AUDIT_TEMPLATE, NEW_TEST_TABLE_NAME, exportDir.toString())).size()); assertEquals(1, findAuditMessage(auditMessages, String .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME)) .size()); assertEquals(1, findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_BULK_IMPORT_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME, filePrefix + exportDir.toString(), filePrefix + failDir.toString())).size()); assertEquals(1, findAuditMessage(auditMessages, 
String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE, "onlineTable", OLD_TEST_TABLE_NAME)).size()); }