Example usage for org.apache.commons.io LineIterator closeQuietly

List of usage examples for org.apache.commons.io LineIterator closeQuietly

Introduction

On this page you can find example usages for org.apache.commons.io LineIterator closeQuietly.

Prototype

public static void closeQuietly(LineIterator iterator) 

Source Link

Document

Closes the iterator, handling null and ignoring exceptions.

Usage

From source file:org.adf.emg.sonar.ojaudit.XmlMetricsDecorator.java

/**
 * Computes line-based metrics (total lines, comment lines, NCLOC) for an XML
 * file resource and saves them on the decorator context. Non-file resources
 * and files that do not start with '<' are skipped.
 */
@Override
public void decorate(Resource resource, DecoratorContext context) {
    if (!Qualifiers.isFile(resource)) {
        return; // metrics are only collected at the file level
    }
    ProjectFileSystem fileSystem = context.getProject().getFileSystem();
    File file = lookup(resource, fileSystem);

    // Only process files that look like XML (first byte is '<').
    try {
        if (readFirstByte(file) != '<') {
            return;
        }
    } catch (IOException e) {
        throw new SonarException(e);
    }

    // Count comment lines; -1 appears to signal the parser could not process
    // the file, in which case no metrics are saved.
    int numCommentLines;
    CountCommentParser commentCounter = new CountCommentParser();
    try {
        numCommentLines = commentCounter.countLinesOfComment(FileUtils.openInputStream(file));
        if (numCommentLines == -1) {
            return;
        }
    } catch (IOException e) {
        throw new SonarException(e);
    }

    LineIterator iterator = null;
    int numLines = 0;
    int numBlankLines = 0;
    try {
        // Use the project source charset when available; otherwise fall back
        // to the platform default used by FileUtils.lineIterator(File).
        Charset charset = fileSystem.getSourceCharset();
        iterator = charset == null ? FileUtils.lineIterator(file)
                : FileUtils.lineIterator(file, charset.name());
        while (iterator.hasNext()) {
            String line = iterator.nextLine();
            numLines++;
            if (line.trim().isEmpty()) {
                numBlankLines++;
            }
        }
    } catch (IOException e) {
        LOG.warn("error reading " + file + " to collect metrics", e);
    } finally {
        LineIterator.closeQuietly(iterator);
    }

    context.saveMeasure(CoreMetrics.LINES, (double) numLines); // total physical lines
    context.saveMeasure(CoreMetrics.COMMENT_LINES, (double) numCommentLines); // comment lines
    context.saveMeasure(CoreMetrics.NCLOC, (double) numLines - numBlankLines - numCommentLines); // non-comment, non-blank lines of code
}

From source file:org.apache.accumulo.test.AuditMessageIT.java

/**
 * Collects the audit messages that have been grep'd out of the
 * MiniAccumuloCluster output files.
 *
 * @param stepName
 *          A unique name for the test being executed, to identify the System.out messages.
 * @return A List of the Audit messages, sorted (so in chronological order).
 */
private ArrayList<String> getAuditMessages(String stepName) throws IOException {
    // ACCUMULO-3144 Make sure we give the processes enough time to flush the write buffer
    try {
        Thread.sleep(2000);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new IOException("Interrupted waiting for data to be flushed to output streams");
    }

    for (MiniAccumuloClusterImpl.LogWriter lw : getCluster().getLogWriters()) {
        lw.flush();
    }

    System.out.println("Start of captured audit messages for step " + stepName);

    ArrayList<String> result = new ArrayList<>();
    File[] logDirFiles = getCluster().getConfig().getLogDir().listFiles();
    assertNotNull(logDirFiles);
    for (File logFile : logDirFiles) {
        // Only the readable .out files contain the captured audit output.
        if (!logFile.getName().contains(".out") || !logFile.isFile() || !logFile.canRead()) {
            continue;
        }
        LineIterator lineIter = FileUtils.lineIterator(logFile, UTF_8.name());
        try {
            while (lineIter.hasNext()) {
                String line = lineIter.nextLine();
                if (!line.matches(".* \\[" + AuditedSecurityOperation.AUDITLOG + "\\s*\\].*")) {
                    continue;
                }
                // Keep the message when no timestamp cutoff is set, or when it
                // occurred after the last recorded audit timestamp.
                if (lastAuditTimestamp == null
                        || line.substring(0, 23).compareTo(lastAuditTimestamp) > 0) {
                    result.add(line);
                }
            }
        } finally {
            LineIterator.closeQuietly(lineIter);
        }
    }
    Collections.sort(result);

    for (String message : result) {
        System.out.println(message);
    }
    System.out.println("End of captured audit messages for step " + stepName);
    if (result.size() > 0) {
        lastAuditTimestamp = result.get(result.size() - 1).substring(0, 23);
    }

    return result;
}

From source file:org.apache.accumulo.test.AuditMessageIT.java

/**
 * End-to-end check that table export/import operations (create, offline,
 * export, import, bulk import, online) each emit the expected audit message.
 */
@Test
public void testImportExportOperationsAudits() throws AccumuloSecurityException, AccumuloException,
        TableExistsException, TableNotFoundException, IOException, InterruptedException {

    // Create the audit user with full system privileges and authorizations.
    conn.securityOperations().createLocalUser(AUDIT_USER_1, new PasswordToken(PASSWORD));
    conn.securityOperations().grantSystemPermission(AUDIT_USER_1, SystemPermission.SYSTEM);
    conn.securityOperations().changeUserAuthorizations(AUDIT_USER_1, auths);
    grantEverySystemPriv(conn, AUDIT_USER_1);

    // Connect as Audit User and do a bunch of stuff.
    // Start testing activities here
    auditConnector = getCluster().getConnector(AUDIT_USER_1, new PasswordToken(PASSWORD));
    auditConnector.tableOperations().create(OLD_TEST_TABLE_NAME);

    // Insert some play data
    BatchWriter bw = auditConnector.createBatchWriter(OLD_TEST_TABLE_NAME, new BatchWriterConfig());
    Mutation m = new Mutation("myRow");
    m.put("cf1", "cq1", "v1");
    m.put("cf1", "cq2", "v3");
    bw.addMutation(m);
    bw.close();

    // Prepare to export the table
    File exportDir = new File(getCluster().getConfig().getDir().toString() + "/export");

    auditConnector.tableOperations().offline(OLD_TEST_TABLE_NAME);
    auditConnector.tableOperations().exportTable(OLD_TEST_TABLE_NAME, exportDir.toString());

    // We've exported the table metadata to the MiniAccumuloCluster root dir. Grab the .rf file path to re-import it
    File distCpTxt = new File(exportDir.toString() + "/distcp.txt");
    File importFile = null;
    LineIterator it = FileUtils.lineIterator(distCpTxt, UTF_8.name());

    // Just grab the first rf file, it will do for now.
    String filePrefix = "file:";
    try {
        while (it.hasNext() && importFile == null) {
            String line = it.nextLine();
            if (line.matches(".*\\.rf")) {
                importFile = new File(line.replaceFirst(filePrefix, ""));
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    FileUtils.copyFileToDirectory(importFile, exportDir);
    auditConnector.tableOperations().importTable(NEW_TEST_TABLE_NAME, exportDir.toString());

    // Now do a Directory (bulk) import of the same data.
    auditConnector.tableOperations().create(THIRD_TEST_TABLE_NAME);
    File failDir = new File(exportDir + "/tmp");
    assertTrue(failDir.mkdirs() || failDir.isDirectory());
    auditConnector.tableOperations().importDirectory(THIRD_TEST_TABLE_NAME, exportDir.toString(),
            failDir.toString(), false);
    auditConnector.tableOperations().online(OLD_TEST_TABLE_NAME);

    // Stop testing activities here

    ArrayList<String> auditMessages = getAuditMessages("testImportExportOperationsAudits");

    // Exactly one audit message is expected per table operation performed above.
    assertEquals(1,
            findAuditMessage(auditMessages, String
                    .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME))
                            .size());
    assertEquals(1,
            findAuditMessage(auditMessages,
                    String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE,
                            "offlineTable", OLD_TEST_TABLE_NAME)).size());
    assertEquals(1,
            findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_EXPORT_AUDIT_TEMPLATE,
                    OLD_TEST_TABLE_NAME, exportDir.toString())).size());
    assertEquals(1,
            findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_IMPORT_AUDIT_TEMPLATE,
                    NEW_TEST_TABLE_NAME, filePrefix + exportDir.toString())).size());
    assertEquals(1,
            findAuditMessage(auditMessages, String
                    .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME))
                            .size());
    assertEquals(1,
            findAuditMessage(auditMessages,
                    String.format(AuditedSecurityOperation.CAN_BULK_IMPORT_AUDIT_TEMPLATE,
                            THIRD_TEST_TABLE_NAME, filePrefix + exportDir.toString(),
                            filePrefix + failDir.toString())).size());
    assertEquals(1,
            findAuditMessage(auditMessages,
                    String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE,
                            "onlineTable", OLD_TEST_TABLE_NAME)).size());

}

From source file:org.apache.accumulo.test.AuditMessageTest.java

/**
 * Collects the audit messages that have been grep'd out of the
 * MiniAccumuloCluster output files.
 *
 * @param stepName
 *          A unique name for the test being executed, to identify the System.out messages.
 * @return A List of the Audit messages, sorted (so in chronological order).
 */
private ArrayList<String> getAuditMessages(String stepName) throws IOException {

    for (MiniAccumuloCluster.LogWriter lw : logWriters) {
        lw.flush();
    }

    System.out.println("Start of captured audit messages for step " + stepName);

    ArrayList<String> result = new ArrayList<>();
    for (File logFile : logDir.listFiles()) {
        // Only the readable .out files contain the captured audit output.
        if (!logFile.getName().contains(".out") || !logFile.isFile() || !logFile.canRead()) {
            continue;
        }
        LineIterator lineIter = FileUtils.lineIterator(logFile, Constants.UTF8.name());
        try {
            while (lineIter.hasNext()) {
                String line = lineIter.nextLine();
                if (!line.matches(".* \\[" + AuditedSecurityOperation.AUDITLOG + "\\s*\\].*")) {
                    continue;
                }
                // Keep the message when no timestamp cutoff is set, or when it
                // occurred after the last recorded audit timestamp.
                if (lastAuditTimestamp == null
                        || line.substring(0, 23).compareTo(lastAuditTimestamp) > 0) {
                    result.add(line);
                }
            }
        } finally {
            LineIterator.closeQuietly(lineIter);
        }
    }
    Collections.sort(result);

    for (String message : result) {
        System.out.println(message);
    }
    System.out.println("End of captured audit messages for step " + stepName);
    if (result.size() > 0) {
        lastAuditTimestamp = result.get(result.size() - 1).substring(0, 23);
    }

    return result;
}

From source file:org.apache.accumulo.test.AuditMessageTest.java

/**
 * End-to-end check that table export/import operations (create, offline,
 * export, import, bulk import, online) each emit the expected audit message.
 */
@Test(timeout = 60 * 1000)
public void testImportExportOperationsAudits() throws AccumuloSecurityException, AccumuloException,
        TableExistsException, TableNotFoundException, IOException, InterruptedException {

    // Create the audit user with full system privileges and authorizations.
    conn.securityOperations().createLocalUser(AUDIT_USER_1, new PasswordToken(PASSWORD));
    conn.securityOperations().grantSystemPermission(AUDIT_USER_1, SystemPermission.SYSTEM);
    conn.securityOperations().changeUserAuthorizations(AUDIT_USER_1, auths);
    grantEverySystemPriv(conn, AUDIT_USER_1);

    // Connect as Audit User and do a bunch of stuff.
    // Start testing activities here
    auditConnector = accumulo.getConnector(AUDIT_USER_1, PASSWORD);
    auditConnector.tableOperations().create(OLD_TEST_TABLE_NAME);

    // Insert some play data
    BatchWriter bw = auditConnector.createBatchWriter(OLD_TEST_TABLE_NAME, new BatchWriterConfig());
    Mutation m = new Mutation("myRow");
    m.put("cf1", "cq1", "v1");
    m.put("cf1", "cq2", "v3");
    bw.addMutation(m);
    bw.close();

    // Prepare to export the table
    File exportDir = new File(accumulo.getConfig().getDir().toString() + "/export");

    auditConnector.tableOperations().offline(OLD_TEST_TABLE_NAME);
    auditConnector.tableOperations().exportTable(OLD_TEST_TABLE_NAME, exportDir.toString());

    // We've exported the table metadata to the MiniAccumuloCluster root dir. Grab the .rf file path to re-import it
    File distCpTxt = new File(exportDir.toString() + "/distcp.txt");
    File importFile = null;
    LineIterator it = FileUtils.lineIterator(distCpTxt, Constants.UTF8.name());

    // Just grab the first rf file, it will do for now.
    String filePrefix = "file:";
    try {
        while (it.hasNext() && importFile == null) {
            String line = it.nextLine();
            if (line.matches(".*\\.rf")) {
                importFile = new File(line.replaceFirst(filePrefix, ""));
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    FileUtils.copyFileToDirectory(importFile, exportDir);
    auditConnector.tableOperations().importTable(NEW_TEST_TABLE_NAME, exportDir.toString());

    // Now do a Directory (bulk) import of the same data.
    auditConnector.tableOperations().create(THIRD_TEST_TABLE_NAME);
    File failDir = new File(exportDir + "/tmp");
    // Fail fast if the failure directory cannot be created (the return value of
    // mkdirs() was previously ignored; the IT variant of this test asserts it).
    assertTrue(failDir.mkdirs() || failDir.isDirectory());
    auditConnector.tableOperations().importDirectory(THIRD_TEST_TABLE_NAME, exportDir.toString(),
            failDir.toString(), false);
    auditConnector.tableOperations().online(OLD_TEST_TABLE_NAME);

    // Stop testing activities here

    ArrayList<String> auditMessages = getAuditMessages("testImportExportOperationsAudits");

    // Exactly one audit message is expected per table operation performed above.
    assertEquals(1,
            findAuditMessage(auditMessages, String
                    .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, OLD_TEST_TABLE_NAME))
                            .size());
    assertEquals(1,
            findAuditMessage(auditMessages,
                    String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE,
                            "offlineTable", OLD_TEST_TABLE_NAME)).size());
    assertEquals(1,
            findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_EXPORT_AUDIT_TEMPLATE,
                    OLD_TEST_TABLE_NAME, exportDir.toString())).size());
    assertEquals(1,
            findAuditMessage(auditMessages, String.format(AuditedSecurityOperation.CAN_IMPORT_AUDIT_TEMPLATE,
                    NEW_TEST_TABLE_NAME, exportDir.toString())).size());
    assertEquals(1,
            findAuditMessage(auditMessages, String
                    .format(AuditedSecurityOperation.CAN_CREATE_TABLE_AUDIT_TEMPLATE, THIRD_TEST_TABLE_NAME))
                            .size());
    assertEquals(1,
            findAuditMessage(auditMessages,
                    String.format(AuditedSecurityOperation.CAN_BULK_IMPORT_AUDIT_TEMPLATE,
                            THIRD_TEST_TABLE_NAME, filePrefix + exportDir.toString(),
                            filePrefix + failDir.toString())).size());
    assertEquals(1,
            findAuditMessage(auditMessages,
                    String.format(AuditedSecurityOperation.CAN_ONLINE_OFFLINE_TABLE_AUDIT_TEMPLATE,
                            "onlineTable", OLD_TEST_TABLE_NAME)).size());

}

From source file:org.apache.jackrabbit.oak.plugins.blob.MarkSweepGarbageCollector.java

/**
 * Sweep phase of gc candidate deletion.
 * <p>/*from   ww w  .  j  ava2 s. c  o m*/
 * Performs the following steps depending upon the type of the blob store refer
 * {@link org.apache.jackrabbit.oak.plugins.blob.SharedDataStore.Type}:
 *
 * <ul>
 *     <li>Shared</li>
 *     <li>
 *     <ul>
 *      <li> Merge all marked references (from the mark phase run independently) available in the data store meta
 *          store (from all configured independent repositories).
 *      <li> Retrieve all blob ids available.
 *      <li> Diffs the 2 sets above to retrieve list of blob ids not used.
 *      <li> Deletes only blobs created after
 *          (earliest time stamp of the marked references - #maxLastModifiedInterval) from the above set.
 *     </ul>
 *     </li>
 *
 *     <li>Default</li>
 *     <li>
 *     <ul>
 *      <li> Mark phase already run.
 *      <li> Retrieve all blob ids available.
 *      <li> Diffs the 2 sets above to retrieve list of blob ids not used.
 *      <li> Deletes only blobs created after
 *          (time stamp of the marked references - #maxLastModifiedInterval).
 *     </ul>
 *     </li>
 * </ul>
 *
 * @return the number of blobs deleted
 * @throws Exception the exception
 * @param fs the garbage collector file state
 * @param markStart the start time of mark to take as reference for deletion
 */
protected long sweep(GarbageCollectorFileState fs, long markStart) throws Exception {
    long earliestRefAvailTime;
    // Merge all the blob references available from all the reference files in the data store meta store
    // Only go ahead if merge succeeded
    try {
        earliestRefAvailTime = GarbageCollectionType.get(blobStore).mergeAllMarkedReferences(blobStore, fs);
        LOG.debug("Earliest reference available for timestamp [{}]", earliestRefAvailTime);
        earliestRefAvailTime = (earliestRefAvailTime < markStart ? earliestRefAvailTime : markStart);
    } catch (Exception e) {
        return 0;
    }

    // Find all blob references after iterating over the whole repository
    (new BlobIdRetriever(fs)).call();

    // Calculate the references not used
    difference(fs);
    long count = 0;
    long deleted = 0;

    long lastMaxModifiedTime = getLastMaxModifiedTime(earliestRefAvailTime);
    LOG.debug("Starting sweep phase of the garbage collector");
    LOG.debug("Sweeping blobs with modified time > than the configured max deleted time ({}). ",
            timestampToString(lastMaxModifiedTime));

    ConcurrentLinkedQueue<String> exceptionQueue = new ConcurrentLinkedQueue<String>();

    LineIterator iterator = FileUtils.lineIterator(fs.getGcCandidates(), Charsets.UTF_8.name());
    List<String> ids = newArrayList();

    while (iterator.hasNext()) {
        ids.add(iterator.next());

        if (ids.size() >= getBatchCount()) {
            count += ids.size();
            deleted += sweepInternal(ids, exceptionQueue, lastMaxModifiedTime);
            ids = newArrayList();
        }
    }
    if (!ids.isEmpty()) {
        count += ids.size();
        deleted += sweepInternal(ids, exceptionQueue, lastMaxModifiedTime);
    }

    BufferedWriter writer = null;
    try {
        if (!exceptionQueue.isEmpty()) {
            writer = Files.newWriter(fs.getGarbage(), Charsets.UTF_8);
            saveBatchToFile(newArrayList(exceptionQueue), writer);
        }
    } finally {
        LineIterator.closeQuietly(iterator);
        IOUtils.closeQuietly(writer);
    }

    if (!exceptionQueue.isEmpty()) {
        LOG.warn(
                "Unable to delete some blobs entries from the blob store. Details around such blob entries can "
                        + "be found in [{}]",
                fs.getGarbage().getAbsolutePath());
    }
    if (count != deleted) {
        LOG.warn(
                "Deleted only [{}] blobs entries from the [{}] candidates identified. This may happen if blob "
                        + "modified time is > " + "than the max deleted time ({})",
                deleted, count, timestampToString(lastMaxModifiedTime));
    }

    // Remove all the merged marked references
    GarbageCollectionType.get(blobStore).removeAllMarkedReferences(blobStore);
    LOG.debug("Ending sweep phase of the garbage collector");
    return deleted;
}

From source file:org.bml.util.io.IOUtils.java

/**
 * This is just a helper method to keep all the closeQuietly methods in one
 * place/*from w  ww  . ja va2 s.  c  o m*/
 *
 * @param theLineIterator an open or null LineIterator
 */
public static void closeQuietly(LineIterator theLineIterator) {
    if (theLineIterator == null) {
        return;
    }
    LineIterator.closeQuietly(theLineIterator);
}

From source file:org.cleverbus.admin.services.log.LogEventParsingIterator.java

/**
 * Returns a line iterator to process next/current file, opening the next file, if necessary.
 *
 * @return line iterator for the current/next file; or null if no files left
 */
private LineIterator getLineIterator() throws IOException {
    if (lineIterator == null || !lineIterator.hasNext()) {
        // The current iterator is exhausted (or absent); discard it.
        LineIterator.closeQuietly(lineIterator);
        lineIterator = null;

        if (fileEventsFound == 0) {
            Log.debug("No events in the last file, closing prematurely");
            close(); // optimize: last file had no events, no point continuing
        } else if (!logFiles.isEmpty()) {
            File nextFile = logFiles.poll();
            Log.debug("Opening {}", nextFile);
            lineIterator = FileUtils.lineIterator(nextFile);
            fileEventsFound = 0; // restart per-file counter
        }
    }
    return lineIterator;
}

From source file:org.cleverbus.admin.services.log.LogEventParsingIterator.java

@Override
public void close() {
    // Drop any queued files first so no further iterators will be opened,
    // then release the current iterator (closeQuietly tolerates null).
    logFiles.clear();
    LineIterator.closeQuietly(lineIterator);
    lineIterator = null;
    Log.debug("Closed");
}

From source file:org.ednovo.data.handlers.FileInputProcessor.java

/**
 * Walks every file under the configured input directory ("file-path") whose
 * name matches the configured wildcard ("path-pattern") and forwards each
 * line to the next row handler in the chain.
 *
 * @param row trigger object from the upstream handler (not read here)
 * @throws Exception if the next handler fails while processing a line
 */
@Override
public void handleRow(Object row) throws Exception {
    File folder = new File(fileInputData.get("file-path"));
    Collection<File> files = FileUtils.listFiles(folder,
            new WildcardFileFilter(fileInputData.get("path-pattern")), DirectoryFileFilter.DIRECTORY);
    StopWatch sw = new StopWatch();
    for (final File file : files) {
        LOG.info("processing file {}", file.getAbsolutePath());
        sw.start();
        long lines = 0;
        try {
            LineIterator it = FileUtils.lineIterator(file, "UTF-8");
            try {
                while (it.hasNext()) {
                    final String line = it.nextLine();

                    // Send the row to the next process handler.
                    getNextRowHandler().processRow(line);

                    lines++;
                    if (lines % 1000 == 0) {
                        LOG.info("file-lines: {} ", lines);
                    }
                }
            } finally {
                LineIterator.closeQuietly(it);
            }
        } catch (IOException e) {
            LOG.error("Error processing file {} ", file.getAbsolutePath(), e);
        }
        sw.stop("file:" + file.getAbsolutePath() + ": lines= " + lines + " ");
        // Cast directly instead of the previous Integer.parseInt(lines + ""),
        // which allocated a throwaway String and threw NumberFormatException
        // for counts beyond Integer.MAX_VALUE.
        LOG.info(sw.toString((int) lines));
    }
}