Example usage for org.apache.commons.io LineIterator nextLine

Introduction

On this page you can find example usage of org.apache.commons.io.LineIterator#nextLine().

Prototype

public String nextLine() 

Document

Returns the next line in the wrapped Reader.
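
A minimal, self-contained sketch of the usual nextLine() loop is shown below (the file path and charset are placeholders, not taken from any of the examples that follow): open the iterator with FileUtils.lineIterator, read lines with hasNext()/nextLine(), and release the underlying Reader in a finally block, as most of the examples on this page do.

import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.LineIterator;

public class NextLineSketch {
    public static void main(String[] args) throws IOException {
        File inputFile = new File("input.txt"); // placeholder path
        LineIterator it = FileUtils.lineIterator(inputFile, "UTF-8");
        try {
            while (it.hasNext()) {
                // nextLine() returns the next line from the wrapped Reader.
                String line = it.nextLine();
                System.out.println(line);
            }
        } finally {
            // Close the underlying Reader; closeQuietly swallows any IOException.
            LineIterator.closeQuietly(it);
        }
    }
}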

Usage

From source file:org.wikimedia.analytics.varnishkafka.Cli.java

private Integer writeProtobufOutput() {
    int n = 0;
    try {
        LineIterator it = FileUtils.lineIterator(inputFile, "UTF-8");
        File outputFile = new File(cwd.getPath(), "test." + getFormat());
        outputFile.delete();
        OutputStream out = new FileOutputStream(outputFile);
        BufferedOutputStream bos = new BufferedOutputStream(out);
        SnappyOutputStream snappyOutputStream = null;

        if (compress) {
            snappyOutputStream = new SnappyOutputStream(bos);
        }

        log.info("Output file path: " + outputFile.toString());
        try {
            setStart(System.nanoTime());
            while (it.hasNext()) {
                n++;
                String line = it.nextLine();
                String[] fields = line.split("\\t");
                Logline.LogLine logline = Logline.LogLine.newBuilder().setKafkaOffset(Long.parseLong(fields[0]))
                        .setHost(fields[1]).setSeqNum(Long.parseLong(fields[2])).setTimestamp(fields[3])
                        .setResponse(Float.parseFloat(fields[4])).setIp(fields[5]).setHttpStatus(fields[6])
                        .setBytesSent(parseBytesSent(fields[7])).setRequestMethod(fields[8]).setUri(fields[9])
                        .setProxyHost(fields[10]).setMimeType(fields[11]).setReferer(fields[12])
                        .setXForwardedFor(fields[13]).setUserAgent(fields[14]).setAcceptLanguage(fields[15])
                        .setXAnalytics(fields[16]).build();

                if (compress) {
                    snappyOutputStream.write(logline.toByteArray());
                } else {
                    bos.write(logline.toByteArray());
                }
            }
            setEnd(System.nanoTime());
        } finally {
            LineIterator.closeQuietly(it);
            try {
                // Closing the outermost stream flushes and closes the whole chain.
                if (snappyOutputStream != null) {
                    snappyOutputStream.close();
                } else {
                    bos.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return n;
}

From source file:org.wikimedia.analytics.varnishkafka.Cli.java

private Integer writeAvroOutput() {
    Schema schema = null;
    int n = 0;

    try {
        InputStream inputStream = ClassLoader.getSystemClassLoader()
                .getResourceAsStream("WebRequest.avro.json");
        schema = new Schema.Parser().parse(inputStream);
        inputStream.close();

        File file = new File(cwd.getPath(), "test." + getFormat());
        log.info("Output file path: " + file.toString());
        file.delete();
        DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
        DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(writer);

        if (compress) {
            dataFileWriter.setCodec(CodecFactory.snappyCodec());
        }

        dataFileWriter.create(schema, file);

        try {
            LineIterator it = FileUtils.lineIterator(inputFile, "UTF-8");

            try {
                setStart(System.nanoTime());
                while (it.hasNext()) {
                    n++;
                    String line = it.nextLine();
                    String[] fields = line.split("\\t");

                    // Populate data
                    GenericRecord r = new GenericData.Record(schema);
                    r.put("kafka_offset", Long.parseLong(fields[0]));
                    r.put("host", fields[1]);
                    r.put("seq_num", Long.parseLong(fields[2]));
                    r.put("timestamp", fields[3]);
                    r.put("response", Float.parseFloat(fields[4]));
                    r.put("ip", fields[5]);
                    r.put("http_status", fields[6]);
                    r.put("bytes_sent", parseBytesSent(fields[7]));
                    r.put("request_method", fields[8]);
                    r.put("uri", fields[9]);
                    r.put("proxy_host", fields[10]);
                    r.put("mime_type", fields[11]);
                    r.put("referer", fields[12]);
                    r.put("x_forwarded_for", fields[13]);
                    r.put("user_agent", fields[14]);
                    r.put("accept_language", fields[15]);
                    r.put("x_analytics", fields[16]);
                    dataFileWriter.append(r);
                }

                setEnd(System.nanoTime());
            } finally {
                LineIterator.closeQuietly(it);
                dataFileWriter.flush();
                dataFileWriter.close();
            }
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return n;
}

From source file:org.wso2.identity.integration.test.user.mgt.UserImportLoggingTestCase.java

/**
 * Read the audit log file and extract the log entries as lines.
 *
 * @return : An Array List which contains audit log lines.
 * @throws IOException : If any error occurred while reading the file.
 */
private List<String> readAuditLogFile() throws IOException {

    List<String> bulkUserImportAuditLogs = new ArrayList<>();
    String auditLogFile = LOG_FILE_LOCATION + File.separatorChar + AUDIT_LOG_FILE_NAME;
    File auditFile = new File(auditLogFile);

    // Iterate through the file and read lines.
    LineIterator iterator = FileUtils.lineIterator(auditFile, ENCODING);

    try {
        while (iterator.hasNext()) {
            String auditLine = iterator.nextLine();

            if (StringUtils.contains(auditLine, BULK_USER_IMPORT_OP)) {
                bulkUserImportAuditLogs.add(auditLine);
            }
        }
    } finally {
        // Release the underlying Reader even if a read fails.
        LineIterator.closeQuietly(iterator);
    }
    return bulkUserImportAuditLogs;
}

From source file:playground.anhorni.locationchoice.preprocess.facilities.facilitiescreation.fromBZ.entreprisecensus.EnterpriseCensusParser.java

private final void readPresenceCodes(EnterpriseCensus ec) {

    log.info("Reading the presence code file...");

    int lineCounter = 0;
    int skip = 1;

    String filename = presenceCodeFile;
    String separator = ";";
    File file = new File(filename);

    LineIterator it = null;
    String line = null;
    String[] tokens = null;
    String reli = null;

    try {
        it = FileUtils.lineIterator(file, "UTF-8");
    } catch (IOException e) {
        e.printStackTrace();
    }
    try {
        while (it.hasNext()) {
            line = it.nextLine();
            tokens = line.split(separator);

            if (lineCounter == 0) {
                log.info("Processing header line...");
                for (String token : tokens) {
                    ec.addPresenceCodeNOGAType(token.replaceAll("\"", ""));
                }
                log.info("Processing header line...done.");
            } else {

                reli = tokens[0];
                for (int pos = 0; pos < tokens.length; pos++) {
                    if (Pattern.matches("1", tokens[pos])) {
                        ec.addPresenceCode(reli, ec.getPresenceCodeNOGAType(pos));
                    }
                }
            }
            lineCounter++;
            if (lineCounter % skip == 0) {
                log.info("Processed hectares: " + Integer.toString(lineCounter));
                skip *= 2;
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    log.info("Processed hectares: " + Integer.toString(lineCounter));
    log.info("Reading the presence code file...done.");
}

From source file:playground.anhorni.locationchoice.preprocess.facilities.facilitiescreation.fromBZ.entreprisecensus.EnterpriseCensusParser.java

private final void readHectareAggregations(EnterpriseCensus ec) {

    log.info("Reading the hectare aggregation file...");

    String separator = ",";
    String filename = inputHectareAggregationFile;
    File file = new File(filename);

    LineIterator it = null;
    String line = null;
    String[] tokens = null;
    String reli = null;
    int lineCounter = 0, skip = 1;

    try {
        it = FileUtils.lineIterator(file, "UTF-8");
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    try {
        while (it.hasNext()) {
            line = it.nextLine();
            tokens = line.split(separator);

            if (lineCounter == 0) {
                log.info("Processing header line...");
                for (String token : tokens) {
                    ec.addhectareAggregationNOGAType(token.replaceAll("\"", ""));
                }
                log.info("Processing header line...done.");
            } else {

                reli = tokens[0];
                for (int pos = 0; pos < tokens.length; pos++) {
                    if (!Pattern.matches("0", tokens[pos])) {
                        ec.addHectareAggregationInformation(reli, ec.getHectareAggregationNOGAType(pos),
                                Double.parseDouble(tokens[pos]));
                    }
                }
            }

            lineCounter++;
            if (lineCounter % skip == 0) {
                log.info("Processed hectares: " + Integer.toString(lineCounter));
                skip *= 2;
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    log.info("Processed hectares: " + Integer.toString(lineCounter));
    log.info("Reading the hectare aggregation file...done.");
}

From source file:playground.meisterk.org.matsim.enterprisecensus.EnterpriseCensusParser.java

private final void readPresenceCodes(EnterpriseCensus ec, final Config config) {

    log.info("Reading the presence code file...");

    int lineCounter = 0;
    int skip = 1;

    String filename = config.getParam(EnterpriseCensus.EC_MODULE, EnterpriseCensus.EC_PRESENCECODEFILE);
    String separator = config.getParam(EnterpriseCensus.EC_MODULE, EnterpriseCensus.EC_PRESENCECODESEPARATOR);

    File file = new File(filename);

    LineIterator it = null;
    String line = null;
    String[] tokens = null;
    String reli = null;

    try {
        it = FileUtils.lineIterator(file, "UTF-8");
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    try {
        while (it.hasNext()) {
            line = it.nextLine();
            tokens = line.split(separator);

            if (lineCounter == 0) {
                log.info("Processing header line...");
                for (String token : tokens) {
                    ec.addPresenceCodeNOGAType(token.replaceAll("\"", ""));
                }
                log.info("Processing header line...done.");
            } else {

                reli = tokens[0];
                for (int pos = 0; pos < tokens.length; pos++) {
                    if (Pattern.matches("1", tokens[pos])) {
                        ec.addPresenceCode(reli, ec.getPresenceCodeNOGAType(pos));
                    }
                }
            }

            lineCounter++;
            if (lineCounter % skip == 0) {
                log.info("Processed hectares: " + Integer.toString(lineCounter));
                skip *= 2;
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }

    log.info("Processed hectares: " + Integer.toString(lineCounter));

    log.info("Reading the presence code file...done.");

}

From source file:playground.meisterk.org.matsim.enterprisecensus.EnterpriseCensusParser.java

private final void readHectareAggregations(EnterpriseCensus ec, Config config) {

    log.info("Reading the hectare aggregation file...");

    String filename = config.getParam(EnterpriseCensus.EC_MODULE,
            EnterpriseCensus.EC_INPUTHECTAREAGGREGATIONFILE);
    String separator = config.getParam(EnterpriseCensus.EC_MODULE,
            EnterpriseCensus.EC_INPUTHECTAREAGGREGATIONSEPARATOR);
    File file = new File(filename);

    LineIterator it = null;
    String line = null;
    String[] tokens = null;
    String reli = null;
    int lineCounter = 0, skip = 1;

    try {
        it = FileUtils.lineIterator(file, "UTF-8");
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    try {
        while (it.hasNext()) {
            line = it.nextLine();
            tokens = line.split(separator);

            if (lineCounter == 0) {
                log.info("Processing header line...");
                for (String token : tokens) {
                    ec.addhectareAggregationNOGAType(token.replaceAll("\"", ""));
                }
                log.info("Processing header line...done.");
            } else {

                reli = tokens[0];
                for (int pos = 0; pos < tokens.length; pos++) {
                    if (!Pattern.matches("0", tokens[pos])) {
                        ec.addHectareAggregationInformation(reli, ec.getHectareAggregationNOGAType(pos),
                                Double.parseDouble(tokens[pos]));
                    }
                }
            }

            lineCounter++;
            if (lineCounter % skip == 0) {
                log.info("Processed hectares: " + Integer.toString(lineCounter));
                skip *= 2;
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }

    log.info("Processed hectares: " + Integer.toString(lineCounter));

    log.info("Reading the hectare aggregation file...done.");

}

From source file:playground.staheale.preprocess.AgentInteractionEnterpriseCensusParser.java

private final void readPresenceCodes(AgentInteractionEnterpriseCensus ec) {

    log.info("Reading the presence code file...");

    int lineCounter = 0;
    int skip = 1;

    String filename = presenceCodeFile;
    String separator = ";";

    File file = new File(filename);

    LineIterator it = null;
    String line = null;
    String[] tokens = null;
    String reli = null;

    try {
        it = FileUtils.lineIterator(file, "UTF-8");
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    try {
        while (it.hasNext()) {
            line = it.nextLine();
            tokens = line.split(separator);

            if (lineCounter == 0) {
                log.info("Processing header line...");
                for (String token : tokens) {
                    ec.addPresenceCodeNOGAType(token.replaceAll("\"", ""));
                }
                log.info("Processing header line...done.");
            } else {

                reli = tokens[0];
                for (int pos = 0; pos < tokens.length; pos++) {
                    if (Pattern.matches("1", tokens[pos])) {
                        ec.addPresenceCode(reli, ec.getPresenceCodeNOGAType(pos));
                    }
                }
            }

            lineCounter++;
            if (lineCounter % skip == 0) {
                log.info("Processed hectares: " + Integer.toString(lineCounter));
                skip *= 2;
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }

    log.info("Processed hectares: " + Integer.toString(lineCounter));

    log.info("Reading the presence code file...done.");

}

From source file:playground.staheale.preprocess.AgentInteractionEnterpriseCensusParser.java

private final void readHectareAggregations(AgentInteractionEnterpriseCensus ec) {

    log.info("Reading the hectare aggregation file...");

    String separator = ",";
    String filename = inputHectareAggregationFile;
    File file = new File(filename);

    LineIterator it = null;
    String line = null;
    String[] tokens = null;
    String reli = null;
    int lineCounter = 0, skip = 1;

    try {
        it = FileUtils.lineIterator(file, "UTF-8");
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    try {
        while (it.hasNext()) {
            line = it.nextLine();
            tokens = line.split(separator);

            if (lineCounter == 0) {
                log.info("Processing header line...");
                for (String token : tokens) {
                    ec.addhectareAggregationNOGAType(token.replaceAll("\"", ""));
                }
                log.info("Processing header line...done.");
            } else {

                reli = tokens[0];
                for (int pos = 0; pos < tokens.length; pos++) {
                    if (!Pattern.matches("0", tokens[pos])) {
                        ec.addHectareAggregationInformation(reli, ec.getHectareAggregationNOGAType(pos),
                                Double.parseDouble(tokens[pos]));
                    }
                }
            }

            lineCounter++;
            if (lineCounter % skip == 0) {
                log.info("Processed hectares: " + Integer.toString(lineCounter));
                skip *= 2;
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }

    log.info("Processed hectares: " + Integer.toString(lineCounter));

    log.info("Reading the hectare aggregation file...done.");

}

From source file:se.alingsas.alfresco.repo.utils.byggreda.ReadMetadataDocument.java

/**
 * Takes an input stream which should point to a metadata document for
 * byggreda. Validates and parses the data and returns a set of
 * ByggRedaDocument
 * 
 * @param inputStream
 * @return
 */
public static Set<ByggRedaDocument> read(final InputStream inputStream, List<String> globalMessages) {
    if (inputStream == null) {
        return null;
    }
    Set<ByggRedaDocument> result = new HashSet<ByggRedaDocument>();
    LineIterator lineIterator = null;
    String line = "";
    int lineNumber = 1;
    try {
        lineIterator = IOUtils.lineIterator(inputStream, "ISO-8859-1");
        // Skip first line which is a header line
        if (lineIterator.hasNext()) {

            line = lineIterator.nextLine();
            if (!line.startsWith("\"Film\";\"") && !line.startsWith("Film;")) {
                globalMessages.add(
                        "#1: Sidhuvud ej funnet p frsta raden i styrfilen. Frsta raden var: " + line);
                LOG.error("No header found on the first line in the document. First line was: " + line
                        + ". Aborting...");
                return result;
            } else {
                LOG.debug("Line #" + lineNumber + ": Skipping header");
            }
        }
        while (lineIterator.hasNext()) {
            lineNumber++;
            line = lineIterator.nextLine();
            // if it's an empty line or a comment, skip
            if (!StringUtils.hasText(line) || line.startsWith("#")) {
                globalMessages.add("#" + lineNumber + ": Tom rad, eller bortkommenterad rad funnel, skippas");
                LOG.info("Line #" + lineNumber + ": Skipping comment or empty line");
                continue;
            }
            // Validation and error handling
            ByggRedaDocument document = parseAndValidate(line);
            document.setLineNumber(lineNumber);
            if (!document.isReadSuccessfully()) {
                // An error occurred, we need to log this
                LOG.error("Line #" + document.getLineNumber() + ": " + document.getStatusMsg());
            } else {
                // Document successfully read
                LOG.debug("Line #" + document.getLineNumber() + ": "
                        + "Successfully read record. , Record number: " + document.getRecordDisplay());
            }
            result.add(document);
        }
    } catch (final Exception ex) {
        globalMessages.add("#" + lineNumber + ": Fel vid inlsning av rad " + lineNumber
                + " frn styrfil. Radens innehll: " + line + " Systemmeddelande: " + ex.getMessage());
        LOG.error("Error on line '" + lineNumber + "'. Line contents: " + line, ex);
    } finally {
        IOUtils.closeQuietly(inputStream);
        LineIterator.closeQuietly(lineIterator);
    }
    return result;
}