Example usage for org.apache.commons.io.input ReversedLinesFileReader readLine


Introduction

On this page you can find example usages of org.apache.commons.io.input.ReversedLinesFileReader.readLine().

Prototype

public String readLine() throws IOException 


Document

Returns the lines of the file from bottom to top.
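As a minimal orientation before the project examples under Usage, the sketch below shows the basic call pattern: open the reader on a file and call readLine() repeatedly; each call returns the next line counting from the end of the file, and null signals that the top has been reached. The file name application.log is a placeholder, and the (File, Charset) constructor assumes a Commons IO release that provides it.

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.input.ReversedLinesFileReader;

public class TailExample {
    public static void main(String[] args) throws IOException {
        // Placeholder file path used only for illustration.
        File file = new File("application.log");

        // try-with-resources works because ReversedLinesFileReader implements Closeable.
        try (ReversedLinesFileReader reader = new ReversedLinesFileReader(file, StandardCharsets.UTF_8)) {
            String line;
            // Each readLine() returns the next line counting from the end of the file;
            // null means the beginning of the file has been reached.
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}

Several of the examples below use the older single-argument constructor, which falls back to the platform default charset; passing a Charset explicitly is the safer choice when the file encoding is known.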

Usage

From source file:dbseer.gui.user.DBSeerDataSetPath.java

public int getNumTransactionType() {
    if (this.avgLatency.isEmpty()) {
        return 0;
    }

    // use the last line of average latency file to get num tx types.
    File avgLatencyFile = new File(this.avgLatency);
    if (avgLatencyFile == null || !avgLatencyFile.exists() || avgLatencyFile.length() == 0) {
        return 0;
    }

    try {
        ReversedLinesFileReader reverseFileReader = new ReversedLinesFileReader(avgLatencyFile);
        String line = reverseFileReader.readLine(); // read last line.
        String[] tokens = line.trim().split("\\s+");
        reverseFileReader.close();
        return (tokens.length - 1);
    } catch (IOException e) {
        e.printStackTrace();
        return 0;
    }
}

From source file:com.kajj.tools.logviewer.LogRepository.java

/**
 * Returns the last <code>numberOfLines</code> lines from the log file.
 *
 * @param fileName the name of the log file.
 * @param numberOfLines the number of lines to return.
 * @return The last <code>numberOfLines</code> lines in the log file.
 * @throws IOException if an I/O exception occurs reading the log file.
 */
public List<String> getTailLog(final String fileName, final int numberOfLines) throws IOException {
    final File logFile = new File(LOG_DIRECTORY, fileName);
    final ReversedLinesFileReader reader = new ReversedLinesFileReader(logFile);
    final LinkedList<String> logs = new LinkedList<>();
    for (int i = 0; i < numberOfLines; i++) {
        final String line = reader.readLine();
        if (line == null) {
            break;
        }

        logs.addFirst(line);
    }

    return logs;
}

From source file:com.kotcrab.vis.editor.util.CrashReporter.java

private void printLog() throws IOException {
    println("--- Log file (last 200 lines) ---");

    ReversedLinesFileReader reader = new ReversedLinesFileReader(logFile);
    Array<String> logLines = new Array<>();

    for (int i = 0; i < 200; i++) {
        String line = reader.readLine();
        if (line == null)
            break;
        logLines.add(line);
    }

    logLines.reverse();

    for (String s : logLines)
        println(s);

    println("---------------------------------");
    println();
}

From source file:com.enioka.jqm.api.ServiceSimple.java

@GET
@Path("enginelog")
@Produces(MediaType.TEXT_PLAIN)
public String getEngineLog(@QueryParam("latest") int latest) {
    // Failsafe
    if (latest > 10000) {
        latest = 10000;
    }

    ReversedLinesFileReader r = null;
    try {
        File f = new File(
                FilenameUtils.concat("./logs/", "jqm-" + context.getInitParameter("jqmnode") + ".log"));
        r = new ReversedLinesFileReader(f);
        StringBuilder sb = new StringBuilder(latest);
        String buf = r.readLine();
        int i = 1;
        while (buf != null && i <= latest) {
            sb.append(buf);
            sb.append(System.getProperty("line.separator"));
            i++;
            buf = r.readLine();
        }
        return sb.toString();
    } catch (Exception e) {
        throw new ErrorDto("Could not return the desired file", 8, e, Status.NO_CONTENT);
    } finally {
        IOUtils.closeQuietly(r);
    }
}

From source file:edu.sjsu.pokemonclassifier.classification.UserPreferenceInfo.java

/**
 * Modified by sidmishraw -- closed reading and writing streams
 * @param dataRow
 */
private void writeInfoToFile(String dataRow) {

    BufferedWriter bufferedWriter = null;
    ReversedLinesFileReader rLinesFileReader = null;

    try {

        int labelIndex = 0;

        File userStoredData = new File(fileName);

        System.out.println("FIle: " + userStoredData.getAbsolutePath());

        if (!userStoredData.exists()) {

            userStoredData.createNewFile();

            System.out.println("DEBUG INFO :: Created file " + fileName);
        } else {

            rLinesFileReader = new ReversedLinesFileReader(userStoredData);

            String bottomLine = rLinesFileReader.readLine();

            System.out.println("DEBUG INFO :: line read " + bottomLine);

            String[] tokens = bottomLine.split(" ");

            labelIndex = Integer.parseInt(tokens[0]) + 1;
        }

        numOfLikedPokemon = labelIndex + 1;

        // ToDo:
        //   Set a limit of file writing.
        //   Need to write in specific line

        String line = String.format("%d %s", labelIndex, dataRow);

        bufferedWriter = new BufferedWriter(new FileWriter(userStoredData, true));

        bufferedWriter.write(line + "\n");
    } catch (IOException e) {
        System.out.println("COULD NOT WRITE TO FILE!!");
    } finally {

        if (null != rLinesFileReader) {

            try {

                rLinesFileReader.close();
            } catch (IOException e) {

                e.printStackTrace();
            }
        }

        if (null != bufferedWriter) {

            try {

                bufferedWriter.close();
            } catch (IOException e) {

                e.printStackTrace();
            }
        }
    }
}

From source file:dbseer.gui.user.DBSeerDataSet.java

public long getEndTime() {
    if (!loadDatasetPath()) {
        return -1;
    }

    if (datasetPathList.isEmpty()) {
        return -1;
    }

    DBSeerDataSetPath path = datasetPathList.get(0);
    String latencyPath = path.getAvgLatency();
    File latencyFile = new File(latencyPath);

    try {
        ReversedLinesFileReader lastLineReader = new ReversedLinesFileReader(latencyFile);
        String lastLine = lastLineReader.readLine();

        return this.getTimestamp(lastLine);
    } catch (FileNotFoundException e) {
        return -1;
    } catch (IOException e) {
        return -1;
    }

}

From source file:org.jumpmind.metl.ui.views.admin.LoggingPanel.java

@Override
@SuppressWarnings("unchecked")
public Object onBackgroundDataRefresh() {
    StringBuilder builder = null;
    if (logFile != null && logFile.exists() && autoRefreshOn.getValue()) {
        try {
            builder = new StringBuilder();
            Pattern pattern = Pattern.compile("^\\d\\d\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d:\\d\\d,\\d\\d\\d .*");
            String filterValue = filter.getValue();
            boolean isFiltering = !StringUtils.isBlank(filterValue);
            Pattern filter = Pattern.compile("(.*)(" + filterValue + ")(.*)");
            ReversedLinesFileReader reader = new ReversedLinesFileReader(logFile);
            try {
                int lines = Integer.parseInt(bufferSize.getValue());
                int counter = 0;
                String line = null;
                do {
                    if (!isFiltering) {
                        line = StringEscapeUtils.escapeHtml(reader.readLine());
                    } else {
                        StringBuilder multiLine = new StringBuilder();
                        while ((line = StringEscapeUtils.escapeHtml(reader.readLine())) != null) {
                            if (pattern.matcher(line).matches()) {
                                multiLine.insert(0, line);
                                line = multiLine.toString();
                                break;
                            } else {
                                multiLine.insert(0, line + "<br/>");
                                counter++;
                            }
                        }
                    }

                    if (line != null) {
                        boolean showLine = !isFiltering;
                        if (isFiltering) {
                            Matcher matcher = filter.matcher(line);
                            if (showLine = matcher.matches()) {
                                line = matcher.replaceAll("$1<font color='red'>$2</font>$3");
                            }
                        }
                        if (showLine) {
                            builder.insert(0, line + "<br/>");
                            counter++;
                        }
                    }
                } while (line != null && counter < lines);
            } finally {
                if (reader != null) {
                    reader.close();
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    return builder;
}

From source file:org.knime.ext.textprocessing.nodes.tagging.stanfordnlpnescorer.StanfordNlpNeScorerNodeModel.java

/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {

    m_inputModelPortObject = (StanfordNERModelPortObject) inObjects[1];
    m_inputModel = m_inputModelPortObject.getNERModel();
    m_usedDict = m_inputModelPortObject.getDictSet();
    m_tag = m_inputModelPortObject.getTag();
    m_tokenizerName = m_inputModelPortObject.getTokenizerName();

    //create a BufferedDataContainer for the scoring values
    BufferedDataContainer accTable = exec.createDataContainer(new DataTableSpec(QUALITY_MEASURES_SPECS));

    // build pattern set from dictionary
    DataTableSpec docTableSpec = (DataTableSpec) inObjects[0].getSpec();
    BufferedDataTable docDataInput = (BufferedDataTable) inObjects[0];
    Set<Pattern> knownEntitiesPatternSet = new LinkedHashSet<Pattern>();
    for (String word : m_usedDict) {
        knownEntitiesPatternSet.add(Pattern.compile(word));
    }

    // create dictionary tagger to tag the input documents with the dictionary used for building the model
    MultiTermRegexDocumentTagger tagger = new MultiTermRegexDocumentTagger(true, knownEntitiesPatternSet, m_tag,
            true, m_tokenizerName);

    // create UUID to add them to the file path to avoid cases where two instances of the node model used the same file path at the same time
    String tempDir = KNIMEConstants.getKNIMETempDir() + "/";
    String m_annotatedTestFilePath = tempDir + "aD-" + UUID.randomUUID().toString() + ".tsv";

    // create the annotated test file
    File m_annotatedTestFile = new File(m_annotatedTestFilePath);
    PrintWriter sentenceFileWriter = new PrintWriter(m_annotatedTestFile, "UTF-8");

    int missingValueCounter = 0;

    // tag documents and transform sentences to strings while tagged terms get StanfordNLP annotation
    // iterate through columns
    for (int i = 0; i < docTableSpec.getNumColumns(); i++) {
        // iterate through rows if column with correct name has been found
        if (docTableSpec.getColumnSpec(i).getName().equals(m_docColumnModel.getStringValue())) {
            int counter = 0;
            Set<String> countMultiWordTerms = new HashSet<String>();
            for (DataRow row : docDataInput) {
                //set progress bar
                counter++;
                double progress = (counter / (double) docDataInput.size()) / (3.0);
                exec.setProgress(progress, "Preparing documents for validation");
                exec.checkCanceled();

                if (!row.getCell(i).isMissing() && row.getCell(i).getType().isCompatible(DocumentValue.class)) {
                    Document doc = ((DocumentValue) row.getCell(i)).getDocument();
                    Document taggedDoc = tagger.tag(doc);
                    Iterator<Sentence> si = taggedDoc.sentenceIterator();
                    while (si.hasNext()) {
                        Sentence s = si.next();
                        List<Term> termList = s.getTerms();
                        Iterator<Term> ti = termList.iterator();
                        while (ti.hasNext()) {
                            Term t = ti.next();
                            String termText = t.getText();
                            String termTextWithWsSuffix = t.getTextWithWsSuffix();
                            if (m_usedDict.contains(termText) || m_usedDict.contains(termTextWithWsSuffix)) {
                                if (t.getWords().size() > 1) {
                                    // multi-word terms should not be written in one line in the training file
                                    countMultiWordTerms.add(t.getText());

                                    // so skip it by splitting the term and writing each word in one line
                                    for (Word w : t.getWords()) {
                                        sentenceFileWriter.println(w.getText() + "\t" + m_tag.getTagValue());
                                    }
                                } else {
                                    sentenceFileWriter.println(termText + "\t" + m_tag.getTagValue());
                                }
                            } else if (!m_usedDict.contains(termText)
                                    || !m_usedDict.contains(termTextWithWsSuffix)) {
                                sentenceFileWriter.println(termText + "\tO");
                            }
                        }
                    }
                } else {
                    missingValueCounter++;
                }
            }
        }
    }

    if (missingValueCounter == 1) {
        setWarningMessage(missingValueCounter + " row has been ignored due to missing value.");
    } else if (missingValueCounter > 1) {
        setWarningMessage(missingValueCounter + " rows have been ignored due to missing values.");
    }

    sentenceFileWriter.close();

    exec.setProgress(0.5, "Validate model");
    // create logger configuration and catch the scores which will be printed to the log file
    File tmpLogFile = new File(KNIMEConstants.getKNIMETempDir() + "/scores.log");
    RedwoodConfiguration conf = RedwoodConfiguration.empty();
    conf.handlers(Handlers.chain(Handlers.hideDebug, Handlers.file(tmpLogFile))).apply();

    // classify the documents with our model
    DocumentReaderAndWriter<CoreLabel> raw = m_inputModel.makeReaderAndWriter();
    Triple<Double, Double, Double> prfScores = m_inputModel.classifyAndWriteAnswers(m_annotatedTestFilePath,
            new ByteArrayOutputStream(), raw, true);

    DataRow stats = new DefaultRow(new RowKey("Row0"),
            new DataCell[] { DataType.getMissingCell(), DataType.getMissingCell(), DataType.getMissingCell(),
                    DataType.getMissingCell(), DataType.getMissingCell(), DataType.getMissingCell() });

    ReversedLinesFileReader logReader = new ReversedLinesFileReader(tmpLogFile, StandardCharsets.UTF_8);

    try {
        // get values from output stream
        String[] scores = logReader.readLine().split("\t");
        if (scores.length >= 7) {
            Double precision = prfScores.first() / 100;
            Double recall = prfScores.second() / 100;
            Double f1 = prfScores.third() / 100;
            int tp = Integer.parseInt(scores[4].trim());
            int fp = Integer.parseInt(scores[5].trim());
            int fn = Integer.parseInt(scores[6].trim());
            // create the scores row and add it to the BufferedDataContainer we created in the beginning
            stats = new DefaultRow(new RowKey("Row0"),
                    new DataCell[] { new DoubleCell(precision), new DoubleCell(recall), new DoubleCell(f1),
                            new IntCell(tp), new IntCell(fp), new IntCell(fn) });
            if (tp == 0 && fp == 0 && fn == 0 && precision == 0 && recall == 1 && f1 == 0) {
                setWarningMessage("Could not parse quality measures of model validation.");
            }
        }
    } catch (NumberFormatException e) {
        setWarningMessage("Could not parse quality measures of model validation.");
    } finally {
        logReader.close();
        tmpLogFile.delete();
        m_annotatedTestFile.delete();
    }
    accTable.addRowToTable(stats);

    accTable.close();

    return new BufferedDataTable[] { accTable.getTable() };
}

From source file:org.springframework.shell.core.JLineShell.java

/**
 * Reads history commands from the history log. The history size is determined by the --histsize option.
 *
 * @return history commands
 */
private String[] filterLogEntry() {
    ArrayList<String> entries = new ArrayList<String>();
    try {
        ReversedLinesFileReader reader = new ReversedLinesFileReader(new File(getHistoryFileName()), 4096,
                Charset.forName("UTF-8"));
        int size = 0;
        String line = null;
        while ((line = reader.readLine()) != null) {
            if (!line.startsWith("//")) {
                size++;
                if (size > historySize) {
                    break;
                } else {
                    entries.add(line);
                }
            }
        }
    } catch (IOException e) {
        logger.warning("read history file failed. Reason:" + e.getMessage());
    }
    Collections.reverse(entries);
    return entries.toArray(new String[0]);
}

From source file:org.wso2.emm.agent.services.operation.OperationManager.java

public static String getOperationResponseFromLogcat(Context context, String logcat) throws IOException {
    File logcatFile = new File(logcat);
    if (logcatFile.exists() && logcatFile.canRead()) {
        DeviceInfo deviceInfo = new DeviceInfo(context);
        EventPayload eventPayload = new EventPayload();
        eventPayload.setPayload(logcat);
        eventPayload.setType("LOGCAT");
        eventPayload.setDeviceIdentifier(deviceInfo.getDeviceId());

        StringBuilder emmBuilder = new StringBuilder();
        StringBuilder publisherBuilder = new StringBuilder();
        int index = 0;
        String line;
        ReversedLinesFileReader reversedLinesFileReader = new ReversedLinesFileReader(logcatFile,
                Charset.forName("US-ASCII"));
        while ((line = reversedLinesFileReader.readLine()) != null) {
            publisherBuilder.insert(0, "\n");
            publisherBuilder.insert(0, line);
            //OPERATION_RESPONSE field in the DM_DEVICE_OPERATION_RESPONSE is declared as a blob and hence can only hold 64Kb.
            //So we don't want to throw exceptions in the server. Limiting the response in here to limit the server traffic also.
            if (emmBuilder.length() < Character.MAX_VALUE - 8192) { //Keeping 8kB for rest of the response payload.
                emmBuilder.insert(0, "\n");
                emmBuilder.insert(0, line);
            }
            if (++index >= Constants.LogPublisher.NUMBER_OF_LOG_LINES) {
                break;
            }
        }
        LogPublisherFactory publisher = new LogPublisherFactory(context);
        if (publisher.getLogPublisher() != null) {
            eventPayload.setPayload(publisherBuilder.toString());
            publisher.getLogPublisher().publish(eventPayload);
            if (Constants.DEBUG_MODE_ENABLED) {
                Log.d(TAG, "Logcat published size: " + eventPayload.getPayload().length());
            }
        }
        eventPayload.setPayload(emmBuilder.toString());
        Gson logcatResponse = new Gson();
        logcatFile.delete();
        if (Constants.DEBUG_MODE_ENABLED) {
            Log.d(TAG, "Logcat payload size: " + eventPayload.getPayload().length());
        }
        return logcatResponse.toJson(eventPayload);
    } else {
        throw new IOException("Unable to find or read log file.");
    }
}