Example usage for java.io RandomAccessFile getFilePointer

List of usage examples for java.io RandomAccessFile getFilePointer

Introduction

On this page you can find example usages of java.io RandomAccessFile getFilePointer.

Prototype

public native long getFilePointer() throws IOException;

Source Link

Document

Returns the current offset in this file.

Usage

From source file:org.opencb.cellbase.lib.db.VariantAnnotationCalculatorTest.java

/**
 * Advances the file pointer past the VEP file header (lines starting with "#"),
 * leaving it positioned at the start of the first data line.
 *
 * @param raf the open file to reposition
 * @throws IOException if an I/O error occurs while reading
 */
private void skipVepFileHeader(RandomAccessFile raf) throws IOException {
    String line;
    long pos;
    do {
        // Remember where this line starts so we can rewind to it afterwards.
        pos = raf.getFilePointer();
        line = raf.readLine();
        // readLine() returns null at EOF; without the null check a file consisting
        // entirely of header lines would throw a NullPointerException.
    } while (line != null && line.startsWith("#"));
    raf.seek(pos);
}

From source file:org.artifactory.webapp.wicket.page.logs.SystemLogsViewPanel.java

/**
 * Attemps to continue reading the log file from the last position, and the updates the log path, size and link
 * According to the outcome./*from   www  .  j  a v a 2  s.  c  om*/
 *
 * @param cleanPanel True if the text container should be cleaned of content. false if not
 * @return String - The newly read content
 */
protected String readLogAndUpdateSize(boolean cleanPanel) {
    if ((lastPointer > systemLogFile.length()) || cleanPanel) {
        lastPointer = 0;
    }
    long size = systemLogFile.length();
    setLogInfo();
    if (lastPointer == size) {
        return "";
    }
    StringBuilder sb = new StringBuilder();
    RandomAccessFile logRandomAccessFile = null;
    try {
        logRandomAccessFile = new RandomAccessFile(systemLogFile, "r");

        //If the log file is larger than 100K
        if (lastPointer == 0 && logRandomAccessFile.length() > FIRST_READ_BLOCK_SIZE) {
            //Point to the begining of the last 100K
            lastPointer = logRandomAccessFile.length() - FIRST_READ_BLOCK_SIZE;
        }
        logRandomAccessFile.seek(lastPointer);

        String line;
        while ((line = logRandomAccessFile.readLine()) != null) {
            CharSequence escapedLine = Strings.escapeMarkup(line, false, false);
            sb.append("<div>").append(escapedLine).append("<br/></div>");
        }
        lastPointer = logRandomAccessFile.getFilePointer();

    } catch (IOException e) {
        throw new RuntimeException(e.getMessage());
    } finally {
        try {
            if (logRandomAccessFile != null) {
                logRandomAccessFile.close();
            }
        } catch (IOException ignore) {
        }
    }
    return sb.toString();
}

From source file:com.microsoft.azure.management.datalake.store.uploader.SingleSegmentUploader.java

/**
 * Determines the upload cutoff for text file.
 *
 * @param buffer The buffer./*from   w  w w  .  j  ava2s.c  o  m*/
 * @param bufferDataLength length of the buffer data.
 * @param inputStream The input stream.
 * @return The index within the buffer which indicates a record boundary cutoff for a single append request for a text file.
 * @throws UploadFailedException indicates that the upload failed for the specified reason.
 * @throws IOException indicates the path is inaccessible or does not exist.
 */
private int determineUploadCutoffForTextFile(byte[] buffer, int bufferDataLength, RandomAccessFile inputStream)
        throws UploadFailedException, IOException {
    Charset encoding = Charset.forName(metadata.getEncodingName());
    //NOTE: we return an offset, but everywhere else below we treat it as a byte count; in order for that to work, we need to add 1 to the result of FindNewLine.
    int uploadCutoff = StringExtensions.findNewline(buffer, bufferDataLength - 1, bufferDataLength, true,
            encoding, metadata.getDelimiter()) + 1;
    if (uploadCutoff <= 0 && (metadata.getSegmentCount() > 1 || bufferDataLength >= MAX_RECORD_LENGTH)) {
        throw new UploadFailedException(MessageFormat.format(
                "Found a record that exceeds the maximum allowed record length around offset {0}",
                inputStream.getFilePointer()));
    }

    //a corner case here is when the newline is 2 chars long, and the first of those lands on the last byte of the buffer. If so, let's try to find another
    //newline inside the buffer, because we might be splitting this wrongly.
    if ((metadata.getDelimiter() == null || StringUtils.isEmpty(metadata.getDelimiter()))
            && uploadCutoff == buffer.length && buffer[buffer.length - 1] == (byte) '\r') {
        int newCutoff = StringExtensions.findNewline(buffer, bufferDataLength - 2, bufferDataLength - 1, true,
                encoding, metadata.getDelimiter()) + 1;
        if (newCutoff > 0) {
            uploadCutoff = newCutoff;
        }
    }

    return uploadCutoff;
}

From source file:w2v.WordToVec.java

/**
 * Reads the next word from the file and resolves it to a vocabulary index.
 *
 * @param raf the open file to read from
 * @return -1 when the file pointer has reached the end of the file, otherwise
 *         the vocabulary index of the word that was read
 * @throws IOException if an I/O error occurs while reading
 */
private int readWordIndex(RandomAccessFile raf) throws IOException {
    String word = readWord(raf);
    // EOF sentinel: signal the caller that there is nothing left to read.
    return (raf.length() == raf.getFilePointer()) ? -1 : searchVocab(word);
}

From source file:com.dotmarketing.servlets.taillog.Tailer.java

/**
 * Follows changes in the file, calling the TailerListener's handle method for
 * each new line. Loops until the {@code run} flag is cleared, re-opening the
 * file when it is rotated and re-reading it when it grows.
 */
public void run() {
    RandomAccessFile reader = null;
    try {
        long last = 0; // The last time the file was checked for changes
        long position = 0; // position within the file

        // Open the file, retrying (with a delay) until it appears or we are stopped.
        while (run && reader == null) {
            try {
                reader = new RandomAccessFile(file, "r");
            } catch (FileNotFoundException e) {
                listener.fileNotFound();
            }

            if (reader == null) {
                try {
                    Thread.sleep(delay);
                } catch (InterruptedException e) {
                    // Deliberately ignored: the run flag controls shutdown.
                }
            } else {
                // Choose the starting position: end of file, or the configured offset.
                position = end ? file.length() : startPosition;
                last = System.currentTimeMillis();
                reader.seek(position);
                readLine(reader);
                position = reader.getFilePointer();
            }
        }

        while (run) {

            // Check the file length to see if it was rotated
            long length = file.length();

            if (length < position) {

                // File was rotated
                listener.fileRotated();

                // Reopen the reader after rotation
                try {
                    // Ensure that the old file is closed iff we re-open it successfully
                    RandomAccessFile save = reader;
                    reader = new RandomAccessFile(file, "r");
                    position = 0;
                    // close old file explicitly rather than relying on GC picking up previous RAF
                    IOUtils.closeQuietly(save);
                } catch (FileNotFoundException e) {
                    // in this case we continue to use the previous reader and position values
                    listener.fileNotFound();
                }
                continue;
            } else {

                // File was not rotated

                // See if the file needs to be read again
                if (length > position) {

                    // The file has more content than it did last time
                    last = System.currentTimeMillis();
                    position = readLines(reader);

                } else if (FileUtils.isFileNewer(file, last)) {

                    /* This can happen if the file is truncated or overwritten
                     * with the exact same length of information. In cases like
                     * this, the file position needs to be reset
                     */
                    position = 0;
                    reader.seek(position); // cannot be null here

                    // Now we can read new lines
                    last = System.currentTimeMillis();
                    position = readLines(reader);
                }
            }
            try {
                Thread.sleep(delay);
            } catch (InterruptedException e) {
                // Deliberately ignored: the run flag controls shutdown.
            }
        }

    } catch (Exception e) {

        listener.handle(e);

    } finally {
        // The reader is null when the file was never opened (e.g. stopped while
        // waiting for it to appear); guard so we don't log a spurious NPE here.
        if (reader != null) {
            try {
                reader.close();
            } catch (Exception e) {
                Logger.error(this.getClass(), "Unable to close: " + e.getMessage());
            }
        }
    }
}

From source file:org.apache.james.mailrepository.file.MBoxMailRepository.java

/**
 * Parses the mbox file, invoking {@code messAct} for each complete message found.
 *
 * @param ins
 *            The random access file to load. Note that the file may or may
 *            not start at offset 0 in the file
 * @param messAct
 *            The action to take when a message is found
 * @return the result of the first message action that reports completion, the
 *         result of the final message action if the buffer ends mid-message,
 *         or null when parsing finishes without a completed action or fails
 */
private MimeMessage parseMboxFile(RandomAccessFile ins, MessageAction messAct) {
    if ((getLogger().isDebugEnabled())) {
        String logBuffer = this.getClass().getName() + " Start parsing " + mboxFile;

        getLogger().debug(logBuffer);
    }
    try {

        // mbox message separator: "From <sender> <hh>:<mm>:<ss>" at line start.
        Pattern sepMatchPattern = Pattern.compile("^From (.*) (.*):(.*):(.*)$");

        int c;
        boolean inMessage = false;
        StringBuffer messageBuffer = new StringBuffer();
        String previousMessageSeparator = null;
        boolean foundSep;

        // File offset where the message currently being accumulated starts.
        long prevMessageStart = ins.getFilePointer();
        if (BUFFERING) {
            // Line-at-a-time mode via readLine().
            String line;
            while ((line = ins.readLine()) != null) {
                foundSep = sepMatchPattern.matcher(line).matches();

                if (foundSep && inMessage) {
                    // A new separator ends the previous message: hand it off.
                    MimeMessage endResult = messAct.messageAction(previousMessageSeparator,
                            messageBuffer.toString(), prevMessageStart);
                    if (messAct.isComplete()) {
                        // I've got what I want so just exit
                        return endResult;
                    }
                    previousMessageSeparator = line;
                    // NOTE(review): subtracting line.length() ignores the line
                    // terminator byte(s) consumed by readLine(), so this start
                    // offset may be off by 1-2 bytes — confirm against callers.
                    prevMessageStart = ins.getFilePointer() - line.length();
                    messageBuffer = new StringBuffer();
                    inMessage = true;
                }
                // Only done at the start (first header)
                if (foundSep && !inMessage) {
                    previousMessageSeparator = line;
                    inMessage = true;
                }
                if (!foundSep && inMessage) {
                    messageBuffer.append(line).append("\n");
                }
            }
        } else {
            // Byte-at-a-time mode: assemble lines manually on LF (ASCII 10).
            StringBuffer line = new StringBuffer();
            while ((c = ins.read()) != -1) {
                if (c == 10) {
                    foundSep = sepMatchPattern.matcher(line).matches();
                    if (foundSep && inMessage) {
                        // A new separator ends the previous message: hand it off.
                        MimeMessage endResult = messAct.messageAction(previousMessageSeparator,
                                messageBuffer.toString(), prevMessageStart);
                        if (messAct.isComplete()) {
                            // I've got what I want so just exit
                            return endResult;
                        }
                        previousMessageSeparator = line.toString();
                        // NOTE(review): same off-by-terminator concern as the
                        // buffered branch above — confirm.
                        prevMessageStart = ins.getFilePointer() - line.length();
                        messageBuffer = new StringBuffer();
                        inMessage = true;
                    }
                    // Only done at the start (first header)
                    if (foundSep && !inMessage) {
                        previousMessageSeparator = line.toString();
                        inMessage = true;
                    }
                    if (!foundSep) {
                        // NOTE(review): unlike the buffered branch, this appends
                        // even before the first separator is seen — confirm intended.
                        messageBuffer.append(line).append((char) c);
                    }
                    line = new StringBuffer(); // Reset buffer
                } else {
                    line.append((char) c);
                }
            }
        }

        if (messageBuffer.length() != 0) {
            // process last message
            return messAct.messageAction(previousMessageSeparator, messageBuffer.toString(), prevMessageStart);
        }
    } catch (IOException ioEx) {
        getLogger().error("Unable to write file (General I/O problem) " + mboxFile, ioEx);
    } catch (PatternSyntaxException e) {
        getLogger().error("Bad regex passed " + mboxFile, e);
    } finally {
        if ((getLogger().isDebugEnabled())) {
            String logBuffer = this.getClass().getName() + " Finished parsing " + mboxFile;

            getLogger().debug(logBuffer);
        }
    }
    return null;
}

From source file:org.hrva.capture.LogTail.java

/**
 * Tail the given file if the size has changed and return a temp filename.
 *
 * <p>This returns a temp filename if the log being tailed has changed.</p>
 *
 * <p>The supplied target filename is -- actually -- a format string.
 * The available value, <tt>{0}</tt>, is the sequence number
 * that's saved in the history cache.</p>
 *
 * @param source The log filename to tail
 * @param target A temporary filename into which to save the tail piece.
 * @return temp filename, if the file size changed; otherwise null
 * @throws FileNotFoundException if the source log cannot be opened
 * @throws IOException on any other read/write failure
 */
public String tail(String source, String target) throws FileNotFoundException, IOException {
    // The resulting file name (or null if the log did not grow).
    String temp_name = null;

    // Open our last-time-we-looked file.
    String cache_file_name = global.getProperty("logtail.tail_status_filename", "logtail.history");
    String limit_str = global.getProperty("logtail.file_size_limit", "1m"); // 1 * 1024 * 1024;
    // Parse the size limit, honoring "k"/"K" and "m"/"M" suffixes.
    int limit;
    if (limit_str.endsWith("m") || limit_str.endsWith("M")) {
        limit = 1024 * 1024 * Integer.parseInt(limit_str.substring(0, limit_str.length() - 1));
    } else if (limit_str.endsWith("k") || limit_str.endsWith("K")) {
        limit = 1024 * Integer.parseInt(limit_str.substring(0, limit_str.length() - 1));
    } else {
        limit = Integer.parseInt(limit_str);
    }

    Properties state = get_state(cache_file_name);

    // Find the previous size and sequence number
    String prev_size_str = state.getProperty("size." + source, "0");
    long prev_size = Long.parseLong(prev_size_str);
    String seq_str = state.getProperty("seq." + source, "0");
    long sequence = Long.parseLong(seq_str);

    Object[] details = { source, target, seq_str, prev_size_str };
    logger.info(MessageFormat.format("Tailing {0} to {1}", details));
    logger.info(MessageFormat.format("Count {2}, Bytes {3}", details));
    sequence += 1;

    // Attempt to seek to the previous position
    long position = 0;
    File log_to_tail = new File(source);
    RandomAccessFile rdr = new RandomAccessFile(log_to_tail, "r");
    try {
        long current_size = rdr.length();
        if (current_size == prev_size) {
            // Same size.  Nothing more to do here.
            position = current_size;
        } else {
            // Changed size.  Either grew or was truncated.
            if (rdr.length() < prev_size) {
                // Got truncated.  Read from beginning.
                sequence = 0;
                prev_size = 0;
            } else {
                // Got bigger.  Read from where we left off.
                rdr.seek(prev_size);
            }
            // Read to EOF or the limit.  No reason to get greedy.
            int read_size;
            if (current_size - prev_size > limit) {
                read_size = limit;
                rdr.seek(current_size - limit);
            } else {
                read_size = (int) (current_size - prev_size);
            }
            byte[] buffer = new byte[read_size];
            // readFully: a plain read() may return fewer bytes than requested,
            // which would silently write garbage tail data.
            rdr.readFully(buffer);
            position = rdr.getFilePointer();

            // Write temp file
            Object[] args = { sequence };
            temp_name = MessageFormat.format(target, args);

            File extract = new File(temp_name);
            OutputStream wtr = new FileOutputStream(extract);
            try {
                wtr.write(buffer);
            } finally {
                // Close explicitly: leaking the stream can leave the temp file
                // incomplete or locked on some platforms.
                wtr.close();
            }
        }
    } finally {
        rdr.close();
    }

    // Update our private last-time-we-looked file.
    state.setProperty("size." + source, String.valueOf(position));
    state.setProperty("seq." + source, String.valueOf(sequence));
    save_state(cache_file_name, state);

    Object[] details2 = { source, target, seq_str, prev_size_str, String.valueOf(sequence),
            String.valueOf(position) };
    logger.info(MessageFormat.format("Count {4}, Bytes {5}", details2));

    return temp_name;
}

From source file:com.example.android.vault.EncryptedDocument.java

/**
 * Encrypt and write both the metadata and content sections of this
 * document, reading the content from the given pipe. Internally uses
 * {@link ParcelFileDescriptor#checkError()} to verify that content arrives
 * without errors. Writes to temporary file to keep atomic view of contents,
 * swapping into place only when write is successful.
 * <p/>/*from w ww.j  a  v  a 2  s  .  c  o  m*/
 * Pipe is left open, so caller is responsible for calling
 * {@link ParcelFileDescriptor#close()} or
 * {@link ParcelFileDescriptor#closeWithError(String)}.
 *
 * @param contentIn read end of a pipe.
 */
public void writeMetadataAndContent(JSONObject meta, ParcelFileDescriptor contentIn)
        throws IOException, GeneralSecurityException {
    // Write into temporary file to provide an atomic view of existing
    // contents during write, and also to recover from failed writes.
    final String tempName = mFile.getName() + ".tmp_" + Thread.currentThread().getId();
    final File tempFile = new File(mFile.getParentFile(), tempName);

    RandomAccessFile f = new RandomAccessFile(tempFile, "rw");
    try {
        // Truncate any existing data
        f.setLength(0);

        // Write content first to detect size
        if (contentIn != null) {
            f.seek(CONTENT_OFFSET);
            final int plainLength = writeSection(f, new FileInputStream(contentIn.getFileDescriptor()));
            meta.put(Document.COLUMN_SIZE, plainLength);

            // Verify that remote side of pipe finished okay; if they
            // crashed or indicated an error then this throws and we
            // leave the original file intact and clean up temp below.
            contentIn.checkError();
        }

        meta.put(Document.COLUMN_DOCUMENT_ID, mDocId);
        meta.put(Document.COLUMN_LAST_MODIFIED, System.currentTimeMillis());

        // Rewind and write metadata section
        f.seek(0);
        f.writeInt(MAGIC_NUMBER);

        final ByteArrayInputStream metaIn = new ByteArrayInputStream(
                meta.toString().getBytes(StandardCharsets.UTF_8));
        writeSection(f, metaIn);

        if (f.getFilePointer() > CONTENT_OFFSET) {
            throw new IOException("Metadata section was too large");
        }

        // Everything written fine, atomically swap new data into place.
        // fsync() before close would be overkill, since rename() is an
        // atomic barrier.
        f.close();
        tempFile.renameTo(mFile);

    } catch (JSONException e) {
        throw new IOException(e);
    } finally {
        // Regardless of what happens, always try cleaning up.
        f.close();
        tempFile.delete();
    }
}

From source file:org.commoncrawl.service.crawler.CrawlLog.java

/**
 * Transfers a local checkpoint crawl log to HDFS, validating each entry's CRC
 * and skipping corrupt records by re-syncing on the next sync-byte marker.
 *
 * @param crawlLogPath the local crawl log file to read
 * @param writer sink that receives each valid CrawlURL as a sequence-file entry
 * @param checkpointId identifier used only for log messages
 * @throws IOException on unrecoverable read failures or when the writer fails
 */
private static void transferLocalCheckpointLog(File crawlLogPath, HDFSCrawlURLWriter writer, long checkpointId)
        throws IOException {

    // and open the crawl log file ...
    RandomAccessFile inputStream = null;

    CRC32 crc = new CRC32();
    CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 17);
    byte[] syncBytesBuffer = new byte[SYNC_BYTES_SIZE];

    // save position for potential debug output.
    long lastReadPosition = 0;

    try {
        inputStream = new RandomAccessFile(crawlLogPath, "rw");
        // alias used for the read-oriented calls below; same underlying file.
        RandomAccessFile reader = inputStream;
        // seek to zero
        reader.seek(0L);

        // read the header ...
        LogFileHeader header = readLogFileHeader(reader);

        // read crawl urls from the stream until the recorded file size is reached...
        while (inputStream.getFilePointer() < header._fileSize) {

            if (seekToNextSyncBytesPos(syncBytesBuffer, reader, header._fileSize)) {

                try {
                    lastReadPosition = inputStream.getFilePointer();

                    // skip sync
                    inputStream.skipBytes(SYNC_BYTES_SIZE);

                    // read length and expected checksum ...
                    int urlDataLen = reader.readInt();
                    long urlDataCRC = reader.readLong();

                    // grow the scratch buffer in 64K increments when needed.
                    if (urlDataLen > buffer.getBuffer().length) {
                        buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536);
                    }
                    // readFully: a plain read() may return fewer bytes than
                    // requested, which would make the CRC check fail spuriously.
                    reader.readFully(buffer.getBuffer(), 0, urlDataLen);
                    crc.reset();
                    crc.update(buffer.getBuffer(), 0, urlDataLen);

                    long computedValue = crc.getValue();

                    // validate crc values ...
                    if (computedValue != urlDataCRC) {
                        LOG.error("CRC Mismatch Detected during HDFS transfer in CrawlLog:"
                                + crawlLogPath.getAbsolutePath() + " Checkpoint Id:" + checkpointId
                                + " FilePosition:" + lastReadPosition);
                        // advance one byte and re-sync on the next marker.
                        inputStream.seek(lastReadPosition + 1);
                    } else {
                        // allocate a crawl url data structure
                        CrawlURL url = new CrawlURL();
                        DataInputStream bufferReader = new DataInputStream(
                                new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen));
                        // populate it from the (in memory) data stream
                        url.readFields(bufferReader);
                        try {
                            // and write out appropriate sequence file entries ...
                            writer.writeCrawlURLItem(new Text(url.getUrl()), url);
                        } catch (IOException e) {
                            LOG.error("Failed to write CrawlURL to SequenceFileWriter with Exception:"
                                    + CCStringUtils.stringifyException(e));
                            throw new URLWriterException();
                        }
                    }
                } catch (URLWriterException e) {
                    // writer failures are fatal for the transfer; propagate.
                    LOG.error("Caught URLRewriter Exception! - Throwing to outer layer!");
                    throw e;
                } catch (Exception e) {
                    // a corrupt entry is logged and skipped; the loop re-syncs.
                    LOG.error("Ignoring Error Processing CrawlLog Entry at Position:" + lastReadPosition
                            + " Exception:" + CCStringUtils.stringifyException(e));
                }
            } else {
                // no further sync marker before EOF: done.
                break;
            }
        }
    } catch (EOFException e) {
        LOG.error("Caught EOF Exception during read of local CrawlLog:" + crawlLogPath.getAbsolutePath()
                + " Checkpoint Id:" + checkpointId + " FilePosition:" + lastReadPosition);
    } catch (IOException e) {
        LOG.error(CCStringUtils.stringifyException(e));
        throw e;
    } finally {
        if (inputStream != null)
            inputStream.close();
    }
}

From source file:org.commoncrawl.service.crawler.CrawlList.java

/**
 * Reads the log file header from the very beginning of the file.
 *
 * @param file the open log file; its pointer is repositioned to offset 0 first
 * @param header the header object populated from the file contents
 * @return the file position immediately following the header
 * @throws IOException if the header cannot be read
 */
private static long readLogFileHeader(RandomAccessFile file, LogFileHeader header) throws IOException {
    // Headers always live at the start of the file, regardless of the
    // pointer's current position.
    file.seek(0);
    header.readHeader(file);
    // The pointer now sits just past the header.
    return file.getFilePointer();
}