Example usage for java.io RandomAccessFile skipBytes

Introduction

On this page you can find example usage for java.io RandomAccessFile skipBytes.

Prototype

public int skipBytes(int n) throws IOException 

Document

Attempts to skip over n bytes of input, discarding the skipped bytes. The method may skip a smaller number of bytes, possibly zero; the actual number of bytes skipped is returned.
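
Because of this, callers that must skip an exact number of bytes should loop on the return value. A minimal sketch of that pattern (skipFully is a hypothetical helper, not part of the API):

import java.io.EOFException;
import java.io.IOException;
import java.io.RandomAccessFile;

public static void skipFully(RandomAccessFile raf, int n) throws IOException {
    // skipBytes may skip fewer bytes than requested and returns 0 at end of file
    while (n > 0) {
        int skipped = raf.skipBytes(n);
        if (skipped <= 0) {
            throw new EOFException("end of file reached before skipping all requested bytes");
        }
        n -= skipped;
    }
}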

Usage

From source file:Main.java

public static void main(String[] args) {
    try {

        RandomAccessFile raf = new RandomAccessFile("c:/test.txt", "rw");

        raf.writeUTF("Hello World from java2s.com");

        // set the file pointer to position 0
        raf.seek(0);

        // print the string
        System.out.println(raf.readUTF());

        // set the file pointer to position 0
        raf.seek(0);

        // attempt to skip 10 bytes
        System.out.println(raf.skipBytes(10));

        System.out.println(raf.readLine());

        // set the file pointer to position 8
        raf.seek(8);

        // attempt to skip 10 more bytes
        System.out.println(raf.skipBytes(10));
        raf.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }

}
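
Note that writeUTF writes a two-byte length prefix before the modified UTF-8 data, so the first skipBytes(10) call lands the file pointer after only the eighth character of the string, and readLine prints the remainder, "rld from java2s.com". Each skipBytes call itself prints 10, the number of bytes actually skipped.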

From source file:com.sangupta.snowpack.SnowpackRecover.java

/**
 * Try and recover from a chunk.
 * 
 * @param chunkID the numeric ID of the chunk being recovered
 * @param chunkFile the chunk file to read from disk
 * @param metadataDB the metadata database in which to save recovered entries
 * @return the recovered chunk info, or null if an invalid descriptor is found
 * @throws IOException if reading the chunk file fails
 */
private static ChunkInfo recoverChunkInfo(final int chunkID, final File chunkFile,
        SnowpackMetadataDB metadataDB) throws IOException {
    // open the file for reading
    RandomAccessFile raf = new RandomAccessFile(chunkFile, "r");

    // read the length first
    int nameLength, length, terminator, headerLength, numFiles = 0;
    long offset;

    List<FlakeMetadata> metas = new ArrayList<FlakeMetadata>();

    try {
        while (raf.getFilePointer() < raf.length()) {
            offset = raf.getFilePointer();

            nameLength = raf.readInt();
            byte[] name = new byte[nameLength];
            raf.readFully(name);

            length = raf.readInt();
            raf.readLong(); // 8-byte header field; the value itself is not needed for recovery
            raf.skipBytes(length); // jump over the flake payload without reading it

            terminator = raf.readByte();
            if (terminator != 0) {
                System.out.print(" invalid descriptor found...");
                return null;
            }

            headerLength = 4 + name.length + 4 + 8;

            numFiles++;

            metas.add(new FlakeMetadata(new String(name), nameLength, chunkID, offset, headerLength));
        }
    } finally {
        raf.close();
    }

    // all clear for recovery

    // save all metadata in new DB
    for (FlakeMetadata meta : metas) {
        metadataDB.save(meta);
    }

    // return chunk info
    ChunkInfo info = new ChunkInfo();
    info.chunkID = chunkID;
    info.numFiles = numFiles;
    info.writePointer = -1;

    return info;
}

From source file:TarList.java

public TarEntry(RandomAccessFile is) throws IOException, TarException {

    fileOffset = is.getFilePointer();

    // read() returns -1 at EOF
    if (is.read(name) < 0)
        throw new EOFException();
    // Tar pads to block boundary with nulls.
    if (name[0] == '\0')
        throw new EOFException();
    // OK, read remaining fields.
    is.read(mode);
    is.read(uid);
    is.read(gid);
    is.read(size);
    is.read(mtime);
    is.read(chksum);
    type = is.readByte();
    is.read(linkName);
    is.read(magic);
    is.read(uname);
    is.read(gname);
    is.read(devmajor);
    is.read(devminor);

    // The header fields read above total less than one full record,
    // so skip ahead to the next record boundary.
    is.skipBytes((int) (TarFile.RECORDSIZE - (is.getFilePointer() % TarFile.RECORDSIZE)));

    // TODO if checksum() fails,
    //   throw new TarException("Failed to find next header");

}
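
The modulo expression above is a general alignment idiom: it measures how far the file pointer has passed the last record boundary and skips the difference. A standalone sketch of the same idea, where RECORD_SIZE is a stand-in for TarFile.RECORDSIZE and a guard avoids skipping a whole record when the pointer is already aligned:

import java.io.IOException;
import java.io.RandomAccessFile;

static final int RECORD_SIZE = 512; // stand-in for TarFile.RECORDSIZE

// Advance the file pointer to the next RECORD_SIZE boundary, if it is not on one.
static void alignToRecord(RandomAccessFile raf) throws IOException {
    long remainder = raf.getFilePointer() % RECORD_SIZE;
    if (remainder != 0) {
        raf.skipBytes((int) (RECORD_SIZE - remainder));
    }
}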

From source file:org.commoncrawl.service.crawler.CrawlLog.java

private static void transferLocalCheckpointLog(File crawlLogPath, HDFSCrawlURLWriter writer, long checkpointId)
        throws IOException {

    // and open the crawl log file ...
    RandomAccessFile inputStream = null;

    IOException exception = null;

    CRC32 crc = new CRC32();
    CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 17);
    byte[] syncBytesBuffer = new byte[SYNC_BYTES_SIZE];

    // save position for potential debug output.
    long lastReadPosition = 0;

    try {
        inputStream = new RandomAccessFile(crawlLogPath, "rw");
        // and a data input stream ...
        RandomAccessFile reader = inputStream;
        // seek to zero
        reader.seek(0L);

        // read the header ...
        LogFileHeader header = readLogFileHeader(reader);

        // read a crawl url from the stream...

        while (inputStream.getFilePointer() < header._fileSize) {

            if (seekToNextSyncBytesPos(syncBytesBuffer, reader, header._fileSize)) {

                try {
                    lastReadPosition = inputStream.getFilePointer();

                    // skip sync
                    inputStream.skipBytes(SYNC_BYTES_SIZE);

                    // read length ...
                    int urlDataLen = reader.readInt();
                    long urlDataCRC = reader.readLong();

                    if (urlDataLen > buffer.getBuffer().length) {
                        buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536);
                    }
                    reader.read(buffer.getBuffer(), 0, urlDataLen);
                    crc.reset();
                    crc.update(buffer.getBuffer(), 0, urlDataLen);

                    long computedValue = crc.getValue();

                    // validate crc values ...
                    if (computedValue != urlDataCRC) {
                        LOG.error("CRC Mismatch Detected during HDFS transfer in CrawlLog:"
                                + crawlLogPath.getAbsolutePath() + " Checkpoint Id:" + checkpointId
                                + " FilePosition:" + lastReadPosition);
                        inputStream.seek(lastReadPosition + 1);
                    } else {
                        // allocate a crawl url data structure
                        CrawlURL url = new CrawlURL();
                        DataInputStream bufferReader = new DataInputStream(
                                new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen));
                        // populate it from the (in memory) data stream
                        url.readFields(bufferReader);
                        try {
                            // and write out appropriate sequence file entries ...
                            writer.writeCrawlURLItem(new Text(url.getUrl()), url);
                        } catch (IOException e) {
                            LOG.error("Failed to write CrawlURL to SequenceFileWriter with Exception:"
                                    + CCStringUtils.stringifyException(e));
                            throw new URLWriterException();
                        }
                    }
                } catch (URLWriterException e) {
                    LOG.error("Caught URLRewriter Exception! - Throwing to outer layer!");
                    throw e;
                } catch (Exception e) {
                    LOG.error("Ignoring Error Processing CrawlLog Entry at Position:" + lastReadPosition
                            + " Exception:" + CCStringUtils.stringifyException(e));
                }
            } else {
                break;
            }
        }
    } catch (EOFException e) {
        LOG.error("Caught EOF Exception during read of local CrawlLog:" + crawlLogPath.getAbsolutePath()
                + " Checkpoint Id:" + checkpointId + " FilePosition:" + lastReadPosition);
    } catch (IOException e) {
        LOG.error(CCStringUtils.stringifyException(e));
        exception = e;
        throw e;
    } finally {
        if (inputStream != null)
            inputStream.close();
    }
}

From source file:org.commoncrawl.service.crawler.CrawlLog.java

public static void walkCrawlLogFile(File crawlLogPath, long startOffset) throws IOException {

    // and open the crawl log file ...
    RandomAccessFile inputStream = null;

    IOException exception = null;

    CRC32 crc = new CRC32();
    CustomByteArrayOutputStream buffer = new CustomByteArrayOutputStream(1 << 17);
    byte[] syncBytesBuffer = new byte[SYNC_BYTES_SIZE];

    // save position for potential debug output.
    long lastReadPosition = 0;

    try {
        inputStream = new RandomAccessFile(crawlLogPath, "rw");

        // and a data input stream ...
        RandomAccessFile reader = inputStream;
        // seek to zero
        reader.seek(0L);

        // read the header ...
        LogFileHeader header = readLogFileHeader(reader);

        System.out.println("Header ItemCount:" + header._itemCount + " FileSize:" + header._fileSize);

        if (startOffset != 0L) {
            System.out.println("Preseeking to:" + startOffset);
            reader.seek(startOffset);
        }

        Configuration conf = new Configuration();

        // read a crawl url from the stream...

        long recordCount = 0;
        while (inputStream.getFilePointer() < header._fileSize) {

            // System.out.println("PRE-SYNC SeekPos:"+
            // inputStream.getFilePointer());
            if (seekToNextSyncBytesPos(syncBytesBuffer, reader, header._fileSize)) {

                // System.out.println("POST-SYNC SeekPos:"+
                // inputStream.getFilePointer());

                lastReadPosition = inputStream.getFilePointer();

                // skip sync
                inputStream.skipBytes(SYNC_BYTES_SIZE);

                // read length ...
                int urlDataLen = reader.readInt();
                long urlDataCRC = reader.readLong();

                if (urlDataLen > buffer.getBuffer().length) {
                    buffer = new CustomByteArrayOutputStream(((urlDataLen / 65536) + 1) * 65536);
                }
                reader.read(buffer.getBuffer(), 0, urlDataLen);
                crc.reset();
                crc.update(buffer.getBuffer(), 0, urlDataLen);

                long computedValue = crc.getValue();

                // validate crc values ...
                if (computedValue != urlDataCRC) {
                    LOG.error("CRC Mismatch Detected during HDFS transfer in CrawlLog:"
                            + crawlLogPath.getAbsolutePath() + " FilePosition:" + lastReadPosition);
                    inputStream.seek(lastReadPosition + 1);
                } else {
                    if (recordCount++ % 10000 == 0) {
                        // allocate a crawl url data structure
                        CrawlURL url = new CrawlURL();
                        DataInputStream bufferReader = new DataInputStream(
                                new ByteArrayInputStream(buffer.getBuffer(), 0, urlDataLen));
                        // populate it from the (in memory) data stream
                        url.readFields(bufferReader);

                        System.out.println("Record:" + recordCount + " At:" + lastReadPosition + " URL:"
                                + url.getUrl() + " BuffSize:" + urlDataLen + " ContentLen:"
                                + url.getContentRaw().getCount() + " LastModified:"
                                + new Date(url.getLastAttemptTime()).toString());
                    }
                }
            } else {
                break;
            }
        }
    } catch (EOFException e) {
        LOG.error("Caught EOF Exception during read of local CrawlLog:" + crawlLogPath.getAbsolutePath()
                + " FilePosition:" + lastReadPosition);
    } catch (IOException e) {
        LOG.error(CCStringUtils.stringifyException(e));
        exception = e;
        throw e;
    } finally {
        if (inputStream != null)
            inputStream.close();
    }
}
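
Both CrawlLog methods walk the same record layout: seekToNextSyncBytesPos locates the next sync marker, skipBytes(SYNC_BYTES_SIZE) hops over it, and each record then carries a 4-byte length and an 8-byte CRC ahead of its payload. On a CRC mismatch the reader re-seeks to lastReadPosition + 1 and resynchronizes on the next sync marker rather than abandoning the file.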

From source file:org.kuali.coeus.sys.framework.controller.interceptor.PerformanceMeasurementFilter.java

private void insertLine(File file, HttpSample httpSample) throws IOException {
    RandomAccessFile randomAccessFile = null;
    try {
        randomAccessFile = new RandomAccessFile(file, "rw");
        randomAccessFile.skipBytes((int) (file.length() - "\n</httpSamples>".getBytes().length));
        randomAccessFile.write("\n".getBytes());
        randomAccessFile.write(httpSample.toXML().getBytes());
        randomAccessFile.write("\n".getBytes());
        randomAccessFile.write("</httpSamples>".getBytes());
    } finally {
        if (randomAccessFile != null) {
            randomAccessFile.close();
        }
    }
}
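
Here skipBytes serves as a forward-only seek: with the file freshly opened and the pointer at position 0, it moves the writer to just before the trailing "\n</httpSamples>" tag, so the new sample overwrites the old closing tag and the tag is then written back. Since the target offset is computed from the file length, an equivalent positioning could also be done with seek.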