Example usage for java.io DataInputStream reset

List of usage examples for java.io DataInputStream reset

Introduction

On this page you can find example usages of java.io.DataInputStream.reset().

Prototype

public synchronized void reset() throws IOException 

Source Link

Document

Repositions this stream to the position at the time the mark method was last called on this input stream.

Usage

From source file:cn.xiongyihui.wificar.MjpegStream.java

/**
 * Reads a single MJPEG frame from the stream and decodes it into a Bitmap.
 *
 * Relies on mark/reset: the stream is marked at the current position, scanned
 * forward to find the JPEG start-of-image marker (giving the header length),
 * then rewound so the header can be re-read into its own buffer.
 *
 * @param in stream positioned at the start of an MJPEG part; must support mark/reset
 * @return the decoded frame
 * @throws IOException if reading from the stream fails
 */
public Bitmap readFrame(DataInputStream in) throws IOException {
    int mContentLength = -1;

    // Mark so we can rewind after scanning ahead for the SOI marker.
    in.mark(FRAME_MAX_LENGTH);
    int headerLen = getStartOfSequence(in, SOI_MARKER);
    in.reset();
    byte[] header = new byte[headerLen];
    in.readFully(header);
    try {
        // Preferred path: the part header carries an explicit content length.
        mContentLength = parseContentLength(header);
    } catch (NumberFormatException nfe) {
        // Fallback: no parsable content length — scan forward for the JPEG
        // end-of-image marker instead. This consumes stream bytes, but the
        // reset() below rewinds to the mark, so the position stays correct.
        mContentLength = getEndOfSeqeunce(in, EOF_MARKER);
    }
    // Rewind to the mark, skip past the header, and read exactly one frame.
    in.reset();
    byte[] frameData = new byte[mContentLength];
    in.skipBytes(headerLen);
    in.readFully(frameData);
    return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}

From source file:J2MESortMixedRecordDataTypeExample.java

/**
 * Handles the Exit and Start commands for this MIDlet.
 *
 * On Start: writes three (name, number) pairs into a RecordStore, reads them
 * back in comparator order, shows the result in an Alert, and then deletes
 * the record store. Any failure is reported through an error Alert.
 *
 * Fix: removed the unused local {@code inputString} array.
 *
 * @param command the command that was invoked
 * @param displayable the Displayable the command occurred on (unused)
 */
public void commandAction(Command command, Displayable displayable) {
    if (command == exit) {
        destroyApp(true);
        notifyDestroyed();
    } else if (command == start) {
        try {
            recordstore = RecordStore.openRecordStore("myRecordStore", true);
            byte[] outputRecord;
            String outputString[] = { "Mary", "Bob", "Adam" };
            int outputInteger[] = { 15, 10, 5 };
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            DataOutputStream outputDataStream = new DataOutputStream(outputStream);
            for (int x = 0; x < 3; x++) {
                // Serialize one (name, number) pair, store it as a record,
                // then clear the byte buffer for the next pair.
                outputDataStream.writeUTF(outputString[x]);
                outputDataStream.writeInt(outputInteger[x]);
                outputDataStream.flush();
                outputRecord = outputStream.toByteArray();
                recordstore.addRecord(outputRecord, 0, outputRecord.length);
                outputStream.reset();
            }
            outputStream.close();
            outputDataStream.close();

            byte[] byteInputData = new byte[300];
            ByteArrayInputStream inputStream = new ByteArrayInputStream(byteInputData);
            DataInputStream inputDataStream = new DataInputStream(inputStream);
            StringBuffer buffer = new StringBuffer();
            comparator = new Comparator();
            recordEnumeration = recordstore.enumerateRecords(null, comparator, false);
            while (recordEnumeration.hasNextElement()) {
                // Copy the next record into byteInputData, then decode it
                // through the DataInputStream layered on top of that buffer.
                recordstore.getRecord(recordEnumeration.nextRecordId(), byteInputData, 0);
                buffer.append(inputDataStream.readUTF());
                buffer.append(inputDataStream.readInt());
                buffer.append("\n");
                // reset() rewinds the backing ByteArrayInputStream to its
                // start so the next record is decoded from offset 0 again.
                inputDataStream.reset();
            }
            alert = new Alert("Reading", buffer.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
            inputDataStream.close();
            inputStream.close();
            recordstore.closeRecordStore();
            if (RecordStore.listRecordStores() != null) {
                RecordStore.deleteRecordStore("myRecordStore");
                comparator.compareClose();
                recordEnumeration.destroy();
            }
        } catch (Exception error) {
            alert = new Alert("Error Removing", error.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
        }
    }
}

From source file:SortMixedRecordDataTypeExample.java

/**
 * Handles the Exit and Start commands for this MIDlet.
 *
 * On Start: opens a RecordStore, writes three (name, number) pairs into it,
 * reads them back in comparator order into an Alert, closes the store, and
 * finally deletes it. Each phase has its own try/catch so failures are
 * reported with a phase-specific Alert title.
 *
 * Fix: removed the unused locals {@code inputString} and {@code z}.
 *
 * @param command the command that was invoked
 * @param displayable the Displayable the command occurred on (unused)
 */
public void commandAction(Command command, Displayable displayable) {
    if (command == exit) {
        destroyApp(true);
        notifyDestroyed();
    } else if (command == start) {
        try {
            recordstore = RecordStore.openRecordStore("myRecordStore", true);
        } catch (Exception error) {
            alert = new Alert("Error Creating", error.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
        }
        try {
            byte[] outputRecord;
            String outputString[] = { "Mary", "Bob", "Adam" };
            int outputInteger[] = { 15, 10, 5 };
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            DataOutputStream outputDataStream = new DataOutputStream(outputStream);
            for (int x = 0; x < 3; x++) {
                // Serialize one (name, number) pair, store it as a record,
                // then clear the byte buffer for the next pair.
                outputDataStream.writeUTF(outputString[x]);
                outputDataStream.writeInt(outputInteger[x]);
                outputDataStream.flush();
                outputRecord = outputStream.toByteArray();
                recordstore.addRecord(outputRecord, 0, outputRecord.length);
                outputStream.reset();
            }
            outputStream.close();
            outputDataStream.close();
        } catch (Exception error) {
            alert = new Alert("Error Writing", error.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
        }
        try {
            byte[] byteInputData = new byte[300];
            ByteArrayInputStream inputStream = new ByteArrayInputStream(byteInputData);
            DataInputStream inputDataStream = new DataInputStream(inputStream);
            StringBuffer buffer = new StringBuffer();
            comparator = new Comparator();
            recordEnumeration = recordstore.enumerateRecords(null, comparator, false);
            while (recordEnumeration.hasNextElement()) {
                // Copy the next record into byteInputData, then decode it
                // through the DataInputStream layered on top of that buffer.
                recordstore.getRecord(recordEnumeration.nextRecordId(), byteInputData, 0);
                buffer.append(inputDataStream.readUTF());
                buffer.append(inputDataStream.readInt());
                buffer.append("\n");
                // reset() rewinds the backing ByteArrayInputStream to its
                // start so the next record is decoded from offset 0 again.
                inputDataStream.reset();
            }
            alert = new Alert("Reading", buffer.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
            inputDataStream.close();
            inputStream.close();
        } catch (Exception error) {
            alert = new Alert("Error Reading", error.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
        }
        try {
            recordstore.closeRecordStore();
        } catch (Exception error) {
            alert = new Alert("Error Closing", error.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
        }
        if (RecordStore.listRecordStores() != null) {
            try {
                RecordStore.deleteRecordStore("myRecordStore");
                comparator.compareClose();
                recordEnumeration.destroy();
            } catch (Exception error) {
                alert = new Alert("Error Removing", error.toString(), null, AlertType.WARNING);
                alert.setTimeout(Alert.FOREVER);
                display.setCurrent(alert);
            }
        }
    }
}

From source file:org.apache.axiom.util.blob.WritableBlobTestBase.java

/**
 * Verifies mark/reset on the blob's input stream: after reading past the
 * first 2000-byte chunk, the second chunk is marked, read, rewound, and read
 * again; both reads must match the bytes that were originally written.
 */
public void testMarkReset() throws IOException {
    byte[] expectedFirst = new byte[2000];
    byte[] expectedSecond = new byte[2000];
    random.nextBytes(expectedFirst);
    random.nextBytes(expectedSecond);
    WritableBlob blob = createBlob();
    try {
        OutputStream out = blob.getOutputStream();
        out.write(expectedFirst);
        out.write(expectedSecond);
        out.close();
        DataInputStream in = new DataInputStream(blob.getInputStream());
        byte[] actualFirst = new byte[expectedFirst.length];
        byte[] actualSecond = new byte[expectedSecond.length];
        in.readFully(actualFirst);
        // Mark at the start of the second chunk, read it, rewind, re-read.
        in.mark(expectedSecond.length);
        in.readFully(actualSecond);
        in.reset();
        in.readFully(actualSecond);
        assertTrue(Arrays.equals(expectedFirst, actualFirst));
        assertTrue(Arrays.equals(expectedSecond, actualSecond));
    } finally {
        releaseBlob(blob);
    }
}

From source file:org.apache.fop.fonts.type1.PFBParser.java

/**
 * Parses a PFB file into a PFBData object.
 *
 * Peeks at the first byte via mark/reset — so nothing is consumed — to choose
 * between the PC format (leading byte 128) and the raw format.
 *
 * @param in InputStream to load the PFB file from
 * @return PFBData memory representation of the font
 * @throws IOException In case of an I/O problem
 */
public PFBData parsePFB(InputStream in) throws IOException {
    PFBData result = new PFBData();
    BufferedInputStream buffered = new BufferedInputStream(in);
    DataInputStream data = new DataInputStream(buffered);
    // Peek at the lead byte without consuming it.
    data.mark(32);
    int leadByte = data.readUnsignedByte();
    data.reset();
    if (leadByte == 128) {
        result.setPFBFormat(PFBData.PFB_PC);
        parsePCFormat(result, data);
    } else {
        result.setPFBFormat(PFBData.PFB_RAW);
        parseRAWFormat(result, buffered);
    }
    return result;
}

From source file:org.apache.hadoop.hbase.HRegionInfo.java

/**
 * Parses an HRegionInfo instance from the passed in stream.  Presumes the HRegionInfo was
 * serialized to the stream with {@link #toDelimitedByteArray()}.
 *
 * Reads just enough bytes to check for the protobuf magic prefix; if present,
 * the remainder is parsed as a delimited protobuf message. Otherwise the data
 * is presumed to be Writable-serialized and the already-consumed prefix is put
 * back — via reset() when mark is supported, or by prepending it with a
 * SequenceInputStream when it is not.
 *
 * @param in stream to parse from
 * @return An instance of HRegionInfo.
 * @throws IOException if fewer bytes than the magic prefix can be read, or reading fails
 */
public static HRegionInfo parseFrom(final DataInputStream in) throws IOException {
    // I need to be able to move back in the stream if this is not a pb serialization so I can
    // do the Writable decoding instead.
    int pblen = ProtobufUtil.lengthOfPBMagic();
    byte[] pbuf = new byte[pblen];
    if (in.markSupported()) { // read it with mark() so we can rewind below
        in.mark(pblen);
    }
    int read = in.read(pbuf); // assumption: if Writable serialization, it should be longer than pblen.
    if (read != pblen)
        throw new IOException("read=" + read + ", wanted=" + pblen);
    if (ProtobufUtil.isPBMagicPrefix(pbuf)) {
        return convert(HBaseProtos.RegionInfo.parseDelimitedFrom(in));
    } else {
        // Presume Writables.  Need to reset the stream since it didn't start w/ pb.
        if (in.markSupported()) {
            in.reset();
            HRegionInfo hri = new HRegionInfo();
            hri.readFields(in);
            return hri;
        } else {
            // We cannot use BufferedInputStream: it consumes more than we read from the underlying IS.
            // Instead, re-prepend the bytes already consumed into pbuf.
            ByteArrayInputStream bais = new ByteArrayInputStream(pbuf);
            SequenceInputStream sis = new SequenceInputStream(bais, in); // concatenate input streams
            HRegionInfo hri = new HRegionInfo();
            hri.readFields(new DataInputStream(sis));
            return hri;
        }
    }
}

From source file:org.apache.hadoop.hdfs.qjournal.client.URLLogInputStream.java

/**
 * Read the header of the fsedit log.
 *
 * Marks the stream, probes a single byte to detect an empty log, then rewinds
 * and reads the full 4-byte version word.
 *
 * @param in fsedit stream
 * @return the edit log version number, or 0 if the stream is empty
 * @throws IOException if an I/O error occurs
 * @throws LogHeaderCorruptException if the version is not supported
 */
static int readLogVersion(DataInputStream in) throws IOException, LogHeaderCorruptException {
    int logVersion = 0;
    // Read log file version. Could be missing.
    in.mark(4);
    // If edits log is greater than 2G, available method will return negative
    // numbers, so we avoid having to call available
    boolean available = true;
    try {
        logVersion = in.readByte();
    } catch (EOFException e) {
        // Empty stream: fall through and report version 0.
        available = false;
    }

    if (available) {
        // Rewind to the mark and read the complete 4-byte version.
        in.reset();
        logVersion = in.readInt();
        if (logVersion < FSConstants.LAYOUT_VERSION) { // future version
            // NOTE(review): the comparison treats values below the current
            // LAYOUT_VERSION as "future" — presumably layout versions are
            // negative and decrease over time; confirm against FSConstants.
            throw new LogHeaderCorruptException("Unexpected version of the file system log file: " + logVersion
                    + ". Current version = " + FSConstants.LAYOUT_VERSION + ".");
        }
    }
    return logVersion;
}

From source file:org.apache.hadoop.hdfs.server.namenode.bookkeeper.BookKeeperEditLogInputStream.java

/**
 * Safely reads the log version from the stream. Logic is exactly the same
 * as in the equivalent {@link EditLogFileInputStream} method.
 * @see EditLogFileInputStream#readLogVersion(DataInputStream)
 * @return The log version or 0 if stream is empty
 */
private static int readLogVersion(DataInputStream in) throws IOException {
    in.mark(4);
    // Probe a single byte first so an empty stream is reported as version 0
    // rather than surfacing an EOFException to the caller.
    boolean nonEmpty;
    try {
        in.readByte();
        nonEmpty = true;
    } catch (EOFException e) {
        nonEmpty = false;
    }

    int version = 0;
    if (nonEmpty) {
        // Rewind to the mark and read the complete 4-byte version word.
        in.reset();
        version = in.readInt();
        if (version < FSConstants.LAYOUT_VERSION) {
            throw new LedgerHeaderCorruptException("Unexpected version of the log segment in the ledger: "
                    + version + ". Current version is " + FSConstants.LAYOUT_VERSION + ".");
        }
    }
    return version;
}

From source file:org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer.java

/**
 * Check an fsimage datainputstream's version number.
 *
 * The datainput stream is returned at the same point as it was passed in;
 * this method has no effect on the datainputstream's read pointer.
 *
 * @param in Datainputstream of fsimage
 * @return Filesystem layout version of fsimage represented by stream
 * @throws IOException If problem reading from in
 */
private int findImageVersion(DataInputStream in) throws IOException {
    in.mark(42); // arbitrary mark limit; only the 4 version bytes are read
    final int layoutVersion = in.readInt();
    in.reset(); // restore the caller's read position
    return layoutVersion;
}

From source file:org.apache.synapse.util.TemporaryDataTest.java

/**
 * Exercises mark/reset on the stream returned by TemporaryData: after reading
 * past the first chunk, mark, read the second chunk, reset, and read it again.
 * Both reads must yield exactly the bytes that were written.
 */
public void testMarkReset() throws IOException {
    byte[] written1 = new byte[2000];
    byte[] written2 = new byte[2000];
    random.nextBytes(written1);
    random.nextBytes(written2);
    TemporaryData tmp = new TemporaryData(16, 512, "test", ".dat");
    OutputStream out = tmp.getOutputStream();
    out.write(written1);
    out.write(written2);
    out.close();
    DataInputStream in = new DataInputStream(tmp.getInputStream());
    byte[] read1 = new byte[written1.length];
    byte[] read2 = new byte[written2.length];
    in.readFully(read1);
    in.mark(written2.length); // remember the start of the second chunk
    in.readFully(read2);
    in.reset();               // rewind to the mark...
    in.readFully(read2);      // ...and read the same region again
    assertTrue(Arrays.equals(written1, read1));
    assertTrue(Arrays.equals(written2, read2));
}