Example usage for java.io DataInputStream readLong

Introduction

This page collects example usages of the java.io.DataInputStream method readLong(), drawn from open-source projects.

Prototype

public final long readLong() throws IOException 

Document

See the general contract of the readLong method of DataInput.
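
For orientation, readLong() reads eight bytes from the stream and assembles them into a big-endian long, throwing EOFException if the stream ends before eight bytes have been read. The following standalone sketch (a minimal example written for this page, not taken from any of the projects below) round-trips a value through writeLong and readLong:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadLongDemo {
    public static void main(String[] args) throws IOException {
        // write a long into an in-memory buffer
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            out.writeLong(1234567890123456789L);
        }

        // read it back; readLong() consumes exactly eight bytes (big-endian)
        // and throws EOFException if fewer than eight remain
        try (DataInputStream in = new DataInputStream(
                new ByteArrayInputStream(buffer.toByteArray()))) {
            System.out.println(in.readLong()); // prints 1234567890123456789
        }
    }
}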

Usage

From source file:org.hydracache.server.data.versioning.VectorClockVersionFactory.java
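
Here readLong() deserializes the value and the timestamp of each vector clock entry, after readInt() has supplied the entry count.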

@Override
public Version readObject(final DataInputStream dataIn) throws IOException {
    Validate.notNull(dataIn, "dataIn can not be null");

    final int entriesCount = dataIn.readInt();
    final List<VectorClockEntry> vectorClockEntries = new ArrayList<VectorClockEntry>();

    for (int i = 0; i < entriesCount; i++) {
        final Identity nodeId = getIdentityMarshaller().readObject(dataIn);
        final long value = dataIn.readLong();
        final long timeStamp = dataIn.readLong();
        vectorClockEntries.add(new VectorClockEntry(nodeId, value, timeStamp));
    }

    return new VectorClock(vectorClockEntries);
}

From source file:org.apache.nifi.distributed.cache.client.DistributedMapCacheClientService.java
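
In NiFi's distributed cache client, readLong() reads the revision number that precedes the length-delimited response body; a negative revision signals that the key was not found.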

@Override
public <K, V> CacheEntry<K, V> fetch(final K key, final Serializer<K> keySerializer,
        final Deserializer<V> valueDeserializer) throws IOException {
    return withCommsSession(session -> {
        validateProtocolVersion(session, 2);

        final DataOutputStream dos = new DataOutputStream(session.getOutputStream());
        dos.writeUTF("fetch");

        serialize(key, keySerializer, dos);
        dos.flush();

        // read response
        final DataInputStream dis = new DataInputStream(session.getInputStream());
        final long revision = dis.readLong();
        final byte[] responseBuffer = readLengthDelimitedResponse(dis);

        if (revision < 0) {
            // This indicates that key was not found.
            return null;
        }

        final StandardCacheEntry<K, V> standardCacheEntry = new StandardCacheEntry<>(key,
                valueDeserializer.deserialize(responseBuffer), revision);
        return standardCacheEntry;
    });
}

From source file:org.csc.phynixx.loggersystem.logrecord.XADataLogger.java
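
In this recovery helper, readLong() is called once to consume a redundant 8-byte field at the head of the record header before the ordinal is read.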

/**
 * A new data record is created and added to the dataRecorder.
 *
 * @param dataRecorder
 *            the DataRecorder that operates on the current physical
 *            logger
 * @param logRecordType
 *            the type of the recovered log record
 * @param fieldData
 *            the raw record fields; field 0 is the header
 */
private void recoverData(PhynixxXADataRecorder dataRecorder, XALogRecordType logRecordType,
        byte[][] fieldData) {
    // this argument check is only performed when debug logging is enabled
    if (LOGGER.isDebugEnabled()) {
        if (fieldData == null || fieldData.length == 0) {
            throw new IllegalArgumentException("Record fields are empty");
        }
    }
    // field 0 is header
    byte[] headerData = fieldData[0];
    DataInputStream io = new DataInputStream(new ByteArrayInputStream(headerData));
    try {
        // the first long is redundant; just read it and skip it
        io.readLong();

        int ordinal = io.readInt();
        byte[][] content = null;

        if (fieldData.length > 1) {
            content = new byte[fieldData.length - 1][];
            for (int i = 0; i < fieldData.length - 1; i++) {
                content[i] = fieldData[i + 1];
            }
        } else {
            content = new byte[][] {};
        }

        PhynixxDataRecord msg = new PhynixxDataRecord(dataRecorder.getXADataRecorderId(), ordinal,
                logRecordType, content);
        dataRecorder.addMessage(msg);

    } catch (Exception e) {
        throw new DelegatedRuntimeException(e);
    } finally {
        if (io != null) {
            IOUtils.closeQuietly(io);
        }
    }
}

From source file:org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService.java
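
This leveldb-backed state store restores a persisted delegation token: the identifier's fields are read first, then readLong() supplies the renewal date.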

private void loadToken(HistoryServerState state, byte[] data) throws IOException {
    MRDelegationTokenIdentifier tokenId = new MRDelegationTokenIdentifier();
    long renewDate;
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
    try {
        tokenId.readFields(in);
        renewDate = in.readLong();
    } finally {
        IOUtils.cleanup(LOG, in);
    }
    state.tokenState.put(tokenId, renewDate);
}

From source file:gobblin.writer.SimpleDataWriterTest.java
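
This test checks that SimpleDataWriter prepends each record's size as an 8-byte long; the verification loop reads each size back with readLong() before comparing the record's bytes.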

/**
 * Prepend the size to each record without delimiting the records. Each record
 * should be prefixed with its own size, and the bytes written should
 * include the prepended bytes.
 */
@Test
public void testPrependSizeWithoutDelimiter() throws IOException {
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_PREPEND_SIZE, true);
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_DELIMITER, "");
    SimpleDataWriter writer = buildSimpleDataWriter();
    byte[] rec1 = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 };
    byte[] rec2 = { 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 };
    byte[] rec3 = { 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45 };
    byte[][] records = { rec1, rec2, rec3 };

    writer.write(rec1);
    writer.write(rec2);
    writer.write(rec3);

    writer.close();
    writer.commit();

    Assert.assertEquals(writer.recordsWritten(), 3);
    Assert.assertEquals(writer.bytesWritten(), rec1.length + rec2.length + rec3.length + (Long.SIZE / 8 * 3));

    File outputFile = new File(writer.getOutputFilePath());
    DataInputStream dis = new DataInputStream(new FileInputStream(outputFile));
    for (int i = 0; i < 3; i++) {
        long size = dis.readLong();
        Assert.assertEquals(size, records[i].length);
        for (int j = 0; j < size; j++) {
            Assert.assertEquals(dis.readByte(), records[i][j]);
        }
    }
}

From source file:gobblin.writer.SimpleDataWriterTest.java
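
The companion test covers the delimited case: readLong() again reads the prepended size, which now also counts the trailing newline delimiter.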

/**
 * Prepend the size to each record and delimit the records. Each record
 * should be prefixed with its own size, and the bytes written should
 * include the prepended bytes.
 */
@Test
public void testPrependSizeWithDelimiter() throws IOException {
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_PREPEND_SIZE, true);
    SimpleDataWriter writer = buildSimpleDataWriter();
    byte[] rec1 = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 };
    byte[] rec2 = { 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 };
    byte[] rec3 = { 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45 };
    byte[][] records = { rec1, rec2, rec3 };

    writer.write(rec1);
    writer.write(rec2);
    writer.write(rec3);

    writer.close();
    writer.commit();

    Assert.assertEquals(writer.recordsWritten(), 3);
    Assert.assertEquals(writer.bytesWritten(),
            rec1.length + rec2.length + rec3.length + (Long.SIZE / 8 * 3) + 3);

    File outputFile = new File(writer.getOutputFilePath());
    DataInputStream dis = new DataInputStream(new FileInputStream(outputFile));
    for (int i = 0; i < 3; i++) {
        long size = dis.readLong();
        Assert.assertEquals(size, records[i].length + 1);
        for (int j = 0; j < size - 1; j++) {
            Assert.assertEquals(dis.readByte(), records[i][j]);
        }
        Assert.assertEquals(dis.readByte(), '\n');
    }
}

From source file:org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager.java
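
When loading persisted delegation tokens from the fsimage, readLong() reads each token's expiry time after the identifier's fields have been deserialized.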

/**
 * Private helper method to load delegation tokens from the fsimage.
 */
private synchronized void loadCurrentTokens(DataInputStream in) throws IOException {
    int numberOfTokens = in.readInt();
    for (int i = 0; i < numberOfTokens; i++) {
        DelegationTokenIdentifier id = new DelegationTokenIdentifier();
        id.readFields(in);
        long expiryTime = in.readLong();
        addPersistedDelegationToken(id, expiryTime);
    }
}

From source file:org.apache.hadoop.mapreduce.v2.hs.HistoryServerFileSystemStateStoreService.java
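
The filesystem-backed variant of the history server state store follows the same pattern as the leveldb one above: readFields() restores the token identifier, then readLong() reads the renewal date.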

private MRDelegationTokenIdentifier loadToken(HistoryServerState state, Path tokenFile, long numTokenFileBytes)
        throws IOException {
    MRDelegationTokenIdentifier tokenId = new MRDelegationTokenIdentifier();
    long renewDate;
    byte[] tokenData = readFile(tokenFile, numTokenFileBytes);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(tokenData));
    try {
        tokenId.readFields(in);
        renewDate = in.readLong();
    } finally {
        IOUtils.cleanup(LOG, in);
    }
    state.tokenState.put(tokenId, renewDate);
    return tokenId;
}

From source file:RealFunctionValidation.java
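
This generic helper echoes a primitive value from an input stream to an output stream, dispatching on its type; the Long.TYPE branch pairs readLong() with writeLong() and returns the boxed value.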

public static Object readAndWritePrimitiveValue(final DataInputStream in, final DataOutputStream out,
        final Class<?> type) throws IOException {

    if (!type.isPrimitive()) {
        throw new IllegalArgumentException("type must be primitive");
    }
    if (type.equals(Boolean.TYPE)) {
        final boolean x = in.readBoolean();
        out.writeBoolean(x);
        return Boolean.valueOf(x);
    } else if (type.equals(Byte.TYPE)) {
        final byte x = in.readByte();
        out.writeByte(x);
        return Byte.valueOf(x);
    } else if (type.equals(Character.TYPE)) {
        final char x = in.readChar();
        out.writeChar(x);
        return Character.valueOf(x);
    } else if (type.equals(Double.TYPE)) {
        final double x = in.readDouble();
        out.writeDouble(x);
        return Double.valueOf(x);
    } else if (type.equals(Float.TYPE)) {
        final float x = in.readFloat();
        out.writeFloat(x);
        return Float.valueOf(x);
    } else if (type.equals(Integer.TYPE)) {
        final int x = in.readInt();
        out.writeInt(x);
        return Integer.valueOf(x);
    } else if (type.equals(Long.TYPE)) {
        final long x = in.readLong();
        out.writeLong(x);
        return Long.valueOf(x);
    } else if (type.equals(Short.TYPE)) {
        final short x = in.readShort();
        out.writeShort(x);
        return Short.valueOf(x);
    } else {
        // This should never occur.
        throw new IllegalStateException();
    }
}

From source file:org.apache.cassandra.db.SuperColumn.java
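
The Cassandra deserializer reads the super column's name via readUTF(), then feeds an int and a long (read with readLong()) to markForDeleteAt() to restore the column's deletion marker.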

private SuperColumn defreezeSuperColumn(DataInputStream dis) throws IOException {
    String name = dis.readUTF();
    SuperColumn superColumn = new SuperColumn(name);
    superColumn.markForDeleteAt(dis.readInt(), dis.readLong());
    return superColumn;
}