Example usage for java.io DataOutput writeLong

Introduction

On this page you can find example usages of java.io.DataOutput.writeLong, collected from open source projects.

Prototype

void writeLong(long v) throws IOException;

Document

Writes a long value, which is comprised of eight bytes, to the output stream.
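
As a minimal illustration of that contract, the sketch below (standard JDK classes only) writes one long through a DataOutputStream, which implements DataOutput, and prints the resulting eight bytes in order, high byte first:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteLongDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            // writeLong emits exactly eight bytes, big-endian.
            out.writeLong(0x0102030405060708L);
        }
        for (byte b : buffer.toByteArray()) {
            System.out.printf("%02x ", b); // prints: 01 02 03 04 05 06 07 08
        }
    }
}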

Usage

From source file: com.mapr.hbase.support.objects.MHRegionInfo090x.java

@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    Bytes.writeByteArray(out, endKey);
    out.writeBoolean(offLine);
    out.writeLong(regionId);
    Bytes.writeByteArray(out, regionName);
    out.writeBoolean(split);
    Bytes.writeByteArray(out, startKey);
    tableDesc.write(out);
    out.writeInt(hashCode);
}

From source file: org.apache.hadoop.mapred.TaskStatus.java

public void write(DataOutput out) throws IOException {
    taskid.write(out);
    out.writeFloat(progress);
    out.writeInt(numSlots);
    WritableUtils.writeEnum(out, runState);
    Text.writeString(out, diagnosticInfo);
    Text.writeString(out, stateString);
    WritableUtils.writeEnum(out, phase);
    out.writeLong(startTime);
    out.writeLong(finishTime);
    out.writeBoolean(includeCounters);
    out.writeLong(outputSize);
    if (includeCounters) {
        counters.write(out);
    }
    nextRecordRange.write(out);
}
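
A Writable's write(DataOutput) must be mirrored field-for-field by a readFields(DataInput), with each writeLong answered by a readLong in the same position. The following is only a sketch of what the matching read side of the snippet above could look like, reconstructed from the write order; the State and Phase enum names are assumptions, not Hadoop's actual source:

public void readFields(DataInput in) throws IOException {
    taskid.readFields(in);
    progress = in.readFloat();
    numSlots = in.readInt();
    runState = WritableUtils.readEnum(in, State.class); // enum name assumed
    diagnosticInfo = Text.readString(in);
    stateString = Text.readString(in);
    phase = WritableUtils.readEnum(in, Phase.class); // enum name assumed
    startTime = in.readLong();  // pairs with out.writeLong(startTime)
    finishTime = in.readLong(); // pairs with out.writeLong(finishTime)
    includeCounters = in.readBoolean();
    outputSize = in.readLong();
    if (includeCounters) {
        counters.readFields(in);
    }
    nextRecordRange.readFields(in);
}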

From source file: com.mobicage.rogerthat.registration.RegistrationWizard2.java

@SuppressWarnings("unchecked")
@Override
public void writePickle(DataOutput out) throws IOException {
    T.UI();
    super.writePickle(out);
    boolean set = mCredentials != null;
    out.writeBoolean(set);
    if (set) {
        out.writeInt(mCredentials.getPickleClassVersion());
        mCredentials.writePickle(out);
    }
    set = mEmail != null;
    out.writeBoolean(set);
    if (set)
        out.writeUTF(mEmail);
    out.writeLong(mTimestamp);
    out.writeUTF(mRegistrationId);
    out.writeBoolean(mInGoogleAuthenticationProcess);
    out.writeUTF(mInstallationId);
    out.writeUTF(mDeviceId);
    set = mBeaconRegions != null;
    out.writeBoolean(set);
    if (set)
        out.writeUTF(JSONValue.toJSONString(mBeaconRegions.toJSONMap()));
    set = mDetectedBeacons != null;
    out.writeBoolean(set);
    if (set) {
        JSONArray db1 = new JSONArray();
        for (String db : mDetectedBeacons) {
            db1.add(db);
        }
        out.writeUTF(JSONValue.toJSONString(db1));
    }
}

From source file: com.mapr.hbase.support.objects.MHRegionInfo.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeByte(getVersion());
    Bytes.writeByteArray(out, endKey);
    out.writeBoolean(offLine);
    out.writeLong(regionId);
    Bytes.writeByteArray(out, regionName);
    out.writeBoolean(split);
    Bytes.writeByteArray(out, startKey);
    if (getVersion() == 0) {
        tableDesc.write(out);
    } else {
        Bytes.writeByteArray(out, tableName);
    }
    out.writeInt(hashCode);
}

From source file: org.apache.vxquery.jsonparser.JSONParser.java

public void atomicValues(int tag, JsonParser parser, DataOutput out, StringValueBuilder svb, int levelArray,
        int levelObject) throws IOException {
    abvsStack.get(0).reset();
    out.write(tag);
    if (tag == ValueTag.XS_DOUBLE_TAG) {
        out.writeDouble(parser.getDoubleValue());
    } else if (tag == ValueTag.XS_STRING_TAG) {
        svb.write(parser.getText(), out);
    } else if (tag == ValueTag.XS_INTEGER_TAG) {
        out.writeLong(parser.getLongValue());
    }
    if (!itemStack.isEmpty()) {
        if (itemStack.get(itemStack.size() - 1) == itemType.ARRAY) {
            abStack.get(levelArray - 1).addItem(abvsStack.get(0));
            if (valueSeq != null && this.matched && levelArray == this.arrayMatchLevel) {
                this.literal = true;
                this.matched = false;
                writeElement(abvsStack.get(0));
            }
        } else if (itemStack.get(itemStack.size() - 1) == itemType.OBJECT) {
            obStack.get(levelObject - 1).addItem(spStack.get(levelObject - 1), abvsStack.get(0));
            if (valueSeq != null && this.matched && levelObject == this.objectMatchLevel) {
                this.literal = true;
                this.matched = false;
                writeElement(abvsStack.get(0));
            }
        }
    }
}

From source file: dk.statsbiblioteket.util.LineReaderTest.java

public void writeSample(DataOutput out) throws Exception {
    out.writeInt(12345);
    out.writeInt(-87);
    out.writeLong(123456789L);
    out.write("Hello World!\n".getBytes("utf-8"));
    out.write("Another world\n".getBytes("utf-8"));
    out.writeFloat(0.5f);
    out.writeBoolean(true);
    out.writeBoolean(false);
    out.writeByte(12);
    out.writeByte(-12);
    out.write(129); // write(int) stores only the low-order byte (0x81)
    out.writeShort(-4567);
    out.writeBytes("ASCII");
}

From source file: org.apache.isis.objectstore.nosql.db.file.server.FileServer.java

private void syncConnection(final Socket connection, final int readTimeout) {
    try {
        final CRC32 crc32 = new CRC32();
        final DataOutput output = new DataOutputStream(connection.getOutputStream());
        final DataInput input = new DataInputStream(new CheckedInputStream(connection.getInputStream(), crc32));

        if (input.readByte() != INIT) {
            return;
        }

        final LogRange logFileRange = Util.logFileRange();
        final long lastId = logFileRange.noLogFile() ? -1 : logFileRange.getLast();
        output.writeLong(lastId);
        do {
            if (input.readByte() != RECOVERY_LOG) {
                return;
            }
            crc32.reset();
            final long logId = input.readLong();
            final File file = Util.tmpLogFile(logId);
            LOG.info("syncing recovery file: " + file.getName());
            final BufferedOutputStream fileOutput = new BufferedOutputStream(new FileOutputStream(file));

            final byte[] buffer = new byte[8092];
            int length;
            while ((length = input.readInt()) > 0) {
                input.readFully(buffer, 0, length);
                fileOutput.write(buffer, 0, length);
            }
            fileOutput.close();

            final long calculatedChecksum = crc32.getValue();
            final long sentChecksum = input.readLong();
            if (calculatedChecksum != sentChecksum) {
                throw new NoSqlStoreException("Checksum didn't match during download of " + file.getName());
            }

            recover(file);
            final File renameTo = Util.logFile(logId);
            file.renameTo(renameTo);
        } while (true);
    } catch (final NoSqlStoreException e) {
        LOG.error("file server failure", e);
    } catch (final IOException e) {
        LOG.error("networking failure", e);
    } catch (final RuntimeException e) {
        LOG.error("request failure", e);
    } finally {
        try {
            connection.close();
        } catch (final IOException e) {
            LOG.warn("failure to close connection", e);
        }
    }

    // TODO restart
}

From source file: org.apache.pig.data.DataReaderWriter.java

@SuppressWarnings("unchecked")
public static void writeDatum(DataOutput out, Object val) throws IOException {
    // Determine the data type so we can write the right marker byte
    byte type = DataType.findType(val);
    switch (type) {
    case DataType.TUPLE:
        // Because tuples are written directly by hadoop, the
        // tuple's write method needs to write the indicator byte.
        // So don't write the indicator byte here as it is for
        // everyone else.
        ((Tuple) val).write(out);
        break;

    case DataType.BAG:
        out.writeByte(DataType.BAG);
        ((DataBag) val).write(out);
        break;

    case DataType.MAP: {
        out.writeByte(DataType.MAP);
        Map<String, Object> m = (Map<String, Object>) val;
        out.writeInt(m.size());
        Iterator<Map.Entry<String, Object>> i = m.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<String, Object> entry = i.next();
            writeDatum(out, entry.getKey());
            writeDatum(out, entry.getValue());
        }
        break;
    }

    case DataType.INTERNALMAP: {
        out.writeByte(DataType.INTERNALMAP);
        Map<Object, Object> m = (Map<Object, Object>) val;
        out.writeInt(m.size());
        Iterator<Map.Entry<Object, Object>> i = m.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<Object, Object> entry = i.next();
            writeDatum(out, entry.getKey());
            writeDatum(out, entry.getValue());
        }
        break;
    }

    case DataType.INTEGER:
        out.writeByte(DataType.INTEGER);
        out.writeInt((Integer) val);
        break;

    case DataType.LONG:
        out.writeByte(DataType.LONG);
        out.writeLong((Long) val);
        break;

    case DataType.FLOAT:
        out.writeByte(DataType.FLOAT);
        out.writeFloat((Float) val);
        break;

    case DataType.DOUBLE:
        out.writeByte(DataType.DOUBLE);
        out.writeDouble((Double) val);
        break;

    case DataType.BOOLEAN:
        out.writeByte(DataType.BOOLEAN);
        out.writeBoolean((Boolean) val);
        break;

    case DataType.BYTE:
        out.writeByte(DataType.BYTE);
        out.writeByte((Byte) val);
        break;

    case DataType.BYTEARRAY: {
        out.writeByte(DataType.BYTEARRAY);
        DataByteArray bytes = (DataByteArray) val;
        out.writeInt(bytes.size());
        out.write(bytes.mData);
        break;
    }

    case DataType.CHARARRAY: {
        String s = (String) val;
        byte[] utfBytes = s.getBytes(DataReaderWriter.UTF8);
        int length = utfBytes.length;

        if (length < DataReaderWriter.UNSIGNED_SHORT_MAX) {
            out.writeByte(DataType.CHARARRAY);
            out.writeShort(length);
            out.write(utfBytes);
        } else {
            out.writeByte(DataType.BIGCHARARRAY);
            out.writeInt(length);
            out.write(utfBytes);
        }
        break;
    }
    case DataType.GENERIC_WRITABLECOMPARABLE:
        out.writeByte(DataType.GENERIC_WRITABLECOMPARABLE);
        //store the class name, so we know the class to create on read
        writeDatum(out, val.getClass().getName());
        Writable writable = (Writable) val;
        writable.write(out);
        break;

    case DataType.NULL:
        out.writeByte(DataType.NULL);
        break;

    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}
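
Reading such a stream back reverses the scheme: consume the marker byte first, then dispatch on it. A minimal, hedged sketch for just the LONG case (the helper name is hypothetical; Pig's own reader handles every type):

// Hypothetical reader for the LONG case; mirrors the writer above.
public static Long readLongDatum(DataInput in) throws IOException {
    byte type = in.readByte();
    if (type != DataType.LONG) {
        throw new IOException("Expected LONG marker, found " + type);
    }
    return in.readLong(); // pairs with out.writeLong((Long) val)
}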

From source file: org.apache.rya.accumulo.mr.RyaStatementWritable.java

/**
 * Serializes this RyaStatementWritable.
 * @param   dataOutput  An output stream for serialized statement data.
 * @throws  IOException if the RyaStatement is null or otherwise can't be
 *          serialized.
 */
@Override
public void write(DataOutput dataOutput) throws IOException {
    if (ryaStatement == null) {
        throw new IOException("Rya Statement is null");
    }
    try {
        Map<RdfCloudTripleStoreConstants.TABLE_LAYOUT, TripleRow> map = ryaContext
                .serializeTriple(ryaStatement);
        TripleRow tripleRow = map.get(RdfCloudTripleStoreConstants.TABLE_LAYOUT.SPO);
        byte[] row = tripleRow.getRow();
        byte[] columnFamily = tripleRow.getColumnFamily();
        byte[] columnQualifier = tripleRow.getColumnQualifier();
        write(dataOutput, row);
        write(dataOutput, columnFamily);
        write(dataOutput, columnQualifier);
        write(dataOutput, ryaStatement.getColumnVisibility());
        write(dataOutput, ryaStatement.getValue());
        Long timestamp = ryaStatement.getTimestamp();
        boolean b = timestamp != null;
        dataOutput.writeBoolean(b);
        if (b) {
            dataOutput.writeLong(timestamp);
        }
    } catch (TripleRowResolverException e) {
        throw new IOException(e);
    }
}

From source file: org.apache.hadoop.hbase.regionserver.wal.HLogKey.java

@Override
@Deprecated
public void write(DataOutput out) throws IOException {
    LOG.warn("HLogKey is being serialized to writable - only expected in test code");
    WritableUtils.writeVInt(out, VERSION.code);
    if (compressionContext == null) {
        Bytes.writeByteArray(out, this.encodedRegionName);
        Bytes.writeByteArray(out, this.tablename.getName());
    } else {
        Compressor.writeCompressed(this.encodedRegionName, 0, this.encodedRegionName.length, out,
                compressionContext.regionDict);
        Compressor.writeCompressed(this.tablename.getName(), 0, this.tablename.getName().length, out,
                compressionContext.tableDict);
    }
    out.writeLong(this.logSeqNum);
    out.writeLong(this.writeTime);
    // Don't need to write the clusters information as we are using protobufs from 0.95
    // Writing only the first clusterId for testing the legacy read
    Iterator<UUID> iterator = clusterIds.iterator();
    if (iterator.hasNext()) {
        out.writeBoolean(true);
        UUID clusterId = iterator.next();
        out.writeLong(clusterId.getMostSignificantBits());
        out.writeLong(clusterId.getLeastSignificantBits());
    } else {
        out.writeBoolean(false);
    }
}
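
The clusterId block above uses a common idiom: a java.util.UUID is fully captured by two writeLong calls and rebuilt from two readLong calls. A self-contained sketch of that round trip, using only the standard UUID API:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.UUID;

public final class UuidIo {
    static void writeUuid(DataOutput out, UUID id) throws IOException {
        out.writeLong(id.getMostSignificantBits());
        out.writeLong(id.getLeastSignificantBits());
    }

    static UUID readUuid(DataInput in) throws IOException {
        // Read in the same order the bits were written.
        return new UUID(in.readLong(), in.readLong());
    }
}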