Example usage for java.io DataOutput writeLong

Introduction

On this page you can find example usage for java.io DataOutput writeLong.

Prototype

void writeLong(long v) throws IOException;

Document

Writes a long value, which is comprised of eight bytes, to the output stream.
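
Before the project-specific examples below, here is a minimal, self-contained sketch of writeLong using DataOutputStream (one common DataOutput implementation) together with the matching readLong on the input side; the file name longs.bin is arbitrary.

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class WriteLongExample {
    public static void main(String[] args) throws IOException {
        // Write a long value (eight bytes, high byte first) to a file.
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream("longs.bin"))) {
            out.writeLong(1234567890123L);
        }
        // Read it back with the corresponding DataInput method.
        try (DataInputStream in = new DataInputStream(new FileInputStream("longs.bin"))) {
            long value = in.readLong();
            System.out.println(value); // prints 1234567890123
        }
    }
}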

Usage

From source file:org.apache.hadoop.hbase.coprocessor.GroupByStatsValues.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeLong(count);
    out.writeLong(missing);

    if (min == null) {
        min = ci.getMaxValue();
    }
    if (max == null) {
        max = ci.getMinValue();
    }
    if (sum == null) {
        sum = ci.castToReturnType(ci.zero());
    }
    if (sumOfSquares == null) {
        sumOfSquares = ci.castToReturnType(ci.zero());
    }
    new ObjectWritable(ClassUtils.wrapperToPrimitive(min.getClass()), min).write(out);
    new ObjectWritable(ClassUtils.wrapperToPrimitive(max.getClass()), max).write(out);
    new ObjectWritable(ClassUtils.wrapperToPrimitive(sum.getClass()), sum).write(out);
    new ObjectWritable(ClassUtils.wrapperToPrimitive(sumOfSquares.getClass()), sumOfSquares).write(out);

    WritableUtils.writeString(out, ci.getClass().getName());
}

From source file:com.liveramp.cascading_ext.bloom.BloomFilter.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(this.numHashes);
    out.writeLong(this.vectorSize);
    out.writeLong(this.numElems);
    out.write(this.bits.getRaw());
    byte[] serializedHashFunction = SerializationUtils.serialize(this.hashFunction);
    out.writeInt(serializedHashFunction.length);
    out.write(serializedHashFunction);
}

From source file:org.apache.hadoop.hdfs.protocol.Block.java

public void writeId(DataOutput out) throws IOException {
    out.writeLong(blockId);
    out.writeLong(generationStamp);
}
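
The read side is not shown on this page; as a hypothetical counterpart (assuming the same blockId and generationStamp fields), the data written by writeId would be read back with DataInput.readLong in the same order:

public void readId(DataInput in) throws IOException {
    // Fields must be read in exactly the order writeId wrote them.
    blockId = in.readLong();
    generationStamp = in.readLong();
}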

From source file:org.apache.hadoop.hdfs.protocol.Block.java

final void writeHelper(DataOutput out) throws IOException {
    out.writeLong(blockId);
    out.writeLong(numBytes);
    out.writeLong(generationStamp);
    out.writeLong(classId);
}

From source file:com.marklogic.mapreduce.LargeBinaryDocument.java

@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    Text.writeString(out, path.toString());
    out.writeLong(offset);
    out.writeLong(size);
    out.writeLong(binaryOrigLen);
    conf.write(out);
}

From source file:org.apache.hadoop.hdfs.protocol.LocatedBlocks.java

public void write(DataOutput out) throws IOException {
    out.writeLong(this.fileLength);
    out.writeBoolean(underConstruction);
    // write located blocks
    int nrBlocks = locatedBlockCount();
    out.writeInt(nrBlocks);
    if (nrBlocks == 0) {
        return;
    }
    for (LocatedBlock blk : this.blocks) {
        blk.write(out);
    }
}

From source file:com.nearinfinity.blur.mapreduce.BlurTask.java

@Override
public void write(DataOutput output) throws IOException {
    output.writeLong(_maxRecordCount);
    output.writeInt(_ramBufferSizeMB);
    output.writeBoolean(_optimize);
    writeString(output, _indexingType.name());
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    TIOStreamTransport trans = new TIOStreamTransport(os);
    TBinaryProtocol protocol = new TBinaryProtocol(trans);
    try {
        _tableDescriptor.write(protocol);
    } catch (TException e) {
        throw new IOException(e);
    }
    os.close();
    byte[] bs = os.toByteArray();
    output.writeInt(bs.length);
    output.write(bs);
}

From source file:org.apache.hadoop.vertica.VerticaInputSplit.java

/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
    Text.writeString(out, inputQuery);
    if (segmentParams != null && segmentParams.size() > 0) {
        out.writeLong(segmentParams.size());
        VerticaRecord record = new VerticaRecord(segmentParams, true);
        record.write(out);
    } else
        out.writeLong(0);
    out.writeLong(start);
    out.writeLong(end);
}

From source file:com.chinamobile.bcbsp.action.Directive.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(faultSSStep);
    out.writeLong(this.timestamp);
    out.writeInt(this.type.value());
    if (getType().value() == Directive.Type.Request.value()) {
        if (this.actionList == null) {
            WritableUtils.writeVInt(out, 0);
        } else {
            WritableUtils.writeVInt(out, actionList.size());
            for (WorkerManagerAction action : this.actionList) {
                WritableUtils.writeEnum(out, action.getActionType());
                action.write(out);
            }
        }

        WritableUtils.writeCompressedStringArray(out, this.workerManagersName);
    } else if (getType().value() == Directive.Type.Response.value()) {
        this.status.write(out);
    } else {
        throw new IllegalStateException("Wrong directive type:" + getType());
    }

    /* Zhicheng Liu added */
    out.writeInt(this.migrateSSStep);

}

From source file:org.apache.hama.bsp.JobStatus.java

@Override
public synchronized void write(DataOutput out) throws IOException {
    jobid.write(out);
    out.writeLong(setupProgress);
    out.writeLong(progress);
    out.writeLong(cleanupProgress);
    out.writeInt(runState);
    out.writeLong(startTime);
    out.writeLong(finishTime);
    Text.writeString(out, user);
    Text.writeString(out, schedulingInfo);
    out.writeLong(superstepCount);
    counter.write(out);
}