Example usage for java.io DataOutput writeLong

List of usage examples for java.io DataOutput writeLong

Introduction

On this page you can find example usage of java.io.DataOutput.writeLong.

Prototype

void writeLong(long v) throws IOException;

Document

Writes a long value, which is comprised of eight bytes, to the output stream.
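
The method is typically called on a DataOutputStream (or on the DataOutput handed to a Hadoop Writable, as in the usage examples below). A minimal round-trip sketch, assuming an in-memory buffer; the class and variable names here are illustrative and not taken from the examples:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteLongRoundTrip {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            // writeLong always emits exactly eight bytes, high byte first
            out.writeLong(1234567890123L);
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            long value = in.readLong(); // reads the same eight bytes back
            System.out.println(value);  // prints 1234567890123
        }
    }
}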

Usage

From source file:com.marklogic.mapreduce.ForestDocument.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeLong(fragmentOrdinal);
    WritableUtils.writeStringArray(out, collections);
}
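
Hadoop Writable implementations pair write(DataOutput) with a symmetric readFields(DataInput) that consumes the fields in the same order they were written. A minimal sketch of the matching read side for the snippet above; the field names come from the snippet, but the method body is an assumption based on the write order, not taken from the source file:

@Override
public void readFields(DataInput in) throws IOException {
    // hypothetical counterpart: read fields in the exact order write() emitted them
    fragmentOrdinal = in.readLong();
    collections = WritableUtils.readStringArray(in);
}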

From source file:com.sirius.hadoop.job.onlinetime.StatusKey.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeUTF(userId != null ? userId : StringUtils.EMPTY);
    out.writeLong(time);
}

From source file:edu.iu.lda.TopicCountList.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(topicCount.size());
    for (int i = 0; i < topicCount.size(); i++) {
        out.writeLong(topicCount.getLong(i));
    }
}

From source file:org.apache.bigtop.bigpetstore.generator.PetStoreTransactionInputSplit.java

public void write(DataOutput dataOutputStream) throws IOException {
    dataOutputStream.writeInt(records);
    dataOutputStream.writeUTF(state.name());
    dataOutputStream.writeLong(customerIdRange.getMinimum());
    dataOutputStream.writeLong(customerIdRange.getMaximum());
}

From source file:com.davidgildeh.hadoop.input.simpledb.SimpleDBInputSplit.java

/**
 * Serialises the Split Object so it can be persisted to disk
 *
 * @param output            The output stream to write to
 * @throws IOException 
 */
public void write(DataOutput output) throws IOException {

    output.writeLong(startRow);
    output.writeLong(endRow);
    if (splitToken == null) {
        output.writeUTF("NULL");
    } else {
        output.writeUTF(splitToken);
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Writing SimpleDBInputSplit: " + this.toString());
    }
}

From source file:edu.umn.cs.spatialHadoop.osm.OSMEdge.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeLong(edgeId);
    out.writeLong(nodeId1);
    out.writeDouble(lat1);
    out.writeDouble(lon1);
    out.writeLong(nodeId2);
    out.writeDouble(lat2);
    out.writeDouble(lon2);
    out.writeLong(wayId);
    out.writeUTF(tags);
}

From source file:org.apache.hama.bsp.Directive.java

public void write(DataOutput out) throws IOException {
    out.writeLong(this.timestamp);
    out.writeInt(this.type.value());
    if (getType().value() == Directive.Type.Request.value()) {
        if (this.actionList == null) {
            WritableUtils.writeVInt(out, 0);
        } else {
            WritableUtils.writeVInt(out, actionList.size());
            for (GroomServerAction action : this.actionList) {
                WritableUtils.writeEnum(out, action.getActionType());
                action.write(out);
            }
        }
        String[] groomServerNames = groomServerPeers.keySet().toArray(new String[0]);
        WritableUtils.writeCompressedStringArray(out, groomServerNames);

        List<String> groomServerAddresses = new ArrayList<String>(groomServerNames.length);
        for (String groomName : groomServerNames) {
            groomServerAddresses.add(groomServerPeers.get(groomName));
        }
        WritableUtils.writeCompressedStringArray(out, groomServerAddresses.toArray(new String[0]));
    } else if (getType().value() == Directive.Type.Response.value()) {
        this.status.write(out);
    } else {
        throw new IllegalStateException("Wrong directive type:" + getType());
    }

}

From source file:com.blackberry.logdriver.boom.LogLineData.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeLong(timestamp);
    out.writeLong(createTime);
    out.writeLong(blockNumber);
    out.writeLong(lineNumber);
    out.writeInt(eventId);
}

From source file:org.apache.hadoop.hbase.hbql.filter.PageFilter.java

public void write(final DataOutput out) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    out.writeLong(this.pageSize);
    out.writeBoolean(this.getVerbose());
    HbaseObjectWritable.writeObject(out, this.getFilter(), Writable.class, conf);
}

From source file:com.netflix.aegisthus.input.AegSplit.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeLong(end);
    WritableUtils.writeStringArray(out, hosts);
    WritableUtils.writeString(out, path.toUri().toString());
    out.writeBoolean(compressed);
    if (compressed) {
        WritableUtils.writeString(out, compressedPath.toUri().toString());
    }
    out.writeLong(start);
    WritableUtils.writeEnum(out, type);
    if (convertors != null) {
        String[] parts = new String[2];
        out.writeInt(convertors.size());
        for (Map.Entry<String, AbstractType> e : convertors.entrySet()) {
            parts[0] = e.getKey();
            parts[1] = e.getValue().toString();
            WritableUtils.writeStringArray(out, parts);
        }
    } else {
        out.writeInt(0);
    }
}