List of usage examples for org.apache.hadoop.io WritableUtils writeVInt
public static void writeVInt(DataOutput stream, int i) throws IOException
From source file:org.commoncrawl.util.WikipediaPage.java
License:Apache License
/**
 * Serializes this object: the page body as a vint-length-prefixed byte
 * array, followed by the language code written as a modified-UTF string.
 * (The original comment said "Deserializes"; this is the write/serialize
 * side of the Writable contract.)
 *
 * @param out target stream
 * @throws IOException if the underlying stream write fails
 */
public void write(DataOutput out) throws IOException {
    // NOTE(review): if page is a String, getBytes() uses the platform
    // default charset — presumably the reader matches; confirm encoding.
    byte[] bytes = page.getBytes();
    WritableUtils.writeVInt(out, bytes.length);
    out.write(bytes, 0, bytes.length);
    out.writeUTF(language);
}
From source file:org.deephacks.confit.internal.hbase.MultiKeyValueComparisonFilter.java
License:Apache License
@Override public void write(DataOutput output) throws IOException { WritableUtils.writeVInt(output, sid.length); output.write(sid);//from w w w. j a va2 s .co m WritableUtils.writeVInt(output, maxResults); WritableUtils.writeVInt(output, restrictions.size()); for (QualifierRestriction restriction : restrictions) { int val = RestrictionType.valueOf(restriction).ordinal(); WritableUtils.writeVInt(output, val); restriction.write(output); } }
From source file:org.godhuli.rhipe.RHBytesWritable.java
License:Apache License
/**
 * Writable serialization: the logical size as a vint, followed by the
 * first {@code size} bytes of the backing array.
 *
 * @param out target stream
 * @throws IOException if the underlying stream write fails
 */
public void write(final DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, size);
    out.write(bytes, 0, size);
}
From source file:org.godhuli.rhipe.RHMRHelper.java
License:Apache License
public void writeCMD(int s) throws IOException { WritableUtils.writeVInt(clientOut_, s); // clientOut_.writeInt(s); }
From source file:org.goldenorb.io.input.RawSplit.java
License:Apache License
/**
 * Serializes this split to the given {@link DataOutput}: the split class
 * name, the data length, the raw split bytes, and finally the location
 * hostnames as a vint count followed by each name.
 *
 * @param out target stream
 * @throws IOException if the underlying stream write fails
 */
public void write(DataOutput out) throws IOException {
    Text.writeString(out, splitClass);
    out.writeLong(dataLength);
    bytes.write(out);
    WritableUtils.writeVInt(out, locations.length);
    for (String location : locations) {
        Text.writeString(out, location);
    }
}
From source file:org.htuple.Tuple.java
License:Apache License
@Override public void write(DataOutput out) throws IOException { WritableUtils.writeVInt(out, fields.size()); for (Object element : fields) { SerializationUtils.write(out, element); }// w ww .j ava 2 s .co m }
From source file:org.hypertable.hadoop.mapreduce.KeyWritable.java
License:Open Source License
/**
 * Serializes this key as a vint-length-prefixed little-endian cell buffer.
 * Fast path: if the key is not dirty, the previously parsed input buffer is
 * re-emitted verbatim. Otherwise the row / column family / column qualifier
 * buffers are packed into {@code m_output_buffer} (each as a vint length +
 * bytes, or a vint 0 when absent), followed by timestamp and revision.
 * Side effect: after packing, the row/family/qualifier fields are re-pointed
 * into {@code m_output_buffer}'s backing array — statement order here is
 * load-bearing; the position must be captured before each put.
 */
@Override
public void write(DataOutput out) throws IOException {
    if (!m_dirty) {
        // Fast path: re-emit the untouched input bytes.
        WritableUtils.writeVInt(out, m_input_buffer.position());
        out.write(m_input_buffer.array(), 0, m_input_buffer.position());
        return;
    }
    convert_strings_to_buffers();
    int position;
    // +30 leaves headroom for the flag short, three vint prefixes, and the
    // two longs (timestamp, revision).
    int serial_length = row_buffer_length + column_family_buffer_length
            + column_qualifier_buffer_length + 30;
    if (m_output_buffer == null || serial_length > m_output_buffer.capacity()) {
        m_output_buffer = ByteBuffer.allocate(serial_length + 16);
        m_output_buffer.order(ByteOrder.LITTLE_ENDIAN);
    } else {
        // Reuse the existing buffer; clear resets position, limit bounds it.
        m_output_buffer.clear();
        m_output_buffer.limit(serial_length);
    }
    m_output_buffer.putShort((short) flag.getValue());
    if (row_buffer != null) {
        writeVInt(m_output_buffer, row_buffer_length);
        // Capture position BEFORE the put, then alias the field into the
        // output buffer's backing array at that offset.
        position = m_output_buffer.position();
        m_output_buffer.put(row_buffer, row_buffer_offset, row_buffer_length);
        row_buffer = m_output_buffer.array();
        row_buffer_offset = position;
    } else
        writeVInt(m_output_buffer, 0);
    if (column_family_buffer != null) {
        writeVInt(m_output_buffer, column_family_buffer_length);
        position = m_output_buffer.position();
        m_output_buffer.put(column_family_buffer, column_family_buffer_offset,
                column_family_buffer_length);
        column_family_buffer = m_output_buffer.array();
        column_family_buffer_offset = position;
    } else
        writeVInt(m_output_buffer, 0);
    if (column_qualifier_buffer != null) {
        writeVInt(m_output_buffer, column_qualifier_buffer_length);
        position = m_output_buffer.position();
        m_output_buffer.put(column_qualifier_buffer, column_qualifier_buffer_offset,
                column_qualifier_buffer_length);
        column_qualifier_buffer = m_output_buffer.array();
        column_qualifier_buffer_offset = position;
    } else
        writeVInt(m_output_buffer, 0);
    // Unset timestamp/revision default to AUTO_ASSIGN before being written.
    if (!isSetTimestamp()) {
        timestamp = SerializedCellsFlag.AUTO_ASSIGN;
        setTimestampIsSet(true);
    }
    m_output_buffer.putLong(timestamp);
    if (!isSetRevision()) {
        revision = SerializedCellsFlag.AUTO_ASSIGN;
        setRevisionIsSet(true);
    }
    m_output_buffer.putLong(revision);
    // Emit the packed buffer: vint byte count, then the bytes themselves.
    WritableUtils.writeVInt(out, m_output_buffer.position());
    out.write(m_output_buffer.array(), 0, m_output_buffer.position());
}
From source file:org.hypertable.hadoop.util.Serialization.java
License:Apache License
/**
 * Writes the remaining bytes of {@code buf} with a WritableUtils vint
 * length prefix. A {@code null} or zero-limit buffer is encoded as a
 * single vint 0. The buffer's position is restored via mark/reset before
 * this method returns.
 *
 * @param out target stream
 * @param buf buffer to encode; may be {@code null}
 * @throws IOException if the underlying stream write fails
 */
public static void writeByteBuffer(final DataOutput out, ByteBuffer buf) throws IOException {
    if (buf != null && buf.limit() != 0) {
        buf.mark();
        final byte[] remaining = new byte[buf.remaining()];
        buf.get(remaining);
        buf.reset();
        writeByteArray(out, remaining);
    } else {
        WritableUtils.writeVInt(out, 0);
    }
}
From source file:org.imageterrier.termpayload.NNTermPayloadCoordinator.java
License:Mozilla Public License
/**
 * Serializes the payload as a vint element count followed by each element
 * encoded as a vint.
 *
 * @param out target stream
 * @param payload term payload values to write
 * @throws IOException if the underlying stream write fails
 */
@Override
public void writePayload(DataOutput out, int[] payload) throws IOException {
    WritableUtils.writeVInt(out, payload.length);
    for (int i = 0; i < payload.length; i++) {
        WritableUtils.writeVInt(out, payload[i]);
    }
}
From source file:org.imageterrier.termpayload.PositionTermPayloadCoordinator.java
License:Mozilla Public License
/**
 * Serializes each position as a vint.
 * NOTE(review): no element count is written (unlike the NN payload
 * coordinator) — presumably the reader knows the payload length from
 * elsewhere; confirm against the matching readPayload.
 *
 * @param out target stream
 * @param position position values to write
 * @throws IOException if the underlying stream write fails
 */
@Override
public void writePayload(DataOutput out, int[] position) throws IOException {
    for (int i = 0; i < position.length; i++) {
        WritableUtils.writeVInt(out, position[i]);
    }
}