List of usage examples for org.apache.hadoop.io.Writable.write
void write(DataOutput out) throws IOException;
Parameter: out - the DataOutput to serialize this object into.
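Before the collected examples, a minimal sketch of the contract itself may help: write must emit the object's fields in exactly the order that readFields later consumes them. The class below is a hypothetical illustration (the type and field names are ours, not taken from any of the projects listed):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    // Hypothetical example type; field names are illustrative only.
    public class PageVisit implements Writable {
        private String url = "";
        private long timestamp;
        private int durationSeconds;

        @Override
        public void write(DataOutput out) throws IOException {
            Text.writeString(out, url);        // length-prefixed UTF-8 string
            out.writeLong(timestamp);          // 8 bytes
            out.writeInt(durationSeconds);     // 4 bytes
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            // Must mirror write() field-for-field, in the same order.
            url = Text.readString(in);
            timestamp = in.readLong();
            durationSeconds = in.readInt();
        }
    }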
From source file:org.apache.pig.data.BinInterSedes.java
License:Apache License
@Override
@SuppressWarnings("unchecked")
public void writeDatum(DataOutput out, Object val, byte type) throws IOException {
    switch (type) {
    case DataType.TUPLE:
        writeTuple(out, (Tuple) val);
        break;
    case DataType.BAG:
        writeBag(out, (DataBag) val);
        break;
    case DataType.MAP: {
        writeMap(out, (Map<String, Object>) val);
        break;
    }
    case DataType.INTERNALMAP: {
        out.writeByte(INTERNALMAP);
        Map<Object, Object> m = (Map<Object, Object>) val;
        out.writeInt(m.size());
        Iterator<Map.Entry<Object, Object>> i = m.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<Object, Object> entry = i.next();
            writeDatum(out, entry.getKey());
            writeDatum(out, entry.getValue());
        }
        break;
    }
    case DataType.INTEGER:
        int i = (Integer) val;
        if (i == 0) {
            out.writeByte(INTEGER_0);
        } else if (i == 1) {
            out.writeByte(INTEGER_1);
        } else if (Byte.MIN_VALUE <= i && i <= Byte.MAX_VALUE) {
            out.writeByte(INTEGER_INBYTE);
            out.writeByte(i);
        } else if (Short.MIN_VALUE <= i && i <= Short.MAX_VALUE) {
            out.writeByte(INTEGER_INSHORT);
            out.writeShort(i);
        } else {
            out.writeByte(INTEGER);
            out.writeInt(i);
        }
        break;
    case DataType.LONG:
        long lng = (Long) val;
        if (lng == 0) {
            out.writeByte(LONG_0);
        } else if (lng == 1) {
            out.writeByte(LONG_1);
        } else if (Byte.MIN_VALUE <= lng && lng <= Byte.MAX_VALUE) {
            out.writeByte(LONG_INBYTE);
            out.writeByte((int) lng);
        } else if (Short.MIN_VALUE <= lng && lng <= Short.MAX_VALUE) {
            out.writeByte(LONG_INSHORT);
            out.writeShort((int) lng);
        } else if (Integer.MIN_VALUE <= lng && lng <= Integer.MAX_VALUE) {
            out.writeByte(LONG_ININT);
            out.writeInt((int) lng);
        } else {
            out.writeByte(LONG);
            out.writeLong(lng);
        }
        break;
    case DataType.DATETIME:
        out.writeByte(DATETIME);
        out.writeLong(((DateTime) val).getMillis());
        out.writeShort(((DateTime) val).getZone().getOffset((DateTime) val) / ONE_MINUTE);
        break;
    case DataType.FLOAT:
        out.writeByte(FLOAT);
        out.writeFloat((Float) val);
        break;
    case DataType.BIGINTEGER:
        out.writeByte(BIGINTEGER);
        writeBigInteger(out, (BigInteger) val);
        break;
    case DataType.BIGDECIMAL:
        out.writeByte(BIGDECIMAL);
        writeBigDecimal(out, (BigDecimal) val);
        break;
    case DataType.DOUBLE:
        out.writeByte(DOUBLE);
        out.writeDouble((Double) val);
        break;
    case DataType.BOOLEAN:
        if ((Boolean) val)
            out.writeByte(BOOLEAN_TRUE);
        else
            out.writeByte(BOOLEAN_FALSE);
        break;
    case DataType.BYTE:
        out.writeByte(BYTE);
        out.writeByte((Byte) val);
        break;
    case DataType.BYTEARRAY: {
        DataByteArray bytes = (DataByteArray) val;
        SedesHelper.writeBytes(out, bytes.mData);
        break;
    }
    case DataType.CHARARRAY: {
        SedesHelper.writeChararray(out, (String) val);
        break;
    }
    case DataType.GENERIC_WRITABLECOMPARABLE:
        out.writeByte(GENERIC_WRITABLECOMPARABLE);
        // store the class name, so we know the class to create on read
        writeDatum(out, val.getClass().getName());
        Writable writable = (Writable) val;
        writable.write(out);
        break;
    case DataType.NULL:
        out.writeByte(NULL);
        break;
    default:
        throw new RuntimeException("Unexpected data type " + val.getClass().getName() + " found in stream. "
                + "Note only standard Pig type is supported when you output from UDF/LoadFunc");
    }
}
From source file:org.apache.pig.data.DataReaderWriter.java
License:Apache License
@SuppressWarnings("unchecked")
public static void writeDatum(DataOutput out, Object val) throws IOException {
    // Read the data type
    byte type = DataType.findType(val);
    switch (type) {
    case DataType.TUPLE:
        Tuple t = (Tuple) val;
        out.writeByte(DataType.TUPLE);
        int sz = t.size();
        out.writeInt(sz);
        for (int i = 0; i < sz; i++) {
            DataReaderWriter.writeDatum(out, t.get(i));
        }
        break;
    case DataType.BAG:
        DataBag bag = (DataBag) val;
        out.writeByte(DataType.BAG);
        out.writeLong(bag.size());
        Iterator<Tuple> it = bag.iterator();
        while (it.hasNext()) {
            DataReaderWriter.writeDatum(out, it.next());
        }
        break;
    case DataType.MAP: {
        out.writeByte(DataType.MAP);
        Map<String, Object> m = (Map<String, Object>) val;
        out.writeInt(m.size());
        Iterator<Map.Entry<String, Object>> i = m.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<String, Object> entry = i.next();
            writeDatum(out, entry.getKey());
            writeDatum(out, entry.getValue());
        }
        break;
    }
    case DataType.INTERNALMAP: {
        out.writeByte(DataType.INTERNALMAP);
        Map<Object, Object> m = (Map<Object, Object>) val;
        out.writeInt(m.size());
        Iterator<Map.Entry<Object, Object>> i = m.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<Object, Object> entry = i.next();
            writeDatum(out, entry.getKey());
            writeDatum(out, entry.getValue());
        }
        break;
    }
    case DataType.INTEGER:
        out.writeByte(DataType.INTEGER);
        out.writeInt((Integer) val);
        break;
    case DataType.LONG:
        out.writeByte(DataType.LONG);
        out.writeLong((Long) val);
        break;
    case DataType.FLOAT:
        out.writeByte(DataType.FLOAT);
        out.writeFloat((Float) val);
        break;
    case DataType.DOUBLE:
        out.writeByte(DataType.DOUBLE);
        out.writeDouble((Double) val);
        break;
    case DataType.BOOLEAN:
        out.writeByte(DataType.BOOLEAN);
        out.writeBoolean((Boolean) val);
        break;
    case DataType.BYTE:
        out.writeByte(DataType.BYTE);
        out.writeByte((Byte) val);
        break;
    case DataType.DATETIME:
        out.writeByte(DataType.DATETIME);
        out.writeLong(((DateTime) val).getMillis());
        out.writeShort(((DateTime) val).getZone().getOffset((DateTime) val) / 60000);
        break;
    case DataType.BYTEARRAY: {
        out.writeByte(DataType.BYTEARRAY);
        DataByteArray bytes = (DataByteArray) val;
        out.writeInt(bytes.size());
        out.write(bytes.mData);
        break;
    }
    case DataType.BIGINTEGER:
        out.writeByte(DataType.BIGINTEGER);
        writeDatum(out, ((BigInteger) val).toByteArray());
        break;
    case DataType.BIGDECIMAL:
        out.writeByte(DataType.BIGDECIMAL);
        writeDatum(out, ((BigDecimal) val).toString());
        break;
    case DataType.CHARARRAY: {
        String s = (String) val;
        byte[] utfBytes = s.getBytes(DataReaderWriter.UTF8);
        int length = utfBytes.length;
        if (length < DataReaderWriter.UNSIGNED_SHORT_MAX) {
            out.writeByte(DataType.CHARARRAY);
            out.writeShort(length);
            out.write(utfBytes);
        } else {
            out.writeByte(DataType.BIGCHARARRAY);
            out.writeInt(length);
            out.write(utfBytes);
        }
        break;
    }
    case DataType.GENERIC_WRITABLECOMPARABLE:
        out.writeByte(DataType.GENERIC_WRITABLECOMPARABLE);
        // store the class name, so we know the class to create on read
        writeDatum(out, val.getClass().getName());
        Writable writable = (Writable) val;
        writable.write(out);
        break;
    case DataType.NULL:
        out.writeByte(DataType.NULL);
        break;
    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}
From source file:org.apache.sysml.runtime.util.LocalFileUtils.java
License:Apache License
/**
 * Writes an arbitrary writable to local file system, using a fused buffered writer
 * with special support for matrix blocks.
 *
 * @param fname file name to write
 * @param mb Hadoop writable
 * @throws IOException if IOException occurs
 */
public static void writeWritableToLocal(String fname, Writable mb) throws IOException {
    FileOutputStream fos = new FileOutputStream(fname);
    FastBufferedDataOutputStream out = new FastBufferedDataOutputStream(fos, BUFFER_SIZE);
    try {
        mb.write(out);
    } finally {
        IOUtilFunctions.closeSilently(out);
        IOUtilFunctions.closeSilently(fos);
    }
}
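A call site only needs a local path and any Writable instance; for example (the path and value here are illustrative, not taken from the SysML tests):

    LocalFileUtils.writeWritableToLocal("/tmp/example.writable", new org.apache.hadoop.io.Text("hello"));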
From source file:org.commoncrawl.util.TimeSeriesDataFile.java
License:Open Source License
/**
 * append a record to the file ...
 *
 * @param key
 * @param value
 * @throws IOException
 */
public synchronized long appendRecordToLogFile(long key, Writable value) throws IOException {
    LogFileHeader header = new LogFileHeader();
    boolean preExistingHeader = fileName.exists();
    RandomAccessFile file = new RandomAccessFile(fileName, "rw");
    long recordPositionOut = -1;
    try {
        if (preExistingHeader) {
            long headerOffset = readLogFileHeader(file, header);
            if (header._writePos == 0) {
                recordPositionOut = headerOffset;
            } else {
                recordPositionOut = header._writePos;
            }
            // seek to appropriate write position
            file.seek(recordPositionOut);
        } else {
            recordPositionOut = writeLogFileHeader(file, header);
        }
        DataOutputBuffer buffer = new DataOutputBuffer();
        // write out sync bytes ...
        buffer.writeInt(SyncBytes);
        // write out placeholder for record length
        buffer.writeInt(0);
        // write out placeholder for crc
        buffer.writeLong(0);
        // write out key + value to buffer
        WritableUtils.writeVLong(buffer, key);
        // write out value ...
        value.write(buffer);
        // write out trailing record size (4 bytes sync + 4 bytes record length + 8 bytes crc + key/value buffer)
        buffer.writeInt(buffer.getLength());
        // reset crc
        crc.reset();
        // calc crc
        crc.update(buffer.getData(), RECORD_HEADER_LENGTH, buffer.getLength() - RECORD_HEADER_LENGTH);
        // ok fix up record ...
        // write out record length:
        // total length - sync bytes(4) - record length(4), at offset 4
        writeInt(buffer.getLength() - 8, 4, buffer.getData());
        // and write out crc
        // at offset 8 (after sync(4) and length(4))
        writeLong(crc.getValue(), 8, buffer.getData());
        // and then the data
        file.write(buffer.getData(), 0, buffer.getLength());
        // now update header ...
        header._itemCount += 1;
        header._writePos = file.getFilePointer();
        header._lastRecordLength = buffer.getLength() - 4;
        header._lastRecordKey = key;
        // now write out header anew ...
        writeLogFileHeader(file, header);
    } finally {
        if (file != null) {
            file.close();
        }
    }
    return recordPositionOut;
}
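Read together, the buffer writes above imply the following record layout; this is our reading of this snippet only, not a statement about the full CommonCrawl on-disk format:

    // offset 0..3    sync bytes (int)
    // offset 4..7    record length = total buffer length - 8 (int, patched in after the buffer is assembled)
    // offset 8..15   CRC over everything from offset 16 to the end (long, also patched in)
    // offset 16..    VLong-encoded key followed by the bytes produced by value.write(...)
    // final 4 bytes  trailing int recording the buffer length before this field was appended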
From source file:org.goldenorb.zookeeper.ZookeeperUtils.java
License:Apache License
/**
 * @param w
 *          - Writable
 * @return byte[]
 */
public static byte[] writableToByteArray(Writable w) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(baos);
    w.write(out);
    return baos.toByteArray();
}
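The reverse direction follows the same pattern with readFields. A minimal sketch is below; the helper name byteArrayToWritable is ours and not necessarily part of the GoldenOrb API:

    public static <T extends Writable> T byteArrayToWritable(byte[] bytes, T instance) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
        // populate the pre-constructed instance from the serialized bytes
        instance.readFields(in);
        return instance;
    }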
From source file:org.kitesdk.morphline.hadoop.rcfile.ReadRCFileTest.java
License:Apache License
private void createRCFile(final String fileName, final int numRecords, final int maxColumns,
        boolean addNullValue) throws IOException {
    // Write the sequence file
    SequenceFile.Metadata metadata = getMetadataForRCFile();
    Configuration conf = new Configuration();
    conf.set(RCFile.COLUMN_NUMBER_CONF_STR, String.valueOf(maxColumns));
    Path inputFile = dfs.makeQualified(new Path(testDirectory, fileName));
    RCFile.Writer rcFileWriter = new RCFile.Writer(dfs, conf, inputFile, null, metadata, null);
    for (int row = 0; row < numRecords; row++) {
        BytesRefArrayWritable dataWrite = new BytesRefArrayWritable(maxColumns);
        dataWrite.resetValid(maxColumns);
        for (int column = 0; column < maxColumns; column++) {
            Writable sampleText = new Text("ROW-NUM:" + row + ", COLUMN-NUM:" + column);
            // Set the last column of the last row as null
            if (addNullValue && column == maxColumns - 1 && row == numRecords - 1) {
                sampleText = NullWritable.get();
            }
            ByteArrayDataOutput dataOutput = ByteStreams.newDataOutput();
            sampleText.write(dataOutput);
            dataWrite.set(column, new BytesRefWritable(dataOutput.toByteArray()));
        }
        rcFileWriter.append(dataWrite);
    }
    rcFileWriter.close();
}
From source file:org.opencloudengine.flamingo.mapreduce.util.WritableUtils.java
License:Apache License
/**
 * Write object to a byte array.
 *
 * @param writableObject Object to write from.
 * @return Byte array with serialized object.
 */
public static byte[] writeToByteArray(Writable writableObject) {
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    DataOutput output = new DataOutputStream(outputStream);
    try {
        writableObject.write(output);
    } catch (IOException e) {
        throw new IllegalStateException("writeToByteArray: IOStateException", e);
    }
    return outputStream.toByteArray();
}
From source file:org.qcri.algebra.DummyRecordWriter.java
License:Apache License
private void cloneWritable(Writable from, Writable to) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    from.write(dos);
    dos.close();
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    to.readFields(dis);
}
From source file:org.qcri.pca.MahoutCompatibilityTest.java
License:Apache License
private static void writeAndRead(Writable toWrite, Writable toRead) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    try {
        toWrite.write(dos);
    } finally {
        Closeables.close(dos, true);
    }
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    try {
        toRead.readFields(dis);
    } finally {
        Closeables.close(dis, true);
    }
}
From source file:org.sf.xrime.model.label.Labels.java
License:Apache License
@Override
public void write(DataOutput out) throws IOException {
    if (labels == null) {
        out.writeInt(0);
        return;
    }
    out.writeInt(labels.size());
    for (String key : labels.keySet()) {
        Text.writeString(out, key);
        Writable value = labels.get(key);
        Text.writeString(out, value.getClass().getName());
        value.write(out);
    }
}
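A readFields counterpart would have to reverse this exact layout: read the entry count, then for each entry read the key, the value's class name, instantiate that class reflectively, and let the value read its own fields. The method below is our reconstruction of that contract, assuming labels is a Map<String, Writable>; it is not the actual xrime source:

    @Override
    public void readFields(DataInput in) throws IOException {
        int size = in.readInt();                      // number of label entries written above
        labels = new HashMap<String, Writable>(size);
        for (int j = 0; j < size; j++) {
            String key = Text.readString(in);         // label name
            String className = Text.readString(in);   // concrete Writable class recorded by write()
            try {
                Writable value = (Writable) Class.forName(className).newInstance();
                value.readFields(in);                 // the value deserializes its own fields
                labels.put(key, value);
            } catch (ReflectiveOperationException e) {
                throw new IOException("Cannot recreate label value of type " + className, e);
            }
        }
    }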