Usage examples for `org.apache.hadoop.io.FloatWritable#readFields(DataInput)`.
@Override public void readFields(DataInput in) throws IOException
From source file:com.moz.fiji.hive.io.FijiCellWritable.java
License:Apache License
/**
 * Reads and converts data according to the specified schema.
 *
 * @param in DataInput to deserialize this object from.
 * @param schema Schema to be used for deserializing this data.
 * @return the data read and converted according to the schema.
 * @throws IOException if there was an error reading.
 */
private static Object readData(DataInput in, Schema schema) throws IOException {
    switch (schema.getType()) {
    case INT:
        return WritableUtils.readVInt(in);
    case LONG:
        return WritableUtils.readVLong(in);
    case DOUBLE: {
        DoubleWritable doubleValue = new DoubleWritable();
        doubleValue.readFields(in);
        return doubleValue.get();
    }
    case ENUM:
    case STRING:
        // Enums are serialized as their symbol name, so both decode as strings.
        return WritableUtils.readString(in);
    case FLOAT: {
        FloatWritable floatValue = new FloatWritable();
        floatValue.readFields(in);
        return floatValue.get();
    }
    case ARRAY: {
        // Element count precedes the elements, each encoded with the element schema.
        final int numElements = WritableUtils.readVInt(in);
        List<Object> elements = Lists.newArrayList();
        for (int i = 0; i < numElements; i++) {
            elements.add(readData(in, schema.getElementType()));
        }
        return elements;
    }
    case RECORD: {
        // Field count, then (name, value) pairs; each value uses its field's schema.
        GenericRecord record = new GenericData.Record(schema);
        final int numFields = WritableUtils.readVInt(in);
        for (int i = 0; i < numFields; i++) {
            final String fieldName = WritableUtils.readString(in);
            record.put(fieldName, readData(in, schema.getField(fieldName).schema()));
        }
        return record;
    }
    case MAP: {
        // Entry count, then (string key, value) pairs encoded with the value schema.
        final int numEntries = WritableUtils.readVInt(in);
        Map<String, Object> entries = Maps.newHashMap();
        for (int i = 0; i < numEntries; i++) {
            final String key = WritableUtils.readString(in);
            entries.put(key, readData(in, schema.getValueType()));
        }
        return entries;
    }
    case UNION: {
        // A union is encoded as the branch index followed by the branch's value.
        final int tag = WritableUtils.readVInt(in);
        return readData(in, schema.getTypes().get(tag));
    }
    case BYTES:
        return WritableUtils.readCompressedByteArray(in);
    case BOOLEAN: {
        BooleanWritable booleanValue = new BooleanWritable();
        booleanValue.readFields(in);
        return booleanValue.get();
    }
    case NULL:
        return null;
    default:
        throw new UnsupportedOperationException("Unsupported type: " + schema.getType());
    }
}
From source file:org.goldenorb.io.checkpoint.CheckPointDataTest.java
License:Apache License
/** * Tests the CheckPointDataInput class by reading several different types of Writables from the checkpoint. * Asserts that Writables that were written in are of the same value and type when reading in from HDFS. * // w w w .jav a 2s. c o m * @throws Exception */ @Test public void testCheckpointInput() throws Exception { int superStep = 0; int partition = 0; OrbConfiguration orbConf = new OrbConfiguration(); orbConf.set("fs.default.name", "hdfs://localhost:" + cluster.getNameNodePort()); orbConf.setJobNumber("0"); orbConf.setFileOutputPath("test"); CheckPointDataInput checkpointInput = new CheckPointDataInput(orbConf, superStep, partition); // Data is read on a FIFO basis IntWritable intInput = new IntWritable(); intInput.readFields(checkpointInput); LongWritable longInput = new LongWritable(); longInput.readFields(checkpointInput); Text textInput = new Text(); textInput.readFields(checkpointInput); FloatWritable floatInput = new FloatWritable(); floatInput.readFields(checkpointInput); checkpointInput.close(); assertThat(checkpointInput, notNullValue()); assertEquals(intInput.get(), 4); assertEquals(longInput.get(), 9223372036854775807L); assertEquals(textInput.toString(), "test"); assertTrue(floatInput.get() == 3.14159F); }
From source file:org.shaf.core.util.IOUtilsTest.java
License:Apache License
/**
 * Test writing of {@code float} value: serializes it with {@code IOUtils.writeObject}
 * and reads it back through a {@link FloatWritable}.
 */
@Test
public void testWriteFloat() {
    final float expected = 123.456f;
    byte[] serialized = null;

    // Serialize the float into an in-memory buffer.
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(baos);) {
        IOUtils.writeObject(expected, out);
        serialized = baos.toByteArray();
    } catch (IOException exc) {
        fail(exc.getMessage());
    }

    // Deserialize through FloatWritable and compare against the original value.
    try (ByteArrayInputStream bais = new ByteArrayInputStream(serialized);
            DataInputStream in = new DataInputStream(bais);) {
        FloatWritable probe = new FloatWritable();
        probe.readFields(in);
        assertEquals(expected, probe.get(), 0.0001);
    } catch (IOException exc) {
        fail(exc.getMessage());
    }
}
From source file:tlfetl.card.TLFDWHValue.java
@Override public void readFields(DataInput di) throws IOException { FloatWritable writableAmount = new FloatWritable(); IntWritable writableCount = new IntWritable(); writableAmount.readFields(di); writableCount.readFields(di);//from w ww. j a va 2s . com amount = writableAmount.get(); count = writableCount.get(); }