Example usage for java.io DataInput readFloat

List of usage examples for java.io DataInput readFloat

Introduction

On this page you can find example usage for java.io DataInput readFloat.

Prototype

float readFloat() throws IOException;

Document

Reads four input bytes and returns a float value.
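
A minimal, self-contained sketch of the round trip (the class name ReadFloatExample is made up for illustration and is not taken from any of the projects below): DataOutput.writeFloat writes four big-endian bytes, and DataInput.readFloat reads exactly those four bytes back.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadFloatExample {
    public static void main(String[] args) throws IOException {
        // Write a float so we have four bytes to read back.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        new DataOutputStream(bos).writeFloat(3.14f);

        // readFloat() consumes exactly four bytes (big-endian IEEE 754).
        DataInput in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
        float value = in.readFloat();
        System.out.println(value); // prints 3.14
    }
}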

Usage

From source file:org.apache.hawq.pxf.service.io.GPDBWritable.java

@Override
public void readFields(DataInput in) throws IOException {
    /*
     * extract pkt len.
     *
     * GPSQL-1107:
     * The DataInput might already be empty (EOF), but we can't check it beforehand.
     * If that's the case, pktlen is updated to -1, to mark that the object is still empty.
     * (can be checked with isEmpty()).
     */
    pktlen = readPktLen(in);
    if (isEmpty()) {
        return;
    }

    /* extract the version and col cnt */
    int version = in.readShort();
    int curOffset = 4 + 2;
    int colCnt;

    /* !!! Check VERSION !!! */
    if (version != GPDBWritable.VERSION && version != GPDBWritable.PREV_VERSION) {
        throw new IOException("Current GPDBWritable version(" + GPDBWritable.VERSION
                + ") does not match input version(" + version + ")");
    }

    if (version == GPDBWritable.VERSION) {
        errorFlag = in.readByte();
        curOffset += 1;
    }

    colCnt = in.readShort();
    curOffset += 2;

    /* Extract Column Type */
    colType = new int[colCnt];
    DBType[] coldbtype = new DBType[colCnt];
    for (int i = 0; i < colCnt; i++) {
        int enumType = (in.readByte());
        curOffset += 1;
        if (enumType == DBType.BIGINT.ordinal()) {
            colType[i] = BIGINT.getOID();
            coldbtype[i] = DBType.BIGINT;
        } else if (enumType == DBType.BOOLEAN.ordinal()) {
            colType[i] = BOOLEAN.getOID();
            coldbtype[i] = DBType.BOOLEAN;
        } else if (enumType == DBType.FLOAT8.ordinal()) {
            colType[i] = FLOAT8.getOID();
            coldbtype[i] = DBType.FLOAT8;
        } else if (enumType == DBType.INTEGER.ordinal()) {
            colType[i] = INTEGER.getOID();
            coldbtype[i] = DBType.INTEGER;
        } else if (enumType == DBType.REAL.ordinal()) {
            colType[i] = REAL.getOID();
            coldbtype[i] = DBType.REAL;
        } else if (enumType == DBType.SMALLINT.ordinal()) {
            colType[i] = SMALLINT.getOID();
            coldbtype[i] = DBType.SMALLINT;
        } else if (enumType == DBType.BYTEA.ordinal()) {
            colType[i] = BYTEA.getOID();
            coldbtype[i] = DBType.BYTEA;
        } else if (enumType == DBType.TEXT.ordinal()) {
            colType[i] = TEXT.getOID();
            coldbtype[i] = DBType.TEXT;
        } else {
            throw new IOException("Unknown GPDBWritable.DBType ordinal value");
        }
    }

    /* Extract null bit array */
    byte[] nullbytes = new byte[getNullByteArraySize(colCnt)];
    in.readFully(nullbytes);
    curOffset += nullbytes.length;
    boolean[] colIsNull = byteArrayToBooleanArray(nullbytes, colCnt);

    /* extract column value */
    colValue = new Object[colCnt];
    for (int i = 0; i < colCnt; i++) {
        if (!colIsNull[i]) {
            /* Skip the alignment padding */
            int skipbytes = roundUpAlignment(curOffset, coldbtype[i].getAlignment()) - curOffset;
            for (int j = 0; j < skipbytes; j++) {
                in.readByte();
            }
            curOffset += skipbytes;

            /* For fixed length type, increment the offset according to the type length here.
             * For var length type (BYTEA, TEXT), we'll read a 4 byte length header and the
             * actual payload.
             */
            int varcollen = -1;
            if (coldbtype[i].isVarLength()) {
                varcollen = in.readInt();
                curOffset += 4 + varcollen;
            } else {
                curOffset += coldbtype[i].getTypeLength();
            }

            switch (DataType.get(colType[i])) {
            case BIGINT: {
                colValue[i] = in.readLong();
                break;
            }
            case BOOLEAN: {
                colValue[i] = in.readBoolean();
                break;
            }
            case FLOAT8: {
                colValue[i] = in.readDouble();
                break;
            }
            case INTEGER: {
                colValue[i] = in.readInt();
                break;
            }
            case REAL: {
                colValue[i] = in.readFloat();
                break;
            }
            case SMALLINT: {
                colValue[i] = in.readShort();
                break;
            }

            /* For BYTEA column, it has a 4 byte var length header. */
            case BYTEA: {
                colValue[i] = new byte[varcollen];
                in.readFully((byte[]) colValue[i]);
                break;
            }
            /* For text formatted column, it has a 4 byte var length header
             * and it's always a null terminated string.
             * So, we can remove the last "\0" when constructing the string.
             */
            case TEXT: {
                byte[] data = new byte[varcollen];
                in.readFully(data, 0, varcollen);
                colValue[i] = new String(data, 0, varcollen - 1, CHARSET);
                break;
            }

            default:
                throw new IOException("Unknown GPDBWritable ColType");
            }
        }
    }

    /* Skip the ending alignment padding */
    int skipbytes = roundUpAlignment(curOffset, 8) - curOffset;
    for (int j = 0; j < skipbytes; j++) {
        in.readByte();
    }
    curOffset += skipbytes;

    if (errorFlag != 0) {
        throw new IOException("Received error value " + errorFlag + " from format");
    }
}

From source file:org.apache.horn.core.AbstractNeuralNetwork.java

@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void readFields(DataInput input) throws IOException {
    // read model type
    this.modelType = WritableUtils.readString(input);
    // read learning rate
    this.learningRate = input.readFloat();
    // read model path
    this.modelPath = WritableUtils.readString(input);

    if (this.modelPath.equals("null")) {
        this.modelPath = null;
    }

    // read feature transformer
    int bytesLen = input.readInt();
    byte[] featureTransformerBytes = new byte[bytesLen];
    for (int i = 0; i < featureTransformerBytes.length; ++i) {
        featureTransformerBytes[i] = input.readByte();
    }

    Class<? extends FloatFeatureTransformer> featureTransformerCls = (Class<? extends FloatFeatureTransformer>) SerializationUtils
            .deserialize(featureTransformerBytes);

    Constructor[] constructors = featureTransformerCls.getDeclaredConstructors();
    Constructor constructor = constructors[0];

    try {
        this.featureTransformer = (FloatFeatureTransformer) constructor.newInstance(new Object[] {});
    } catch (InstantiationException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (InvocationTargetException e) {
        e.printStackTrace();
    }
}

From source file:org.apache.horn.core.LayeredNeuralNetwork.java

@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);

    this.finalLayerIdx = input.readInt();
    this.dropRate = input.readFloat();

    // read neuron classes
    int neuronClasses = input.readInt();
    this.neuronClassList = Lists.newArrayList();
    for (int i = 0; i < neuronClasses; ++i) {
        try {
            Class<? extends Neuron> clazz = (Class<? extends Neuron>) Class.forName(input.readUTF());
            neuronClassList.add(clazz);
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    // read squash functions
    int squashingFunctionSize = input.readInt();
    this.squashingFunctionList = Lists.newArrayList();
    for (int i = 0; i < squashingFunctionSize; ++i) {
        this.squashingFunctionList.add(FunctionFactory.createFloatFunction(WritableUtils.readString(input)));
    }

    // read weights and construct matrices of previous updates
    int numOfMatrices = input.readInt();
    this.weightMatrixList = Lists.newArrayList();
    this.prevWeightUpdatesList = Lists.newArrayList();
    for (int i = 0; i < numOfMatrices; ++i) {
        FloatMatrix matrix = FloatMatrixWritable.read(input);
        this.weightMatrixList.add(matrix);
        this.prevWeightUpdatesList.add(new DenseFloatMatrix(matrix.getRowCount(), matrix.getColumnCount()));
    }

}

From source file:org.apache.horn.core.RecurrentLayeredNeuralNetwork.java

@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);

    this.finalLayerIdx = input.readInt();
    this.dropRate = input.readFloat();

    // read neuron classes
    int neuronClasses = input.readInt();
    this.neuronClassList = Lists.newArrayList();
    for (int i = 0; i < neuronClasses; ++i) {
        try {
            Class<? extends Neuron> clazz = (Class<? extends Neuron>) Class.forName(input.readUTF());
            neuronClassList.add(clazz);
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    // read squash functions
    int squashingFunctionSize = input.readInt();
    this.squashingFunctionList = Lists.newArrayList();
    for (int i = 0; i < squashingFunctionSize; ++i) {
        this.squashingFunctionList.add(FunctionFactory.createFloatFunction(WritableUtils.readString(input)));
    }

    this.recurrentStepSize = input.readInt();
    this.numOutCells = input.readInt();
    int recurrentLayerListSize = input.readInt();
    this.recurrentLayerList = Lists.newArrayList();
    for (int i = 0; i < recurrentLayerListSize; i++) {
        this.recurrentLayerList.add(input.readBoolean());
    }

    // read weights and construct matrices of previous updates
    int numOfMatrices = input.readInt();
    this.weightMatrixLists = Lists.newArrayListWithExpectedSize(this.recurrentStepSize);
    this.prevWeightUpdatesLists = Lists.newArrayList();

    for (int step = 0; step < this.recurrentStepSize; step++) {
        this.weightMatrixList = Lists.newArrayList();
        this.prevWeightUpdatesList = Lists.newArrayList();

        for (int j = 0; j < this.layerSizeList.size() - 2; j++) {
            FloatMatrix matrix = FloatMatrixWritable.read(input);
            this.weightMatrixList.add(matrix);
            this.prevWeightUpdatesList.add(new DenseFloatMatrix(matrix.getRowCount(), matrix.getColumnCount()));
        }
        // if the cell has output layer, read from input
        if (step >= this.recurrentStepSize - this.numOutCells) {
            FloatMatrix matrix = FloatMatrixWritable.read(input);
            this.weightMatrixList.add(matrix);
            this.prevWeightUpdatesList.add(new DenseFloatMatrix(matrix.getRowCount(), matrix.getColumnCount()));
        }
        this.weightMatrixLists.add(this.weightMatrixList);
        this.prevWeightUpdatesLists.add(this.prevWeightUpdatesList);
    }
}

From source file:org.apache.nutch.crawl.CrawlDatum.java

public void readFields(DataInput in) throws IOException {
    byte version = in.readByte(); // read version
    if (version > CUR_VERSION) // check version
        throw new VersionMismatchException(CUR_VERSION, version);

    status = in.readByte();
    fetchTime = in.readLong();
    retries = in.readByte();
    if (version > 5) {
        fetchInterval = in.readInt();
    } else
        fetchInterval = Math.round(in.readFloat());
    score = in.readFloat();
    if (version > 2) {
        modifiedTime = in.readLong();
        int cnt = in.readByte();
        if (cnt > 0) {
            signature = new byte[cnt];
            in.readFully(signature);
        } else
            signature = null;
    }

    if (version > 3) {
        boolean hasMetadata = false;
        if (version < 7) {
            org.apache.hadoop.io.MapWritable oldMetaData = new org.apache.hadoop.io.MapWritable();
            if (in.readBoolean()) {
                hasMetadata = true;
                metaData = new org.apache.hadoop.io.MapWritable();
                oldMetaData.readFields(in);
            }
            for (Writable key : oldMetaData.keySet()) {
                metaData.put(key, oldMetaData.get(key));
            }
        } else {
            if (in.readBoolean()) {
                hasMetadata = true;
                metaData = new org.apache.hadoop.io.MapWritable();
                metaData.readFields(in);
            }
        }
        if (hasMetadata == false)
            metaData = null;
    }
    // translate status codes
    if (version < 5) {
        if (oldToNew.containsKey(status))
            status = oldToNew.get(status);
        else
            status = STATUS_DB_UNFETCHED;

    }
}

From source file:org.apache.pig.data.BinInterSedes.java

/**
 * Expects binInterSedes data types (NOT DataType types!)
 * <p>
 *
 * @see org.apache.pig.data.InterSedes#readDatum(java.io.DataInput, byte)
 */
@Override
public Object readDatum(DataInput in, byte type) throws IOException, ExecException {
    switch (type) {
    case TUPLE_0:
    case TUPLE_1:
    case TUPLE_2:
    case TUPLE_3:
    case TUPLE_4:
    case TUPLE_5:
    case TUPLE_6:
    case TUPLE_7:
    case TUPLE_8:
    case TUPLE_9:
    case TUPLE:
    case TINYTUPLE:
    case SMALLTUPLE:
        return SedesHelper.readGenericTuple(in, type);

    case BAG:
    case TINYBAG:
    case SMALLBAG:
        return readBag(in, type);

    case MAP:
    case TINYMAP:
    case SMALLMAP:
        return readMap(in, type);

    case INTERNALMAP:
        return readInternalMap(in);

    case INTEGER_0:
        return Integer.valueOf(0);
    case INTEGER_1:
        return Integer.valueOf(1);
    case INTEGER_INBYTE:
        return Integer.valueOf(in.readByte());
    case INTEGER_INSHORT:
        return Integer.valueOf(in.readShort());
    case INTEGER:
        return Integer.valueOf(in.readInt());

    case LONG_0:
        return Long.valueOf(0);
    case LONG_1:
        return Long.valueOf(1);
    case LONG_INBYTE:
        return Long.valueOf(in.readByte());
    case LONG_INSHORT:
        return Long.valueOf(in.readShort());
    case LONG_ININT:
        return Long.valueOf(in.readInt());
    case LONG:
        return Long.valueOf(in.readLong());

    case DATETIME:
        return new DateTime(in.readLong(), DateTimeZone.forOffsetMillis(in.readShort() * ONE_MINUTE));

    case FLOAT:
        return Float.valueOf(in.readFloat());

    case DOUBLE:
        return Double.valueOf(in.readDouble());

    case BIGINTEGER:
        return readBigInteger(in);

    case BIGDECIMAL:
        return readBigDecimal(in);

    case BOOLEAN_TRUE:
        return Boolean.valueOf(true);

    case BOOLEAN_FALSE:
        return Boolean.valueOf(false);

    case BYTE:
        return Byte.valueOf(in.readByte());

    case TINYBYTEARRAY:
    case SMALLBYTEARRAY:
    case BYTEARRAY:
        return new DataByteArray(SedesHelper.readBytes(in, type));

    case CHARARRAY:
    case SMALLCHARARRAY:
        return SedesHelper.readChararray(in, type);

    case GENERIC_WRITABLECOMPARABLE:
        return readWritable(in);

    case SCHEMA_TUPLE_BYTE_INDEX:
    case SCHEMA_TUPLE_SHORT_INDEX:
    case SCHEMA_TUPLE:
        return readSchemaTuple(in, type);

    case NULL:
        return null;

    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}

From source file:org.apache.pig.data.DataReaderWriter.java

public static Object readDatum(DataInput in, byte type) throws IOException, ExecException {
    switch (type) {
    case DataType.TUPLE:
        return bytesToTuple(in);

    case DataType.BAG:
        return bytesToBag(in);

    case DataType.MAP:
        return bytesToMap(in);

    case DataType.INTERNALMAP:
        return bytesToInternalMap(in);

    case DataType.INTEGER:
        return Integer.valueOf(in.readInt());

    case DataType.LONG:
        return Long.valueOf(in.readLong());

    case DataType.FLOAT:
        return Float.valueOf(in.readFloat());

    case DataType.DOUBLE:
        return Double.valueOf(in.readDouble());

    case DataType.BOOLEAN:
        return Boolean.valueOf(in.readBoolean());

    case DataType.BYTE:
        return Byte.valueOf(in.readByte());

    case DataType.BYTEARRAY: {
        int size = in.readInt();
        byte[] ba = new byte[size];
        in.readFully(ba);
        return new DataByteArray(ba);
    }

    case DataType.BIGCHARARRAY:
        return bytesToBigCharArray(in);

    case DataType.CHARARRAY:
        return bytesToCharArray(in);

    case DataType.GENERIC_WRITABLECOMPARABLE:
        return bytesToWritable(in);

    case DataType.NULL:
        return null;

    default:
        throw new RuntimeException("Unexpected data type " + type + " found in stream.");
    }
}

From source file:org.apache.pig.data.SchemaTuple.java

protected static float read(DataInput in, float v) throws IOException {
    return in.readFloat();
}

From source file:org.apache.tez.mapreduce.hadoop.MRTaskStatus.java

@Override
public void readFields(DataInput in) throws IOException {
    taskAttemptId = TezTaskAttemptID.read(in);
    state = WritableUtils.readEnum(in, State.class);
    progress = in.readFloat();
    diagnostics = WritableUtils.readString(in);
    userStatusInfo = WritableUtils.readString(in);
    phase = WritableUtils.readEnum(in, Phase.class);
    counters = new TezCounters();

    counters.readFields(in);

    localOutputSize = in.readLong();
    startTime = in.readLong();
    finishTime = in.readLong();
    sortFinishTime = in.readLong();
    mapFinishTime = in.readLong();
    shuffleFinishTime = in.readLong();

    int numFailedDependencies = in.readInt();
    for (int i = 0; i < numFailedDependencies; i++) {
        TezTaskAttemptID taskAttemptId = TezTaskAttemptID.read(in);
        failedTaskDependencies.add(taskAttemptId);
    }

}

From source file:org.cloudata.core.common.io.CObjectWritable.java

/** Read a {@link CWritable}, {@link String}, primitive type, or an array of
 * the preceding. */
@SuppressWarnings("unchecked")
public static Object readObject(DataInput in, CObjectWritable objectWritable, CloudataConf conf,
        boolean arrayComponent, Class componentClass) throws IOException {
    String className;
    if (arrayComponent) {
        className = componentClass.getName();
    } else {
        className = CUTF8.readString(in);
        //SANGCHUL
        //   System.out.println("SANGCHUL] className:" + className);
    }

    Class<?> declaredClass = PRIMITIVE_NAMES.get(className);
    if (declaredClass == null) {
        try {
            declaredClass = conf.getClassByName(className);
        } catch (ClassNotFoundException e) {
            //SANGCHUL
            e.printStackTrace();
            throw new RuntimeException("readObject can't find class[className=" + className + "]", e);
        }
    }

    Object instance;

    if (declaredClass.isPrimitive()) { // primitive types

        if (declaredClass == Boolean.TYPE) { // boolean
            instance = Boolean.valueOf(in.readBoolean());
        } else if (declaredClass == Character.TYPE) { // char
            instance = Character.valueOf(in.readChar());
        } else if (declaredClass == Byte.TYPE) { // byte
            instance = Byte.valueOf(in.readByte());
        } else if (declaredClass == Short.TYPE) { // short
            instance = Short.valueOf(in.readShort());
        } else if (declaredClass == Integer.TYPE) { // int
            instance = Integer.valueOf(in.readInt());
        } else if (declaredClass == Long.TYPE) { // long
            instance = Long.valueOf(in.readLong());
        } else if (declaredClass == Float.TYPE) { // float
            instance = Float.valueOf(in.readFloat());
        } else if (declaredClass == Double.TYPE) { // double
            instance = Double.valueOf(in.readDouble());
        } else if (declaredClass == Void.TYPE) { // void
            instance = null;
        } else {
            throw new IllegalArgumentException("Not a primitive: " + declaredClass);
        }

    } else if (declaredClass.isArray()) { // array
        //System.out.println("SANGCHUL] is array");
        int length = in.readInt();
        //System.out.println("SANGCHUL] array length : " + length);
        //System.out.println("Read:in.readInt():" + length);
        if (declaredClass.getComponentType() == Byte.TYPE) {
            byte[] bytes = new byte[length];
            in.readFully(bytes);
            instance = bytes;
        } else if (declaredClass.getComponentType() == ColumnValue.class) {
            instance = readColumnValue(in, conf, declaredClass, length);
        } else {
            Class componentType = declaredClass.getComponentType();

            // SANGCHUL
            //System.out.println("SANGCHUL] componentType : " + componentType.getName());

            instance = Array.newInstance(componentType, length);
            for (int i = 0; i < length; i++) {
                Object arrayComponentInstance = readObject(in, null, conf, !componentType.isArray(),
                        componentType);
                Array.set(instance, i, arrayComponentInstance);
                //Array.set(instance, i, readObject(in, conf));
            }
        }
    } else if (declaredClass == String.class) { // String
        instance = CUTF8.readString(in);
    } else if (declaredClass.isEnum()) { // enum
        instance = Enum.valueOf((Class<? extends Enum>) declaredClass, CUTF8.readString(in));
    } else if (declaredClass == ColumnValue.class) {
        // ColumnValue is handled as a special case: it is constructed directly
        // and deserialized via readFields() instead of going through CWritableFactories.
        Class instanceClass = null;
        try {
            short typeDiff = in.readShort();
            if (typeDiff == TYPE_DIFF) {
                instanceClass = conf.getClassByName(CUTF8.readString(in));
            } else {
                instanceClass = declaredClass;
            }
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("readObject can't find class", e);
        }
        ColumnValue columnValue = new ColumnValue();
        columnValue.readFields(in);
        instance = columnValue;
    } else { // Writable

        Class instanceClass = null;
        try {
            short typeDiff = in.readShort();
            // SANGCHUL
            //System.out.println("SANGCHUL] typeDiff : " + typeDiff);
            //System.out.println("Read:in.readShort():" + typeDiff);
            if (typeDiff == TYPE_DIFF) {
                // SANGCHUL
                String classNameTemp = CUTF8.readString(in);
                //System.out.println("SANGCHUL] typeDiff : " + classNameTemp);
                instanceClass = conf.getClassByName(classNameTemp);
                //System.out.println("Read:UTF8.readString(in):" + instanceClass.getClass());
            } else {
                instanceClass = declaredClass;
            }
        } catch (ClassNotFoundException e) {

            // SANGCHUL
            e.printStackTrace();
            throw new RuntimeException("readObject can't find class", e);
        }

        CWritable writable = CWritableFactories.newInstance(instanceClass, conf);
        writable.readFields(in);
        //System.out.println("Read:writable.readFields(in)");
        instance = writable;

        if (instanceClass == NullInstance.class) { // null
            declaredClass = ((NullInstance) instance).declaredClass;
            instance = null;
        }
    }

    if (objectWritable != null) { // store values
        objectWritable.declaredClass = declaredClass;
        objectWritable.instance = instance;
    }

    return instance;
}