Example usage for java.io DataInput readInt

List of usage examples for java.io DataInput readInt

Introduction

On this page you can find example usages of java.io.DataInput.readInt().

Prototype

int readInt() throws IOException;

Document

Reads four input bytes and returns an int value.
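
A minimal sketch of that contract, using only java.io: readInt() consumes exactly four bytes and assembles them big-endian (high byte first), so an int written by DataOutputStream.writeInt round-trips through a byte array. If fewer than four bytes remain, readInt() throws EOFException rather than returning a partial value.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadIntDemo {
    public static void main(String[] args) throws IOException {
        // Write an int with DataOutputStream, then read it back with readInt().
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        new DataOutputStream(buffer).writeInt(0xCAFEBABE);

        byte[] bytes = buffer.toByteArray(); // exactly four bytes, big-endian
        DataInput in = new DataInputStream(new ByteArrayInputStream(bytes));
        System.out.println(Integer.toHexString(in.readInt())); // prints "cafebabe"
    }
}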

Usage

From source file:com.fiorano.openesb.application.application.LogManager.java

/************************************************[ Streaming ]************************************************/

public void fromStream(java.io.DataInput is, int versionNo) throws java.io.IOException {

    super.fromStream(is, versionNo);
    int count = is.readInt();
    if (count != 0) {
        Properties properties = new Properties();
        for (int i = 0; i < count; i++) {
            // Each entry is two UTF strings: the key, then the value.
            String name = UTFReaderWriter.readUTF(is);
            String value = UTFReaderWriter.readUTF(is);
            properties.put(name, value);
        }
        this.setProps(properties);
    }
}

From source file:ml.shifu.shifu.core.dtrain.dataset.PersistBasicFloatNetwork.java

private int[] readIntArray(DataInput in) throws IOException {
    int size = in.readInt();
    int[] array = new int[size];
    for (int i = 0; i < size; i++) {
        array[i] = in.readInt();
    }
    return array;
}
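
The reader works only because the writer emitted the element count first. A hypothetical write-side counterpart (writeIntArray is an illustrative name, not part of the Shifu source quoted here) would mirror it exactly:

// Hypothetical mirror of readIntArray: length prefix first, then elements.
private void writeIntArray(DataOutput out, int[] array) throws IOException {
    out.writeInt(array.length); // the size readIntArray() recovers first
    for (int value : array) {
        out.writeInt(value);
    }
}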

From source file:ml.shifu.shifu.core.dtrain.dataset.PersistBasicFloatNetwork.java

private double[] readDoubleArray(DataInput in) throws IOException {
    int size = in.readInt();
    double[] array = new double[size];
    for (int i = 0; i < size; i++) {
        array[i] = in.readDouble();
    }
    return array;
}

From source file:parquet.hadoop.ParquetInputSplit.java

private BlockMetaData readBlock(DataInput in) throws IOException {
    final BlockMetaData block = new BlockMetaData();
    int size = in.readInt();
    for (int i = 0; i < size; i++) {
        block.addColumn(readColumn(in));
    }
    block.setRowCount(in.readLong());
    block.setTotalByteSize(in.readLong());
    if (!in.readBoolean()) {
        block.setPath(in.readUTF().intern());
    }
    return block;
}

From source file:org.apache.mahout.df.data.Dataset.java

@Override
public void readFields(DataInput in) throws IOException {
    int nbAttributes = in.readInt();
    attributes = new Attribute[nbAttributes];
    for (int attr = 0; attr < nbAttributes; attr++) {
        String name = WritableUtils.readString(in);
        attributes[attr] = Attribute.valueOf(name);
    }

    labels = WritableUtils.readStringArray(in);

    ignored = DFUtils.readIntArray(in);

    // only CATEGORICAL attributes have values
    values = new String[nbAttributes][];
    for (int attr = 0; attr < nbAttributes; attr++) {
        if (attributes[attr].isCategorical()) {
            values[attr] = WritableUtils.readStringArray(in);
        }
    }

    labelId = in.readInt();
    nbInstances = in.readInt();
}

From source file:org.apache.mahout.classifier.KnnMR.data.Dataset.java

@Override
public void readFields(DataInput in) throws IOException {
    int nbAttributes = in.readInt();
    attributes = new Attribute[nbAttributes];
    for (int attr = 0; attr < nbAttributes; attr++) {
        String name = WritableUtils.readString(in);
        attributes[attr] = Attribute.valueOf(name);
    }

    ignored = Chi_RWUtils.readIntArray(in);

    // only CATEGORICAL attributes have values
    values = new String[nbAttributes][];
    for (int attr = 0; attr < nbAttributes; attr++) {
        if (attributes[attr].isCategorical()) {
            values[attr] = WritableUtils.readStringArray(in);
        }
    }

    labelId = in.readInt();
    nbInstances = in.readInt();
}

From source file:org.apache.hadoop.hbase.hbql.filter.RecordFilterList.java

public void readFields(final DataInput in) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    byte opByte = in.readByte();
    operator = Operator.values()[opByte];
    int size = in.readInt();
    if (size > 0) {
        filters = new ArrayList<Filter>(size);
        for (int i = 0; i < size; i++) {
            Filter filter = (Filter) HbaseObjectWritable.readObject(in, conf);
            filters.add(filter);
        }
    }
}

From source file:org.apache.hadoop.hdfs.protocol.PolicyInfo.java

public void readFields(DataInput in) throws IOException {
    this.srcPath = new Path(Text.readString(in));
    this.description = Text.readString(in);
    for (int n = in.readInt(); n > 0; n--) {
        String name = Text.readString(in);
        String value = Text.readString(in);
        properties.setProperty(name, value);
    }
    for (int n = in.readInt(); n > 0; n--) {
        String destPath = Text.readString(in);
        Properties p = new Properties();
        for (int m = in.readInt(); m > 0; m--) {
            String name = Text.readString(in);
            String value = Text.readString(in);
            p.setProperty(name, value);
        }
        this.addDestPath(destPath, p);
    }
}
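
PolicyInfo nests the count-prefixed pattern two levels deep: a property count, then a destination-path count, with each destination carrying its own nested property count. A hypothetical write() counterpart is sketched below; the destPaths map is an assumption standing in for whatever addDestPath() populates and is not part of the Hadoop source quoted here.

// Hypothetical mirror of readFields(); destPaths is an assumed
// Map<String, Properties> filled in by addDestPath().
public void write(DataOutput out) throws IOException {
    Text.writeString(out, srcPath.toString());
    Text.writeString(out, description);
    out.writeInt(properties.size()); // count precedes the name/value pairs
    for (String name : properties.stringPropertyNames()) {
        Text.writeString(out, name);
        Text.writeString(out, properties.getProperty(name));
    }
    out.writeInt(destPaths.size()); // outer count: destination paths
    for (Map.Entry<String, Properties> e : destPaths.entrySet()) {
        Text.writeString(out, e.getKey());
        out.writeInt(e.getValue().size()); // inner count: nested properties
        for (String name : e.getValue().stringPropertyNames()) {
            Text.writeString(out, name);
            Text.writeString(out, e.getValue().getProperty(name));
        }
    }
}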

From source file:org.apache.hadoop.hbase.regionserver.wal.WALEdit.java

public void readFields(DataInput in) throws IOException {
    kvs.clear();
    if (scopes != null) {
        scopes.clear();
    }
    int versionOrLength = in.readInt();
    // TODO: Change version when we protobuf.  Also, change way we serialize KV!  Pb it too.
    if (versionOrLength == VERSION_2) {
        // this is new style HLog entry containing multiple KeyValues.
        int numEdits = in.readInt();
        for (int idx = 0; idx < numEdits; idx++) {
            if (compressionContext != null) {
                this.add(KeyValueCompression.readKV(in, compressionContext));
            } else {
                this.add(KeyValue.create(in));
            }
        }
        int numFamilies = in.readInt();
        if (numFamilies > 0) {
            if (scopes == null) {
                scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
            }
            for (int i = 0; i < numFamilies; i++) {
                byte[] fam = Bytes.readByteArray(in);
                int scope = in.readInt();
                scopes.put(fam, scope);
            }
        }
    } else {
        // this is an old style HLog entry. The int that we just
        // read is actually the length of a single KeyValue
        this.add(KeyValue.create(versionOrLength, in));
    }
}

From source file:org.apache.hadoop.hdfs.protocol.LocatedBlocks.java

public void readFields(DataInput in) throws IOException {
    this.fileLength = in.readLong();
    underConstruction = in.readBoolean();
    // read located blocks
    int nrBlocks = in.readInt();
    this.blocks = new ArrayList<LocatedBlock>(nrBlocks);
    for (int idx = 0; idx < nrBlocks; idx++) {
        LocatedBlock blk = new LocatedBlock();
        blk.readFields(in);
        this.blocks.add(blk);
    }
}
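
Every readFields() above follows the same convention: an int count read up front, then that many records. The self-contained sketch below, using plain java.io only, shows the full round trip of that convention with an invented list of strings.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class LengthPrefixedListDemo {
    public static void main(String[] args) throws IOException {
        List<String> names = List.of("alpha", "beta", "gamma");

        // Write side: the count first, then each entry.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        out.writeInt(names.size());
        for (String name : names) {
            out.writeUTF(name);
        }

        // Read side: readInt() recovers the count, which bounds the loop.
        DataInput in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        int count = in.readInt();
        List<String> decoded = new ArrayList<>(count);
        for (int i = 0; i < count; i++) {
            decoded.add(in.readUTF());
        }
        System.out.println(decoded); // [alpha, beta, gamma]
    }
}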