Example usage for java.io DataInput readInt

List of usage examples for java.io DataInput readInt

Introduction

On this page you can find example usages of java.io DataInput readInt.

Prototype

int readInt() throws IOException;

Document

Reads four input bytes and returns an int value.
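
Before the project examples below, here is a minimal self-contained sketch (class and variable names invented for illustration) showing that readInt() reads back the four big-endian bytes produced by the matching DataOutput.writeInt() call:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ReadIntDemo {
    public static void main(String[] args) throws IOException {
        // Write an int as four big-endian bytes.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        out.writeInt(42);
        out.close();

        // Read the same four bytes back as an int.
        DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(in.readInt()); // prints 42
    }
}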

Usage

From source file:org.apache.horn.core.LayeredNeuralNetwork.java

@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);

    this.finalLayerIdx = input.readInt();
    this.dropRate = input.readFloat();

    // read neuron classes
    int neuronClasses = input.readInt();
    this.neuronClassList = Lists.newArrayList();
    for (int i = 0; i < neuronClasses; ++i) {
        try {
            Class<? extends Neuron> clazz = (Class<? extends Neuron>) Class.forName(input.readUTF());
            neuronClassList.add(clazz);
        } catch (ClassNotFoundException e) {
            // Rethrow so a missing neuron class fails deserialization instead of being swallowed.
            throw new IOException("Neuron class not found", e);
        }
    }

    // read squash functions
    int squashingFunctionSize = input.readInt();
    this.squashingFunctionList = Lists.newArrayList();
    for (int i = 0; i < squashingFunctionSize; ++i) {
        this.squashingFunctionList.add(FunctionFactory.createFloatFunction(WritableUtils.readString(input)));
    }

    // read weights and construct matrices of previous updates
    int numOfMatrices = input.readInt();
    this.weightMatrixList = Lists.newArrayList();
    this.prevWeightUpdatesList = Lists.newArrayList();
    for (int i = 0; i < numOfMatrices; ++i) {
        FloatMatrix matrix = FloatMatrixWritable.read(input);
        this.weightMatrixList.add(matrix);
        this.prevWeightUpdatesList.add(new DenseFloatMatrix(matrix.getRowCount(), matrix.getColumnCount()));
    }

}

From source file:org.archive.io.hdfs.HDFSWriterDocument.java

/**
 * Reads the fields of this object from <code>in</code>.
 *
 * @param in input object to de-serialize from
 */
public void readFields(DataInput in) throws IOException {
    int length = in.readInt();
    byte[] docBytes = new byte[length];
    in.readFully(docBytes);
    load(docBytes, 0, length);
}
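
The example above shows the common length-prefixed pattern: readInt() supplies the byte count, and readFully() fills the buffer. A minimal sketch of the matching write side (the docBytes field is an assumption for illustration, not the project's actual code):

public void write(DataOutput out) throws IOException {
    // Length prefix consumed by readInt() on the read side.
    out.writeInt(docBytes.length);
    // Payload consumed by readFully().
    out.write(docBytes);
}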

From source file:org.apache.hadoop.mapred.TaskTrackerStatus.java

public void readFields(DataInput in) throws IOException {
    this.trackerName = Text.readString(in);
    this.host = Text.readString(in);
    this.httpPort = in.readInt();
    this.failures = in.readInt();
    this.maxMapTasks = in.readInt();
    this.maxReduceTasks = in.readInt();
    resStatus.readFields(in);
    taskReports.clear();
    int numTasks = in.readInt();

    for (int i = 0; i < numTasks; i++) {
        taskReports.add(TaskStatus.readTaskStatus(in));
    }
    getHealthStatus().readFields(in);
}

From source file:com.chinamobile.bcbsp.fault.storage.Fault.java

/**
 * Read fault information.
 * @param in
 *        the input from which the fault fields are read.
 */
@Override
public void readFields(DataInput in) throws IOException {
    this.type = WritableUtils.readEnum(in, Type.class);
    this.level = WritableUtils.readEnum(in, Level.class);
    this.timeOfFailure = Text.readString(in);
    this.workerNodeName = Text.readString(in);
    this.jobName = Text.readString(in);
    this.staffName = Text.readString(in);
    this.exceptionMessage = Text.readString(in);
    this.superStep_Stage = in.readInt();
}

From source file:org.apache.hama.bsp.Counters.java

/**
 * Read a set of groups.
 */
@Override
public synchronized void readFields(DataInput in) throws IOException {
    int numClasses = in.readInt();
    counters.clear();
    while (numClasses-- > 0) {
        String groupName = Text.readString(in);
        Group group = new Group(groupName);
        group.readFields(in);
        counters.put(groupName, group);
    }
}
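
Here readInt() provides the group count that bounds the deserialization loop. A sketch of the symmetric write side (assuming counters is a Map<String, Group>; this is illustrative, not the project's actual implementation):

public synchronized void write(DataOutput out) throws IOException {
    // Group count consumed by readInt() on the read side.
    out.writeInt(counters.size());
    for (Map.Entry<String, Group> entry : counters.entrySet()) {
        Text.writeString(out, entry.getKey());
        entry.getValue().write(out);
    }
}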

From source file:org.apache.hadoop.hbase.index.IndexSpecification.java

/**
 * @param in the data input stream to read from
 * @throws IOException
 */
public void readFields(DataInput in) throws IOException {
    this.name = Bytes.readByteArray(in);
    try {
        HTableDescriptor.isLegalTableName(this.name);
    } catch (IllegalArgumentException e) {
        String msg = "Received unexpected data while parsing the column qualifiers: "
                + Bytes.toString(this.name) + ".";
        Log.warn(msg + " Could be a non-indexed table.");
        throw new EOFException(msg);
    }
    int indexColsSize = in.readInt();
    indexColumns.clear();
    for (int i = 0; i < indexColsSize; i++) {
        ColumnQualifier cq = new ColumnQualifier();
        // Needs revisiting: data that parses without an exception here may still be invalid.
        try {
            cq.readFields(in);
        } catch (IllegalArgumentException e) {
            throw new EOFException("Received unexpected data while parsing the column qualifiers.");
        }
        internalAdd(cq);
    }
    this.maxVersions = in.readInt();
    this.ttl = in.readLong();
}

From source file:org.apache.hama.ml.perception.SmallMultiLayerPerceptron.java

@SuppressWarnings("rawtypes")
@Override
public void readFields(DataInput input) throws IOException {
    this.MLPType = WritableUtils.readString(input);
    this.learningRate = input.readDouble();
    this.regularization = input.readDouble();
    this.momentum = input.readDouble();
    this.numberOfLayers = input.readInt();
    this.squashingFunctionName = WritableUtils.readString(input);
    this.costFunctionName = WritableUtils.readString(input);

    this.squashingFunction = FunctionFactory.createDoubleFunction(this.squashingFunctionName);
    this.costFunction = FunctionFactory.createDoubleDoubleFunction(this.costFunctionName);

    // read the number of neurons for each layer
    this.layerSizeArray = new int[this.numberOfLayers];
    for (int i = 0; i < numberOfLayers; ++i) {
        this.layerSizeArray[i] = input.readInt();
    }
    this.weightMatrice = new DenseDoubleMatrix[this.numberOfLayers - 1];
    for (int i = 0; i < numberOfLayers - 1; ++i) {
        this.weightMatrice[i] = (DenseDoubleMatrix) MatrixWritable.read(input);
    }

    // read feature transformer
    int bytesLen = input.readInt();
    byte[] featureTransformerBytes = new byte[bytesLen];
    input.readFully(featureTransformerBytes); // equivalent to the original byte-by-byte readByte() loop
    Class featureTransformerCls = (Class) SerializationUtils.deserialize(featureTransformerBytes);
    Constructor constructor = featureTransformerCls.getConstructors()[0];
    try {
        this.featureTransformer = (FeatureTransformer) constructor.newInstance(new Object[] {});
    } catch (InstantiationException | IllegalAccessException | IllegalArgumentException
            | InvocationTargetException e) {
        // Fail deserialization loudly rather than leaving featureTransformer null.
        throw new IOException(e);
    }
}

From source file:org.apache.hadoop.mapred.Counters.java

/**
 * Read a set of groups.
 */
public synchronized void readFields(DataInput in) throws IOException {
    int numClasses = in.readInt();
    counters.clear();
    while (numClasses-- > 0) {
        String groupName = Text.readString(in);
        Group group = new Group(groupName);
        group.readFields(in);
        counters.put(groupName, group);
    }
}

From source file:org.apache.hadoop.raid.protocol.PolicyInfo.java

public void readFields(DataInput in) throws IOException {
    String text = Text.readString(in);
    if (text.length() == 0) {
        this.srcPath = null;
    } else {
        this.srcPath = new Path(text);
    }
    text = Text.readString(in);
    if (text.length() == 0) {
        this.fileListPath = null;
    } else {
        this.fileListPath = new Path(text);
    }
    this.policyName = Text.readString(in);
    this.codecId = Text.readString(in);
    this.description = Text.readString(in);
    for (int n = in.readInt(); n > 0; n--) {
        String name = Text.readString(in);
        String value = Text.readString(in);
        properties.setProperty(name, value);
    }
}

From source file:eu.stratosphere.nephele.jobgraph.AbstractJobVertex.java

@SuppressWarnings("unchecked")
@Override
public void read(final DataInput in) throws IOException {

    if (jobGraph == null) {
        throw new IOException("jobGraph is null, cannot deserialize");
    }

    // Read number of subtasks
    this.numberOfSubtasks = in.readInt();

    // Number of execution retries
    this.numberOfExecutionRetries = in.readInt();

    // Read vertex to share instances with
    if (in.readBoolean()) {
        final JobVertexID id = new JobVertexID();
        id.read(in);
        final AbstractJobVertex vertexToShareInstancesWith = this.jobGraph.findVertexByID(id);
        if (vertexToShareInstancesWith == null) {
            throw new IOException("Cannot find vertex with id " + id + " share instances with");
        }

        this.vertexToShareInstancesWith = vertexToShareInstancesWith;
    }

    // Find the class loader for the job
    final ClassLoader cl = LibraryCacheManager.getClassLoader(this.getJobGraph().getJobID());
    if (cl == null) {
        throw new IOException("Cannot find class loader for vertex " + getID());
    }

    // Re-instantiate the configuration object with the correct class loader and read the configuration
    this.configuration = new Configuration(cl);
    this.configuration.read(in);

    // Read number of forward edges
    final int numForwardEdges = in.readInt();

    // Now reconnect to other vertices via the reconstruction map
    final JobVertexID tmpID = new JobVertexID();
    for (int i = 0; i < numForwardEdges; i++) {
        if (in.readBoolean()) {
            tmpID.read(in);
            final AbstractJobVertex jv = jobGraph.findVertexByID(tmpID);
            if (jv == null) {
                throw new IOException("Cannot find vertex with id " + tmpID);
            }

            final ChannelType channelType = EnumUtils.readEnum(in, ChannelType.class);
            final DistributionPattern distributionPattern = EnumUtils.readEnum(in, DistributionPattern.class);
            final int indexOfInputGate = in.readInt();

            try {
                this.connectTo(jv, channelType, i, indexOfInputGate, distributionPattern);
            } catch (JobGraphDefinitionException e) {
                throw new IOException(StringUtils.stringifyException(e));
            }
        } else {
            this.forwardEdges.add(null);
        }
    }

    // Read the invokable class
    final boolean isNotNull = in.readBoolean();
    if (!isNotNull) {
        return;
    }

    // Read the name of the expected class
    final String className = StringRecord.readString(in);

    try {
        this.invokableClass = (Class<? extends AbstractInvokable>) Class.forName(className, true, cl);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException("Class " + className + " not found in one of the supplied jar files: "
                + StringUtils.stringifyException(cnfe));
    }
}
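
This vertex also illustrates another common DataInput idiom: a readBoolean() presence flag guards each optional field, while readInt() carries counts and indices. A minimal sketch of the flag-plus-payload pattern in isolation (names invented for illustration):

// Write side: presence flag first, then the payload only if present.
out.writeBoolean(gateIndex != null);
if (gateIndex != null) {
    out.writeInt(gateIndex);
}

// Read side: mirror the flag before calling readInt().
Integer restoredIndex = in.readBoolean() ? in.readInt() : null;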