Example usage for org.apache.hadoop.io WritableUtils writeString

Introduction

This page collects example usages of org.apache.hadoop.io.WritableUtils#writeString, drawn from open-source projects.

Prototype

public static void writeString(DataOutput out, String s) throws IOException 
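
writeString encodes the string as a vint length prefix followed by its UTF-8 bytes; a null string is written as length -1. A minimal round trip, sketched here with plain java.io streams and an illustrative class name:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class WriteStringRoundTrip {
    public static void main(String[] args) throws IOException {
        // Serialize the string: vint length prefix, then UTF-8 bytes
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        WritableUtils.writeString(out, "hello");
        out.close();

        // Read it back with the symmetric WritableUtils.readString
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(WritableUtils.readString(in)); // prints "hello"
    }
}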

Usage

From source file:org.apache.hama.ml.ann.SmallLayeredNeuralNetwork.java

License:Apache License

@Override
public void write(DataOutput output) throws IOException {
    super.write(output);

    // write squashing functions
    output.writeInt(this.squashingFunctionList.size());
    for (DoubleFunction aSquashingFunctionList : this.squashingFunctionList) {
        WritableUtils.writeString(output, aSquashingFunctionList.getFunctionName());
    }

    // write weight matrices
    output.writeInt(this.weightMatrixList.size());
    for (DoubleMatrix aWeightMatrixList : this.weightMatrixList) {
        MatrixWritable.write(aWeightMatrixList, output);
    }

    // DO NOT WRITE WEIGHT UPDATE
}
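
The trailing comment above is deliberate: the pending weight updates are, presumably, transient training state rather than part of the model itself, so they are intentionally left out of the serialized form.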

From source file:org.apache.hama.ml.perception.SmallMultiLayerPerceptron.java

License:Apache License

@Override
public void write(DataOutput output) throws IOException {
    WritableUtils.writeString(output, MLPType);
    output.writeDouble(learningRate);
    output.writeDouble(regularization);
    output.writeDouble(momentum);
    output.writeInt(numberOfLayers);
    WritableUtils.writeString(output, squashingFunctionName);
    WritableUtils.writeString(output, costFunctionName);

    // write the number of neurons for each layer
    for (int i = 0; i < this.numberOfLayers; ++i) {
        output.writeInt(this.layerSizeArray[i]);
    }
    for (int i = 0; i < numberOfLayers - 1; ++i) {
        MatrixWritable matrixWritable = new MatrixWritable(this.weightMatrice[i]);
        matrixWritable.write(output);
    }

    // serialize the feature transformer
    Class<? extends FeatureTransformer> featureTransformerCls = this.featureTransformer.getClass();
    byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
    output.writeInt(featureTransformerBytes.length);
    output.write(featureTransformerBytes);
}
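
Every Writable write like the one above implies a readFields counterpart that consumes the fields in exactly the same order. A hypothetical sketch of that read side (the actual SmallMultiLayerPerceptron source may differ in detail):

@Override
public void readFields(DataInput input) throws IOException {
    // Field order must mirror write() exactly.
    this.MLPType = WritableUtils.readString(input);
    this.learningRate = input.readDouble();
    this.regularization = input.readDouble();
    this.momentum = input.readDouble();
    this.numberOfLayers = input.readInt();
    this.squashingFunctionName = WritableUtils.readString(input);
    this.costFunctionName = WritableUtils.readString(input);
    // ... the layer sizes, weight matrices, and serialized feature
    // transformer follow in the same order they were written
}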

From source file:org.apache.hama.ml.perception.TestSmallMultiLayerPerceptron.java

License:Apache License

/**
 * Test the output of an example MLP.
 */
@Test
public void testOutput() {
    // write the MLP meta-data manually
    String modelPath = "/tmp/sampleModel-testOutput.data";
    Configuration conf = new Configuration();
    try {
        FileSystem fs = FileSystem.get(conf);
        FSDataOutputStream output = fs.create(new Path(modelPath), true);

        String MLPType = SmallMultiLayerPerceptron.class.getName();
        double learningRate = 0.5;
        double regularization = 0.0;
        double momentum = 0.1;
        String squashingFunctionName = "Sigmoid";
        String costFunctionName = "SquaredError";
        int[] layerSizeArray = new int[] { 3, 2, 3, 3 };
        int numberOfLayers = layerSizeArray.length;

        WritableUtils.writeString(output, MLPType);
        output.writeDouble(learningRate);
        output.writeDouble(regularization);
        output.writeDouble(momentum);
        output.writeInt(numberOfLayers);
        WritableUtils.writeString(output, squashingFunctionName);
        WritableUtils.writeString(output, costFunctionName);

        // write the number of neurons for each layer
        for (int i = 0; i < numberOfLayers; ++i) {
            output.writeInt(layerSizeArray[i]);
        }

        double[][] matrix01 = { // 4 by 2
                { 0.5, 0.2 }, { 0.1, 0.1 }, { 0.2, 0.5 }, { 0.1, 0.5 } };

        double[][] matrix12 = { // 3 by 3
                { 0.1, 0.2, 0.5 }, { 0.2, 0.5, 0.2 }, { 0.5, 0.5, 0.1 } };

        double[][] matrix23 = { // 4 by 3
                { 0.2, 0.5, 0.2 }, { 0.5, 0.1, 0.5 }, { 0.1, 0.2, 0.1 }, { 0.1, 0.2, 0.5 } };

        DoubleMatrix[] matrices = { new DenseDoubleMatrix(matrix01), new DenseDoubleMatrix(matrix12),
                new DenseDoubleMatrix(matrix23) };
        for (DoubleMatrix mat : matrices) {
            MatrixWritable.write(mat, output);
        }

        // serialize the feature transformer
        FeatureTransformer transformer = new DefaultFeatureTransformer();
        Class<? extends FeatureTransformer> featureTransformerCls = transformer.getClass();
        byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
        output.writeInt(featureTransformerBytes.length);
        output.write(featureTransformerBytes);

        output.close();

    } catch (IOException e) {
        e.printStackTrace();
    }

    // initialize the MLP from the existing model meta-data and get the output
    MultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(modelPath);
    DoubleVector input = new DenseDoubleVector(new double[] { 1, 2, 3 });
    try {
        DoubleVector result = mlp.output(input);
        assertArrayEquals(new double[] { 0.6636557, 0.7009963, 0.7213835 }, result.toArray(), 0.0001);
    } catch (Exception e1) {
        e1.printStackTrace();
    }

    // delete meta-data
    try {
        FileSystem fs = FileSystem.get(conf);
        fs.delete(new Path(modelPath), true);
    } catch (IOException e) {
        e.printStackTrace();
    }

}

From source file:org.apache.hcatalog.mapreduce.HCatSplit.java

License:Apache License

@Override
public void write(DataOutput output) throws IOException {
    String partitionInfoString = HCatUtil.serialize(partitionInfo);

    // write partitionInfo into output
    WritableUtils.writeString(output, partitionInfoString);

    WritableUtils.writeString(output, baseMapRedSplit.getClass().getName());
    Writable baseSplitWritable = (Writable) baseMapRedSplit;
    //write  baseSplit into output
    baseSplitWritable.write(output);

    //write the table schema into output
    String tableSchemaString = HCatUtil.serialize(tableSchema);
    WritableUtils.writeString(output, tableSchemaString);
}

From source file:org.apache.hive.hcatalog.mapreduce.HCatSplit.java

License:Apache License

@Override
public void write(DataOutput output) throws IOException {
    String partitionInfoString = HCatUtil.serialize(partitionInfo);

    // write partitionInfo into output
    WritableUtils.writeString(output, partitionInfoString);

    WritableUtils.writeString(output, baseMapRedSplit.getClass().getName());
    Writable baseSplitWritable = (Writable) baseMapRedSplit;
    //write  baseSplit into output
    baseSplitWritable.write(output);
}

From source file:org.apache.horn.core.AbstractLayeredNeuralNetwork.java

License:Apache License

@Override
public void write(DataOutput output) throws IOException {
    super.write(output);
    // write regularization weight
    output.writeFloat(this.regularizationWeight);
    // write momentum weight
    output.writeFloat(this.momentumWeight);

    // write cost function
    WritableUtils.writeString(output, costFunction.getFunctionName());

    // write layer size list
    output.writeInt(this.layerSizeList.size());
    for (Integer aLayerSizeList : this.layerSizeList) {
        output.writeInt(aLayerSizeList);
    }

    WritableUtils.writeEnum(output, this.trainingMethod);
    WritableUtils.writeEnum(output, this.learningStyle);
}

From source file:org.apache.horn.core.AbstractNeuralNetwork.java

License:Apache License

@Override
public void write(DataOutput output) throws IOException {
    // write model type
    WritableUtils.writeString(output, modelType);
    // write learning rate
    output.writeFloat(learningRate);
    // write model path
    if (this.modelPath != null) {
        WritableUtils.writeString(output, modelPath);
    } else {
        WritableUtils.writeString(output, "null");
    }

    // serialize the class
    Class<? extends FloatFeatureTransformer> featureTransformerCls = this.featureTransformer.getClass();
    byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
    output.writeInt(featureTransformerBytes.length);
    output.write(featureTransformerBytes);
}
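
Note the explicit "null" sentinel for a missing model path. writeString can itself encode a null string (as a vint length of -1, which readString decodes back to null), so writing the literal "null" is a design choice that the matching read side must compare against. A minimal sketch of the built-in behavior:

// Sketch: WritableUtils tolerates null strings directly.
WritableUtils.writeString(out, null);       // written as a vint length of -1
String path = WritableUtils.readString(in); // decodes back to null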

From source file:org.apache.horn.core.LayeredNeuralNetwork.java

License:Apache License

@Override
public void write(DataOutput output) throws IOException {
    super.write(output);
    output.writeInt(finalLayerIdx);
    output.writeFloat(dropRate);

    // write neuron classes
    output.writeInt(this.neuronClassList.size());
    for (Class<? extends Neuron> clazz : this.neuronClassList) {
        output.writeUTF(clazz.getName());
    }

    // write squashing functions
    output.writeInt(this.squashingFunctionList.size());
    for (FloatFunction aSquashingFunctionList : this.squashingFunctionList) {
        WritableUtils.writeString(output, aSquashingFunctionList.getFunctionName());
    }

    // write weight matrices
    output.writeInt(this.weightMatrixList.size());
    for (FloatMatrix aWeightMatrixList : this.weightMatrixList) {
        FloatMatrixWritable.write(aWeightMatrixList, output);
    }

    // DO NOT WRITE WEIGHT UPDATE
}

From source file:org.apache.horn.core.RecurrentLayeredNeuralNetwork.java

License:Apache License

@Override
public void write(DataOutput output) throws IOException {
    super.write(output);
    output.writeInt(finalLayerIdx);
    output.writeFloat(dropRate);

    // write neuron classes
    output.writeInt(this.neuronClassList.size());
    for (Class<? extends Neuron> clazz : this.neuronClassList) {
        output.writeUTF(clazz.getName());
    }

    // write squashing functions
    output.writeInt(this.squashingFunctionList.size());
    for (FloatFunction aSquashingFunctionList : this.squashingFunctionList) {
        WritableUtils.writeString(output, aSquashingFunctionList.getFunctionName());
    }

    // write recurrent step size
    output.writeInt(this.recurrentStepSize);

    // write the number of output cells
    output.writeInt(this.numOutCells);

    // write recurrent layer list
    output.writeInt(this.recurrentLayerList.size());
    for (Boolean isRecurrentLayer : recurrentLayerList) {
        output.writeBoolean(isRecurrentLayer);
    }

    // write weight matrices
    output.writeInt(this.getSizeOfWeightmatrix());
    for (List<FloatMatrix> aWeightMatrixLists : this.weightMatrixLists) {
        for (FloatMatrix aWeightMatrixList : aWeightMatrixLists) {
            FloatMatrixWritable.write(aWeightMatrixList, output);
        }
    }

    // DO NOT WRITE WEIGHT UPDATE
}

From source file:org.apache.mahout.classifier.chi_rw.data.Dataset.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(attributes.length); // nb attributes
    for (Attribute attr : attributes) {
        WritableUtils.writeString(out, attr.name());
    }

    Chi_RWUtils.writeArray(out, ignored);

    // only CATEGORICAL attributes have values
    for (String[] vals : values) {
        if (vals != null) {
            WritableUtils.writeStringArray(out, vals);
        }
    }

    // only NUMERICAL attributes have values
    for (double[] vals : nvalues) {
        if (vals != null) {
            Chi_RWUtils.writeArray(out, vals);
        }
    }

    for (double[] vals : minmaxvalues) {
        if (vals != null) {
            Chi_RWUtils.writeArray(out, vals);
        }
    }

    out.writeInt(labelId);
    out.writeInt(nbInstances);
}