Example usage for org.apache.hadoop.io Text readString

List of usage examples for org.apache.hadoop.io Text readString

Introduction

On this page you can find example usages of org.apache.hadoop.io.Text.readString.

Prototype

public static String readString(DataInput in) throws IOException 

Document

Read a UTF-8 encoded string from in.
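
Because readString is the inverse of Text.writeString, the simplest way to see it in action is a round trip over an in-memory stream. The sketch below is illustrative only (the class and variable names are made up for the example):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Text;

public class ReadStringRoundTrip {
    public static void main(String[] args) throws IOException {
        // write a string with Text.writeString: a VInt length followed by the UTF-8 bytes
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        Text.writeString(out, "hello readString");
        out.flush();

        // read it back with Text.readString
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        String value = Text.readString(in);
        System.out.println(value); // prints: hello readString
    }
}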

Usage

From source file:it.crs4.pydoop.mapreduce.pipes.CommonStub.java

License:Apache License

protected void initSoket() throws Exception {
    int port = Integer.parseInt(System.getenv("mapreduce.pipes.command.port"));

    System.err.println("port:" + port);

    java.net.InetAddress address = java.net.InetAddress.getLocalHost();

    socket = new Socket(address.getHostName(), port);
    InputStream input = socket.getInputStream();
    OutputStream output = socket.getOutputStream();

    System.err.println("ready to read");
    // try to read
    dataInput = new DataInputStream(input);

    WritableUtils.readVInt(dataInput);

    String str = Text.readString(dataInput);

    Text.readString(dataInput);

    dataOut = new DataOutputStream(output);
    WritableUtils.writeVInt(dataOut, 57);
    String s = createDigest("password".getBytes(), str);

    Text.writeString(dataOut, s);

    // start
    WritableUtils.readVInt(dataInput);
    int currentAnswer = WritableUtils.readVInt(dataInput);
    System.out.println("CURRENT_PROTOCOL_VERSION:" + currentAnswer);

    // get configuration
    // should be MessageType.SET_JOB_CONF.code
    WritableUtils.readVInt(dataInput);

    // array length

    int j = WritableUtils.readVInt(dataInput);
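    // the j strings are the job configuration, sent as alternating property names and values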
    for (int i = 0; i < j; i++) {
        Text.readString(dataInput);
        i++;
        Text.readString(dataInput);
    }
}

From source file:it.crs4.pydoop.mapreduce.pipes.PipeApplicationRunnableStub.java

License:Apache License

public void binaryProtocolStub() {
    try {

        initSoket();
        System.out.println("start OK");

        // RUN_MAP.code
        // should be 3

        int answer = WritableUtils.readVInt(dataInput);
        System.out.println("RunMap:" + answer);
        FileSplit split = new FileSplit();
        readObject(split, dataInput);

        WritableUtils.readVInt(dataInput);
        WritableUtils.readVInt(dataInput);
        // end runMap
        // get InputTypes
        WritableUtils.readVInt(dataInput);
        String inText = Text.readString(dataInput);
        System.out.println("Key class:" + inText);
        inText = Text.readString(dataInput);
        System.out.println("Value class:" + inText);

        @SuppressWarnings("unused")
        int inCode = 0;

        // read all key/value records from the sender (message code 4)
        while ((inCode = WritableUtils.readVInt(dataInput)) == 4) {
            FloatWritable key = new FloatWritable();
            NullWritable value = NullWritable.get();
            readObject(key, dataInput);
            System.out.println("value:" + key.get());
            readObject(value, dataInput);
        }

        WritableUtils.writeVInt(dataOut, 54);

        dataOut.flush();
        dataOut.close();

    } catch (Exception x) {
        x.printStackTrace();
    } finally {
        closeSoket();
    }

}

From source file:it.crs4.seal.common.SequenceId.java

License:Open Source License

/**
 * Read the two fields.
 * Encoded as:
 *    location length: Vint
 *    location: UTF bytes
 *    read: byte
 */
@Override
public void readFields(DataInput in) throws IOException {
    location = Text.readString(in);
    read = in.readByte();
}
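
The encoding spelled out in the comment fixes the wire layout, so the write side has to be symmetric. A hypothetical write(DataOutput) counterpart (not part of the quoted snippet; it assumes the location and read fields used above) would be:

@Override
public void write(DataOutput out) throws IOException {
    // Text.writeString emits the VInt length followed by the UTF-8 bytes,
    // which is exactly the "location length: Vint / location: UTF bytes" layout above
    Text.writeString(out, location);
    out.writeByte(read);
}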

From source file:kogiri.mapreduce.common.kmermatch.KmerMatchInputSplit.java

License:Open Source License

@Override
public void readFields(DataInput in) throws IOException {
    this.kmerIndexPath = new Path[in.readInt()];
    for (int i = 0; i < this.kmerIndexPath.length; i++) {
        this.kmerIndexPath[i] = new Path(Text.readString(in));
    }
    this.partition = new KmerRangePartition();
    this.partition.read(in);
}

From source file:kogiri.mapreduce.preprocess.common.kmerhistogram.KmerRangePartition.java

License:Open Source License

public void read(DataInput in) throws IOException {
    this.kmerSize = in.readInt();
    this.numPartitions = in.readInt();
    this.partitionIndex = in.readInt();
    this.partitionSize = new BigInteger(Text.readString(in));
    this.partitionBegin = new BigInteger(Text.readString(in));
    this.parititionEnd = new BigInteger(Text.readString(in));
}
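
Note how readString also carries non-string data here: the BigInteger bounds travel as their decimal string form. A plausible write counterpart (an assumption that simply mirrors the fields read above; field names, including parititionEnd, follow the read method verbatim) would reverse that with Text.writeString:

public void write(DataOutput out) throws IOException {
    out.writeInt(this.kmerSize);
    out.writeInt(this.numPartitions);
    out.writeInt(this.partitionIndex);
    // BigInteger values are written as decimal strings,
    // the inverse of new BigInteger(Text.readString(in))
    Text.writeString(out, this.partitionSize.toString());
    Text.writeString(out, this.partitionBegin.toString());
    Text.writeString(out, this.parititionEnd.toString());
}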

From source file:kogiri.mapreduce.preprocess.common.kmerindex.KmerIndexSplit.java

License:Open Source License

@Override
public void readFields(DataInput in) throws IOException {
    this.indexPaths = new Path[in.readInt()];
    for (int i = 0; i < this.indexPaths.length; i++) {
        this.indexPaths[i] = new Path(Text.readString(in));
    }

    this.locations = new String[in.readInt()];
    for (int i = 0; i < this.locations.length; i++) {
        this.locations[i] = Text.readString(in);
    }
}

From source file:ml.grafos.okapi.graphs.betweeness.ShortestPathData.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    source = Text.readString(in);
    from = Text.readString(in);
    distance = in.readInt();

    int size = in.readInt();
    shortestPathSources = new ArrayList<>();
    for (int i = 0; i < size; i++) {
        shortestPathSources.add(Text.readString(in));
    }
}

From source file:ml.grafos.okapi.graphs.betweeness.ShortestPathList.java

License:Apache License

@Override
public void readFields(DataInput in) throws IOException {
    this.predecessors.clear();
    setDistance(in.readInt());
    int size = in.readInt();
    for (int i = 0; i < size; i++) {
        predecessors.add(Text.readString(in));
    }
}

From source file:ml.shifu.guagua.yarn.GuaguaYarnTask.java

License:Apache License

@SuppressWarnings({ "unchecked", "unused" })
private <T> T getSplitDetails(Path file, long offset) throws IOException {
    FileSystem fs = file.getFileSystem(getYarnConf());
    FSDataInputStream inFile = null;
    T split = null;
    try {
        inFile = fs.open(file);
        inFile.seek(offset);
        String className = Text.readString(inFile);
        Class<T> cls;
        try {
            cls = (Class<T>) getYarnConf().getClassByName(className);
        } catch (ClassNotFoundException ce) {
            IOException wrap = new IOException(String.format("Split class %s not found", className));
            wrap.initCause(ce);
            throw wrap;
        }
        SerializationFactory factory = new SerializationFactory(getYarnConf());
        Deserializer<T> deserializer = (Deserializer<T>) factory.getDeserializer(cls);
        deserializer.open(inFile);
        split = deserializer.deserialize(null);
    } finally {
        IOUtils.closeStream(inFile);
    }
    return split;
}
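
Here Text.readString recovers the split class name that the writer stored ahead of the serialized split. A hypothetical writer-side sketch (the method name, parameters, and stream handling are assumptions; it presumes the usual org.apache.hadoop.conf.Configuration, org.apache.hadoop.fs.FSDataOutputStream and org.apache.hadoop.io.serializer imports, and only the readString/writeString pairing is taken from the snippet above):

private <T> void writeSplitDetails(FSDataOutputStream outFile, T split, Configuration conf) throws IOException {
    // store the class name first so the reader can resolve it via Text.readString
    Text.writeString(outFile, split.getClass().getName());

    // then serialize the split itself with the configured serialization framework
    SerializationFactory factory = new SerializationFactory(conf);
    @SuppressWarnings("unchecked")
    Class<T> cls = (Class<T>) split.getClass();
    Serializer<T> serializer = factory.getSerializer(cls);
    serializer.open(outFile);
    serializer.serialize(split);
    serializer.close();
}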

From source file:net.thevis.groovyhadoop.backport.CombineFileSplit.java

License:Apache License

public void readFields(DataInput in) throws IOException {
    totLength = in.readLong();
    int arrLength = in.readInt();
    lengths = new long[arrLength];
    for (int i = 0; i < arrLength; i++) {
        lengths[i] = in.readLong();
    }
    int filesLength = in.readInt();
    paths = new Path[filesLength];
    for (int i = 0; i < filesLength; i++) {
        paths[i] = new Path(Text.readString(in));
    }
    arrLength = in.readInt();
    startoffset = new long[arrLength];
    for (int i = 0; i < arrLength; i++) {
        startoffset[i] = in.readLong();
    }
}