Example usage for org.apache.hadoop.io Text toString

Introduction

On this page you can find example usages of org.apache.hadoop.io Text toString.

Prototype

@Override
public String toString() 

Document

Convert text back to string
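
For orientation, a minimal round trip as a standalone sketch (not taken from any of the usage examples below):

Text text = new Text("hello world"); // Text stores the characters as UTF-8 bytes
String s = text.toString();          // decodes the bytes back into a Java String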

Usage

From source file:com.jeffy.mr.WordCountMapper.java

License:Apache License

public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    StringTokenizer itr = new StringTokenizer(value.toString());
    while (itr.hasMoreTokens()) {
        word.set(itr.nextToken());
        context.write(word, counter); // emit a (word, count) pair for each token
    }
}
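
The snippet reuses two mapper fields that are not shown in the excerpt. A plausible sketch of their declarations (assumed, not confirmed by the source file):

private final Text word = new Text();                    // reused output key
private final IntWritable counter = new IntWritable(1);  // constant count of 1

Reusing a single Text and IntWritable across map() calls is the idiomatic way to avoid allocating fresh Writable objects for every token.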

From source file:com.jfolson.hive.serde.RTypedBytesWritableOutput.java

License:Apache License

public void writeRawText(Text t) throws IOException {
    out.writeRawString(t.toString());
}

From source file:com.jfolson.hive.serde.RTypedBytesWritableOutput.java

License:Apache License

public void writeText(Text t) throws IOException {
    out.writeString(t.toString());
}
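
Both methods decode the Text into a Java String via toString() before writing. Judging by the names alone, writeString() presumably frames the value with a typed-bytes type marker while writeRawString() presumably omits it; that framing is an assumption based on the method names, not something the excerpt confirms.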

From source file:com.jhkt.playgroundArena.hadoop.tasks.jobs.mapper.AverageMapper.java

License:Apache License

protected void map(Text key, Text value, Mapper<Text, Text, Text, AverageWritable>.Context context)
        throws IOException, InterruptedException {

    String[] valueSplit = value.toString().split(JPAHadoopConstants.DEFAULT_DELIM_REG_EXP);

    _key.set(valueSplit[0]);
    Double sum = Double.parseDouble(valueSplit[1]);
    _aWriter.setSum(sum);

    context.write(_key, _aWriter);
}

From source file:com.jhkt.playgroundArena.hadoop.tasks.jobs.mapper.BloomFilterMapper.java

License:Apache License

protected void map(Text key, Text value, Mapper<Text, Text, Text, BloomFilter>.Context context)
        throws IOException, InterruptedException {
    String[] valueSplit = value.toString().split(JPAHadoopConstants.DEFAULT_DELIM_REG_EXP);
    _bFilter.add(new Key(valueSplit[0].getBytes()));
}
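
Note that map() only trains the filter and never writes to the context. In this common pattern the accumulated filter is emitted once per task, typically from cleanup(); a hedged sketch of what that could look like (the output key is a placeholder, not from the source):

@Override
protected void cleanup(Mapper<Text, Text, Text, BloomFilter>.Context context)
        throws IOException, InterruptedException {
    // Emit the single trained filter so a reducer can OR the per-task filters together.
    context.write(new Text("bloomfilter"), _bFilter);
}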

From source file:com.jhkt.playgroundArena.hadoop.tasks.jobs.mapper.CountMapper.java

License:Apache License

protected void map(Text key, Text value, Mapper<Text, Text, Text, IntWritable>.Context context)
        throws IOException, InterruptedException {
    String[] valueSplit = value.toString().split(JPAHadoopConstants.DEFAULT_DELIM_REG_EXP);

    _currentValue.set(valueSplit[1]);
    context.write(_currentValue, ONE);
}

From source file:com.jhkt.playgroundArena.hadoop.tasks.jobs.mapper.DistributedCacheMapper.java

License:Apache License

protected void map(Text key, Text value, Mapper<Text, Text, Text, Text>.Context context)
        throws IOException, InterruptedException {
    String joinValue = joinData.get(key);
    if (joinValue != null) {
        _currentValue.set(value.toString() + JPAHadoopConstants.DEFAULT_SPLITTER + joinValue);

        /*
         * Output the result directly into HDFS, as we don't have any reducer for further processing.
         */
        context.write(key, _currentValue);
    }
}
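
joinData is side data that must be populated before map() runs, typically in setup() from a file shipped through the distributed cache. A minimal sketch under that assumption (the parsing and the cached-file layout are hypothetical):

private final Map<Text, String> joinData = new HashMap<Text, String>();

@Override
protected void setup(Context context) throws IOException {
    // Read the locally cached join file: one <key><delim><value> record per line.
    Path[] cacheFiles = DistributedCache.getLocalCacheFiles(context.getConfiguration());
    BufferedReader reader = new BufferedReader(new FileReader(cacheFiles[0].toString()));
    String line;
    while ((line = reader.readLine()) != null) {
        String[] parts = line.split(JPAHadoopConstants.DEFAULT_DELIM_REG_EXP);
        joinData.put(new Text(parts[0]), parts[1]);
    }
    reader.close();
}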

From source file:com.jhkt.playgroundArena.hadoop.tasks.jobs.mapper.ReverseMapper.java

License:Apache License

protected void map(Text key, Text value, Mapper<Text, Text, Text, Text>.Context context)
        throws IOException, InterruptedException {

    String[] valueSplit = value.toString().split(JPAHadoopConstants.DEFAULT_DELIM_REG_EXP);
    _value.set(valueSplit[1] + " " + valueSplit[0]);
    context.write(key, _value);
}

From source file:com.jumptap.h2redis.RedisHMRecordWriter.java

License:Open Source License

@Override
public void write(Text text, Text tuple) throws IOException, InterruptedException {
    String[] kv = tuple.toString().split(",");
    write(keyMaker.key(text.toString()), keyMaker.hkey(kv[0].trim()), kv[1].trim());
}
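
The writer assumes each tuple is a two-field CSV record of the form <hashkey>,<value>: the record's Text key presumably names the Redis hash, kv[0] becomes the hash field, and kv[1] its value. A tuple without a comma would make kv[1] throw an ArrayIndexOutOfBoundsException, so the upstream job must guarantee that format.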

From source file:com.jumptap.h2redis.RedisOutputMapper.java

License:Open Source License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    int keyIdx = conf.getInt(RedisDriver.REDIS_KEY_FIELD, -1);
    int hashIdx = conf.getInt(RedisDriver.REDIS_HASHKEY_FIELD, -1);
    int valIdx = conf.getInt(RedisDriver.REDIS_HASHVAL_FIELD, -1);

    if (keyIdx == -1 || hashIdx == -1 || valIdx == -1)
        return; // bail out: the required Redis field indices are not configured

    String[] payload = StringUtils.getStrings(value.toString());
    String keyStr = payload[keyIdx];
    String hashStr = payload[hashIdx];
    String valStr = payload[valIdx];

    // check filters
    Pattern p = conf.getPattern(RedisDriver.REDIS_KEY_FILTER, null);
    if (p != null && p.matcher(keyStr).find()) {
        return;
    }

    p = conf.getPattern(RedisDriver.REDIS_HASH_FILTER, null);
    if (p != null && p.matcher(hashStr).find()) {
        return;
    }

    p = conf.getPattern(RedisDriver.REDIS_VAL_FILTER, null);
    if (p != null && p.matcher(valStr).find()) {
        return;
    }

    outkey.set(keyStr);
    outvalue.set(hashStr + "," + valStr);
    context.write(outkey, outvalue);
}
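
The three patterns act as exclusion filters: a record is dropped as soon as any configured pattern matches its key, hash key, or value. The field indices come from the job configuration, so a driver has to set them before this mapper can emit anything; a hedged sketch (the column positions are placeholders):

Configuration conf = new Configuration();
conf.setInt(RedisDriver.REDIS_KEY_FIELD, 0);     // column holding the Redis key
conf.setInt(RedisDriver.REDIS_HASHKEY_FIELD, 1); // column holding the hash field
conf.setInt(RedisDriver.REDIS_HASHVAL_FIELD, 2); // column holding the hash value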