Example usage for org.apache.hadoop.io Text toString

Introduction

On this page you can find example usages of org.apache.hadoop.io.Text.toString().

Prototype

@Override
public String toString() 

Document

Converts the Text contents back to a String.
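
For quick reference, here is a minimal, self-contained sketch of the String-to-Text round trip (the class name TextToStringExample is illustrative only):

import org.apache.hadoop.io.Text;

public class TextToStringExample {
    public static void main(String[] args) {
        // Text stores its characters as UTF-8 bytes internally
        Text text = new Text("hello hadoop");

        // toString() decodes those bytes back into a java.lang.String
        String decoded = text.toString();
        System.out.println(decoded); // prints: hello hadoop
    }
}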

Usage

From source file:alluxio.client.hadoop.AbstractIOMapper.java

License:Apache License

/**
 * Map file name and offset into statistical data.
 * <p>
 * The map task is to get the <tt>key</tt>, which contains the file name, and the <tt>value</tt>,
 * which is the offset within the file.
 *
 * The parameters are passed to the abstract method
 * {@link #doIO(Reporter, String,long)}, which performs the io operation,
 * usually read or write data, and then
 * {@link #collectStats(OutputCollector, String,long, Object)} is called
 * to prepare stat data for a subsequent reducer.
 */
@Override
public void map(Text key, LongWritable value, OutputCollector<Text, Text> output, Reporter reporter)
        throws IOException {
    String name = key.toString();
    long longValue = value.get();

    reporter.setStatus("starting " + name + " ::host = " + mHostname);

    mStream = getIOStream(name);
    T statValue = null;
    long tStart = System.currentTimeMillis();
    try {
        statValue = doIO(reporter, name, longValue);
    } finally {
        if (mStream != null) {
            mStream.close();
        }
    }
    long tEnd = System.currentTimeMillis();
    long execTime = tEnd - tStart;
    collectStats(output, name, execTime, statValue);

    reporter.setStatus("finished " + name + " ::host = " + mHostname);
}
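
The AccumulatingReducer listed next dispatches on a type prefix embedded in each key, so a collectStats implementation typically emits keys tagged with such a prefix. The following is a hedged sketch modeled on Hadoop's DFSIO benchmark; the prefixes, stat names, and the helper class IOStatsEmitter are assumptions, not code from the Alluxio source.

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;

// Hypothetical helper, not part of the Alluxio listing above.
class IOStatsEmitter {
    // Emit one record per metric; the key prefix tells the reducer how to
    // accumulate the value ("l:" sum as long, "f:" sum as float, "s:" concatenate).
    static void collectStats(OutputCollector<Text, Text> output, String name, long execTime, long objSize)
            throws IOException {
        long safeTime = Math.max(execTime, 1); // guard against zero execution time
        float ioRateMbSec = (float) objSize * 1000 / (safeTime * 0x100000);

        output.collect(new Text("l:tasks"), new Text(String.valueOf(1)));
        output.collect(new Text("l:size"), new Text(String.valueOf(objSize)));
        output.collect(new Text("l:time"), new Text(String.valueOf(execTime)));
        output.collect(new Text("f:rate"), new Text(String.valueOf(ioRateMbSec * 1000)));
        output.collect(new Text("s:file"), new Text(name));
    }
}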

From source file:alluxio.client.hadoop.AccumulatingReducer.java

License:Apache License

/**
 * This method accumulates values based on their type.
 *
 * @param key the type of values
 * @param values the values to accumulate
 * @param output collect the result of accumulating
 * @param reporter to report progress and update status information
 */
public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter)
        throws IOException {
    String field = key.toString();

    reporter.setStatus("starting " + field + " ::host = " + mHostname);

    // concatenate strings
    if (field.startsWith(VALUE_TYPE_STRING)) {
        StringBuilder sSum = new StringBuilder();
        while (values.hasNext()) {
            sSum.append(values.next().toString()).append(";");
        }
        output.collect(key, new Text(sSum.toString()));
        reporter.setStatus("finished " + field + " ::host = " + mHostname);
        return;
    }
    // sum float values
    if (field.startsWith(VALUE_TYPE_FLOAT)) {
        float fSum = 0;
        while (values.hasNext()) {
            fSum += Float.parseFloat(values.next().toString());
        }
        output.collect(key, new Text(String.valueOf(fSum)));
        reporter.setStatus("finished " + field + " ::host = " + mHostname);
        return;
    }
    // sum long values
    if (field.startsWith(VALUE_TYPE_LONG)) {
        long lSum = 0;
        while (values.hasNext()) {
            lSum += Long.parseLong(values.next().toString());
        }
        output.collect(key, new Text(String.valueOf(lSum)));
    }
    reporter.setStatus("finished " + field + " ::host = " + mHostname);
}
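
The VALUE_TYPE_* constants referenced above are not part of this listing. In the Hadoop benchmark code this reducer mirrors, they are short string prefixes; the values below are an assumption for illustration, not the Alluxio definitions.

// Assumed type prefixes (modeled on Hadoop's AccumulatingReducer test utility):
protected static final String VALUE_TYPE_LONG = "l:";
protected static final String VALUE_TYPE_FLOAT = "f:";
protected static final String VALUE_TYPE_STRING = "s:";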

From source file:alluxio.hadoop.fs.AccumulatingReducer.java

License:Apache License

public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter)
        throws IOException {
    String field = key.toString();

    reporter.setStatus("starting " + field + " ::host = " + mHostname);

    // concatenate strings
    if (field.startsWith(VALUE_TYPE_STRING)) {
        StringBuffer sSum = new StringBuffer();
        while (values.hasNext()) {
            sSum.append(values.next().toString()).append(";");
        }
        output.collect(key, new Text(sSum.toString()));
        reporter.setStatus("finished " + field + " ::host = " + mHostname);
        return;
    }
    // sum float values
    if (field.startsWith(VALUE_TYPE_FLOAT)) {
        float fSum = 0;
        while (values.hasNext()) {
            fSum += Float.parseFloat(values.next().toString());
        }
        output.collect(key, new Text(String.valueOf(fSum)));
        reporter.setStatus("finished " + field + " ::host = " + mHostname);
        return;
    }
    // sum long values
    if (field.startsWith(VALUE_TYPE_LONG)) {
        long lSum = 0;
        while (values.hasNext()) {
            lSum += Long.parseLong(values.next().toString());
        }
        output.collect(key, new Text(String.valueOf(lSum)));
    }
    reporter.setStatus("finished " + field + " ::host = " + mHostname);
}

From source file:Analysis.A10_Weekday_v_Weekend_Listens.Listen_History_Weekday_Weekend_Mapper.java

public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    // get user info
    String[] userInfo = value.toString().split("\t");
    String playTime = userInfo[1].trim();

    try {

        if (playTime.equals("")) {
            context.getCounter(DAY_COUNTER_GROUP, NULL_OR_EMPTY).increment(1);
        } else {
            Date date = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").parse(playTime);

            // Then get the day of week from the Date based on specific locale.
            String dayOfWeek = new SimpleDateFormat("EEEE", Locale.ENGLISH).format(date);

            //System.out.println(dayOfWeek); // Friday
            if (days.contains(dayOfWeek)) {
                context.getCounter(DAY_COUNTER_GROUP, dayOfWeek).increment(1);
            }
        }

    } catch (ParseException e) {
        e.printStackTrace();
    }

}

From source file:Analysis.A1_Total_Unique_Artists_on_Service.Distinct_Artist_Mapper.java

public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    // get artist info
    String[] artistInfo = value.toString().split("\t");

    // take the artist name and append a tab (whitespace stripping left disabled)
    String aName = artistInfo[2] + "\t"; //.replaceAll("\\s+","");

    // extract artist name
    artistName = new Text(aName);

    context.write(artistName, NullWritable.get());
}

From source file:Analysis.A2_Top_20_Most_Popular_Artists.Top_20_Most_Popular_Artist_Mapper.java

public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    // get artist info
    String[] artistInfo = value.toString().split("\t");

    // take the artist name and append a tab (whitespace stripping left disabled)
    String aName = artistInfo[2] + "\t"; //.replaceAll("\\s+","");

    // extract artist name
    artistName = new Text(aName);

    context.write(artistName, one);
}

From source file:Analysis.A2_Top_20_Most_Popular_Artists.Top_20_Most_Popular_Artist_Reducer.java

public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    int totalUniquePlayCount = 0;

    // get count and add
    for (IntWritable uniqueCount : values) {
        totalUniquePlayCount += uniqueCount.get();
    }

    //add this artist with its play count to tree map
    top20.put(totalUniquePlayCount, key.toString());

    // if the map has grown beyond 20 entries, drop the first (smallest) key, since the tree map sorts by play count in ascending order
    if (top20.size() > 20) {
        top20.remove(top20.firstKey());
    }
}
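
The reducer above only accumulates into top20; the listing does not show where the map is finally written out. In this top-N pattern the emission usually happens once, after all keys have been reduced, typically in a cleanup() override. A minimal sketch under that assumption, taking top20 to be a TreeMap<Integer, String> keyed by play count and the job's output types to be Text and IntWritable:

// Hypothetical cleanup() for the reducer above (not from the original source).
// Emits the retained entries from highest play count to lowest; assumes
// java.util.Map and java.util.TreeMap are imported.
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
    for (Map.Entry<Integer, String> entry : top20.descendingMap().entrySet()) {
        context.write(new Text(entry.getValue()), new IntWritable(entry.getKey()));
    }
}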

From source file:Analysis.A3_Total_Users_By_Gender.User_Gender_Count_Mapper.java

public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    // get user info
    String[] userInfo = value.toString().split("\t");
    String userGender = userInfo[1];

    // extract user gender
    gender = new Text(userGender);

    context.write(gender, one);
}

From source file:Analysis.A4_High_Traffic_Countries.Top_10_Countries_by_User_Traffic_Mapper.java

public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    // get user info
    String[] userInfo = value.toString().split("\t");
    String cID = userInfo[3];

    // extract country ID
    country = new Text(cID);

    context.write(country, one);
}

From source file:Analysis.A4_High_Traffic_Countries.Top_10_Countries_by_User_Traffic_Reducer.java

public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    int count = 0;
    for (IntWritable value : values) {
        count += value.get();
    }

    //add this country with its play count to tree map
    top10.put(count, key.toString());

    // if the map has grown beyond 10 entries, drop the first (smallest) key, since the tree map sorts by count in ascending order
    if (top10.size() > 10) {
        top10.remove(top10.firstKey());
    }
}