Example usage for org.apache.hadoop.io Text toString

Introduction

On this page you can find example usages of org.apache.hadoop.io.Text.toString().

Prototype

@Override
public String toString() 

Document

Convert text back to string
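
For orientation, here is a minimal, self-contained sketch (not taken from any of the source files below) of the round trip between a Java String and org.apache.hadoop.io.Text:

import org.apache.hadoop.io.Text;

public class TextToStringDemo {
    public static void main(String[] args) {
        Text text = new Text("hello hadoop"); // Text stores the value as UTF-8 bytes
        String s = text.toString();           // decode the bytes back into a String
        System.out.println(s);                // prints "hello hadoop"
    }
}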

Usage

From source file:cn.com.diditaxi.hive.cf.UDFToChar.java

License:Apache License
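
// Parses dateText with standardFormatter and re-formats it with the caller-supplied
// pattern; formatter, standardFormatter, lastPatternText and result are fields of the
// enclosing UDF class and are not shown in this snippet.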

public Text evaluate(Text dateText, Text patternText) {
    if (dateText == null || patternText == null) {
        return null;
    }
    if (dateText.toString().trim().length() == 10) {
        standardFormatter.applyPattern("yyyy-MM-dd");
    }

    try {
        if (!patternText.equals(lastPatternText)) {
            formatter.applyPattern(patternText.toString());
            lastPatternText.set(patternText);
        }
    } catch (Exception e) {
        return null;
    }

    Date date;
    try {
        date = standardFormatter.parse(dateText.toString());
        result.set(formatter.format(date));
        return result;
    } catch (ParseException e) {
        return null;
    }
}

From source file:cn.lhfei.hadoop.ch02.MaxTemperatureMapper.java

License:Apache License
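
// Extracts the year (columns 15-19) and the signed air temperature (columns 87-92)
// from a fixed-width NCDC weather record, emitting (year, temperature) only when the
// reading is present and the quality code is acceptable.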

@Override
protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, IntWritable>.Context context)
        throws IOException, InterruptedException {

    String line = value.toString();
    String year = line.substring(15, 19);
    int airTemperature;

    if (line.charAt(87) == '+') { // parseInt doesn't like leading plus
        airTemperature = Integer.parseInt(line.substring(88, 92));
    } else {
        airTemperature = Integer.parseInt(line.substring(87, 92));
    }
    String quality = line.substring(92, 93);
    if (airTemperature != MISSING && quality.matches("[01459]")) {
        context.write(new Text(year), new IntWritable(airTemperature));
    }
}

From source file:cn.lhfei.hadoop.ch05.v2.MaxTemperatureMapper.java

License:Apache License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

    String line = value.toString();
    String year = line.substring(15, 19);
    String temp = line.substring(87, 92);
    if (!missing(temp)) {
        int airTemperature = Integer.parseInt(temp);
        context.write(new Text(year), new IntWritable(airTemperature));
    }
}

From source file:cn.lhfei.hadoop.ch05.v3.NcdcRecordParser.java

License:Apache License

public void parse(Text record) {
    parse(record.toString());
}

From source file:cn.lhfei.hive.udf.SimpleUDFExample.java

License:Apache License

public Text evaluate(Text input) {
    if (input == null)
        return null;
    return new Text("Hello " + input.toString());
}

From source file:cn.lhfei.spark.hive.udf.LowerUDF.java

License:Apache License

public Text evaluate(final Text s) {
    if (s == null) {
        return null;
    }
    return new Text(s.toString().toLowerCase());
}

From source file:co.cask.cdap.spark.app.SparkAppUsingGetDataset.java

License:Apache License
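
// Parses a Common Log Format line into a (LogKey, LogStats) pair using the
// pre-compiled CLF_LOG_PATTERN; returns null when the line does not match.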

@Nullable
static Tuple2<LogKey, LogStats> parse(Text log) {
    Matcher matcher = CLF_LOG_PATTERN.matcher(log.toString());
    if (matcher.find()) {
        String ip = matcher.group(1);
        String user = matcher.group(3);
        String request = matcher.group(5);
        int code = Integer.parseInt(matcher.group(6));
        int size = Integer.parseInt(matcher.group(7));
        return new Tuple2<>(new LogKey(ip, user, request, code), new LogStats(1, size));
    }
    return null;
}

From source file:co.cask.tephra.persist.HDFSTransactionLogReaderSupplier.java

License:Apache License
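
// Reads the transaction-log version from the SequenceFile metadata entry keyed by
// TxConstants.TransactionLog.VERSION_KEY, falling back to version 1 when the entry
// is absent.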

public HDFSTransactionLogReaderSupplier(SequenceFile.Reader reader) {
    this.reader = reader;
    Text versionInfo = reader.getMetadata().get(new Text(TxConstants.TransactionLog.VERSION_KEY));
    this.version = versionInfo == null ? 1 : Byte.parseByte(versionInfo.toString());
}

From source file:co.nubetech.hiho.mapred.MySQLLoadDataMapper.java

License:Apache License

/**
 * Map file tables to DB tables. File name can be &lt;tablename&gt; or
 * &lt;tablename&gt;-m-XXXXX.
 *
 * @param key the filename of the data stream
 * @return the DB table name
 * @see {@link
 *      org.apache.hadoop.mapreduce.lib.output.FileOutputFormat#getUniqueFile}
 */
private String keyToTablename(Text key) {
    String filename = key.toString();
    if (filename.matches("\\w+-[mr]-[0-9]{5}"))
        return filename.substring(0, filename.length() - 8);
    else
        return filename;
}
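
For illustration, a hypothetical call site (the literals below are assumptions, not taken from the project) would see the -m-XXXXX suffix stripped only when the file name matches the map/reduce output pattern:

keyToTablename(new Text("employees-m-00000")); // returns "employees"
keyToTablename(new Text("employees"));         // returns "employees", unchanged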

From source file:co.nubetech.hiho.mapreduce.OracleLoadMapper.java

License:Apache License

public void map(Text key, FSDataInputStream val, Context context) throws IOException, InterruptedException {
    ftpClient.appendFile(key.toString(), val);
    logger.debug("Appended to file " + key);
    val.close();
}