Example usage for org.apache.hadoop.io LongWritable get

List of usage examples for org.apache.hadoop.io LongWritable get

Introduction

On this page you can find example usages of org.apache.hadoop.io.LongWritable.get().

Prototype

public long get() 

Document

Return the value of this LongWritable.
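
A minimal, self-contained sketch of the typical get()/set() round trip (the class and variable names here are illustrative only, not taken from the examples below):

import org.apache.hadoop.io.LongWritable;

public class LongWritableGetDemo {
    public static void main(String[] args) {
        LongWritable counter = new LongWritable(41L);

        // get() unwraps the Writable back into a primitive long
        long raw = counter.get();

        // the usual pattern: unwrap with get(), compute, then re-wrap with set()
        counter.set(raw + 1);
        System.out.println(counter.get()); // prints 42
    }
}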

Usage

From source file: clustering.simhash.Step1Reducer.java

License: Apache License

/**
 * @param key    simhash
 * @param values entry_id@@g_no::g_name##g_model
 *               {@inheritDoc}
 */
@Override
protected void reduce(LongWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {

    for (Text value : values) {
        String[] docIdAndCommoInfo = value.toString().split("::");

        SimHash thisHash = SimHash.Builder.of(docIdAndCommoInfo[1]).build(key.get());

        int id = this._pool.hasSimilar(thisHash, this.threshold);
        if (id == -1) { // does not contain
            id = count.incrementAndGet();
            this._pool.update(thisHash, id);
        }
        this.outputKey.set(id);
        // group_id \t entry_id@@g_no::g_name##g_model
        context.write(this.outputKey, value);
    }
}

From source file: cn.com.diditaxi.hive.cf.UDFToChar.java

License: Apache License

public Text evaluate(LongWritable i) {
    if (i == null) {
        return null;
    } else {
        out.reset();
        LazyLong.writeUTF8NoException(out, i.get());
        result.set(out.getData(), 0, out.getCount());
        return result;
    }
}

From source file: cn.com.diditaxi.hive.cf.UDFToChar.java

License: Apache License

public Text evaluate(LongWritable i, Text format) {
    if (i == null || format == null) {
        return null;
    } else {
        String pattern = format.toString().replace("9", "#");
        decimalFormat.applyPattern(pattern);
        result.set(decimalFormat.format(i.get()));
        return result;
    }
}

From source file: co.nubetech.hiho.mapreduce.DBInputAvroMapper.java

License: Apache License

public GenericRecord getKeyRecord(Schema keySchema, LongWritable key) {
    GenericRecord keyRecord = new GenericData.Record(keySchema);
    keyRecord.put(0, key.get());
    return keyRecord;
}

From source file: co.nubetech.hiho.mapreduce.TestDBInputAvroMapper.java

License: Apache License

@Test
public final void testMapperValidValues() throws IOException, InterruptedException {
    OutputCollector<AvroValue<Pair>, NullWritable> output = mock(OutputCollector.class);
    Reporter reporter = mock(Reporter.class);

    DBInputAvroMapper mapper = new DBInputAvroMapper();

    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ColumnInfo dateColumn = new ColumnInfo(1, Types.DATE, "dateColumn");
    ColumnInfo longColumn = new ColumnInfo(1, Types.BIGINT, "longColumn");
    ColumnInfo booleanColumn = new ColumnInfo(1, Types.BOOLEAN, "booleanColumn");
    ColumnInfo doubleColumn = new ColumnInfo(1, Types.DOUBLE, "doubleColumn");
    ColumnInfo floatColumn = new ColumnInfo(1, Types.FLOAT, "floatColumn");
    ColumnInfo charColumn = new ColumnInfo(1, Types.CHAR, "charColumn");
    ColumnInfo timeColumn = new ColumnInfo(1, Types.TIME, "timeColumn");
    ColumnInfo timeStampColumn = new ColumnInfo(1, Types.TIMESTAMP, "timeStampColumn");

    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();

    columns.add(intColumn);
    columns.add(stringColumn);
    columns.add(dateColumn);
    columns.add(longColumn);
    columns.add(booleanColumn);
    columns.add(doubleColumn);
    columns.add(floatColumn);
    columns.add(charColumn);
    columns.add(timeColumn);
    columns.add(timeStampColumn);

    ArrayList values = new ArrayList();
    values.add(new Integer(12));
    values.add(new String("sam"));
    values.add(new Date());
    values.add(new Long(26564L));
    values.add(true);
    values.add(1.235);
    values.add(new Float(1.0f));
    values.add('a');
    values.add(new Time(new Date().getTime()));
    values.add(new Time(new Date().getTime()));

    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);

    Schema pairSchema = DBMapper.getPairSchema(val.getColumns());
    Schema keySchema = Pair.getKeySchema(pairSchema);
    Schema valueSchema = Pair.getValueSchema(pairSchema);
    Pair pair = new Pair<GenericRecord, GenericRecord>(pairSchema);

    GenericRecord keyRecord = new GenericData.Record(keySchema);
    keyRecord.put(0, key.get());
    GenericRecord valueRecord = new GenericData.Record(valueSchema);
    List<Schema.Field> fieldSchemas = valueSchema.getFields();
    for (int i = 0; i < val.getValues().size(); ++i) {
        Schema.Type type = fieldSchemas.get(i).schema().getType();
        if (type.equals(Schema.Type.STRING)) {
            Utf8 utf8 = new Utf8((String) val.getValues().get(i).toString());
            valueRecord.put(i, utf8);
        } else {
            valueRecord.put(i, val.getValues().get(i));
        }
    }
    pair.key(keyRecord);
    pair.value(valueRecord);

    mapper.map(key, val, output, reporter);

    verify(output).collect(new AvroValue<Pair>(pair), nullWritable);
}

From source file: co.nubetech.hiho.mapreduce.TestDBInputAvroMapper.java

License: Apache License

@Test
public void testGetKeyRecord() {
    DBInputAvroMapper mapper = new DBInputAvroMapper();

    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ColumnInfo dateColumn = new ColumnInfo(1, Types.DATE, "dateColumn");
    ColumnInfo longColumn = new ColumnInfo(1, Types.BIGINT, "longColumn");
    ColumnInfo booleanColumn = new ColumnInfo(1, Types.BOOLEAN, "booleanColumn");
    ColumnInfo doubleColumn = new ColumnInfo(1, Types.DOUBLE, "doubleColumn");
    // ColumnInfo floatColumn = new ColumnInfo(1, Types.FLOAT,
    // "floatColumn");
    ColumnInfo charColumn = new ColumnInfo(1, Types.CHAR, "charColumn");
    ColumnInfo timeColumn = new ColumnInfo(1, Types.TIME, "timeColumn");
    ColumnInfo timeStampColumn = new ColumnInfo(1, Types.TIMESTAMP, "timeStampColumn");

    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();

    columns.add(intColumn);
    columns.add(stringColumn);
    columns.add(dateColumn);
    columns.add(longColumn);
    columns.add(booleanColumn);
    columns.add(doubleColumn);
    // columns.add(floatColumn);
    columns.add(charColumn);
    columns.add(timeColumn);
    columns.add(timeStampColumn);

    ArrayList values = new ArrayList();
    values.add(new Integer(12));
    values.add(new String("sam"));
    values.add(new Date());
    values.add(new Long(26564L));
    values.add(true);
    values.add(1.235);
    // values.add(new Float(1.0f));
    values.add('a');
    values.add(new Time(new Date().getTime()));
    values.add(new Time(new Date().getTime()));

    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);

    Schema pairSchema = DBMapper.getPairSchema(val.getColumns());
    Schema keySchema = Pair.getKeySchema(pairSchema);

    GenericRecord keyRecord = new GenericData.Record(keySchema);
    keyRecord.put(0, key.get());
    assertEquals(keyRecord, mapper.getKeyRecord(keySchema, key));
}

From source file: com.ailk.oci.ocnosql.tools.load.mutiple.MutipleColumnImporterMapper.java

License: Apache License

/**
 * Convert a line of TSV text into an HBase table row.
 */
@Override
public void map(LongWritable offset, Text value, Context context) throws IOException {
    byte[] lineBytes = value.getBytes();
    ts = System.currentTimeMillis();

    try {
        MutipleColumnImportTsv.TsvParser.ParsedLine parsed = parser.parse(lineBytes, value.getLength());
        String newRowKey = rowkeyGenerator.generateByGenRKStep(value.toString(), false); // generate the new rowkey

        Put put = new Put(newRowKey.getBytes());
        for (int i = 0; i < parsed.getColumnCount(); i++) {
            String columnQualifierStr = new String(parser.getQualifier(i));
            String rowStr = newRowKey + new String(parser.getFamily(i)) + columnQualifierStr;
            if (notNeedLoadColumnQulifiers.contains(columnQualifierStr)) {
                continue;
            }
            KeyValue kv = new KeyValue(rowStr.getBytes(), 0, newRowKey.getBytes().length, //roffset,rofflength
                    parser.getFamily(i), 0, parser.getFamily(i).length, parser.getQualifier(i), 0,
                    parser.getQualifier(i).length, ts, KeyValue.Type.Put, lineBytes, parsed.getColumnOffset(i),
                    parsed.getColumnLength(i));

            KeyValue newKv = new KeyValue(newRowKey.getBytes(), kv.getFamily(), kv.getQualifier(), ts,
                    kv.getValue());
            kv = null;
            put.add(newKv);
        }
        context.write(new ImmutableBytesWritable(newRowKey.getBytes()), put);
    } catch (MutipleColumnImportTsv.TsvParser.BadTsvLineException badLine) {
        if (skipBadLines) {
            System.err.println("Bad line at offset: " + offset.get() + ":\n" + badLine.getMessage());
            incrementBadLineCount(1);
            return;
        } else {
            throw new IOException(badLine);
        }
    } catch (IllegalArgumentException e) {
        if (skipBadLines) {
            System.err.println("Bad line at offset: " + offset.get() + ":\n" + e.getMessage());
            incrementBadLineCount(1);
            return;
        } else {
            throw new IOException(e);
        }
    } catch (InterruptedException e) {
        e.printStackTrace();
    } catch (RowKeyGeneratorException e) {
        System.err.println("gen rowkey error, please check config in the ocnosqlTab.xml." + e.getMessage());
        throw new IOException(e);
    } finally {
        totalLineCount.increment(1);
    }
}

From source file: com.ailk.oci.ocnosql.tools.load.single.SingleColumnImporterMapper.java

License: Apache License

/**
 * Convert a line of TSV text into an HBase table row.
 */
@Override
public void map(LongWritable offset, Text value, Context context) throws IOException {
    byte[] lineBytes = value.getBytes();

    try {
        TsvParser.ParsedLine parsed = parser.parse(lineBytes, value.getLength());
        // collect each parsed column into its own Text
        Text[] texts = new Text[parsed.getColumnCount()];
        int index = 0;
        for (int i = 0; i < parsed.getColumnCount(); i++) {
            //            if (i == parser.getRowKeyColumnIndex()){
            //               continue;
            //            }
            text = new Text();
            // append this column's bytes to the current Text
            text.append(lineBytes, parsed.getColumnOffset(i), parsed.getColumnLength(i));
            texts[index] = text;
            index++;
        }
        writer.set(texts);
        /*
        //rowkey
        String oriRowKey = new String(lineBytes, parsed.getRowKeyOffset(), parsed.getRowKeyLength());
                
        // hash rowkey
        String newRowKey = oriRowKey;
        if(rowkeyGenerator != null){
           newRowKey = (String)rowkeyGenerator.generate(oriRowKey);
        }
        */
        String newRowKey = rowkeyGenerator.generateByGenRKStep(value.toString(), false); // generate the new rowkey
        //LOG.info("single column newRowKey = " + newRowKey);
        context.write(new ImmutableBytesWritable(newRowKey.getBytes()), writer);
    } catch (BadTsvLineException badLine) {
        if (skipBadLines) {
            LOG.error("Bad line at offset: " + offset.get() + ":\n" + badLine.getMessage());
            badLineCount.increment(1);
            return;
        } else {
            throw new IOException(badLine);
        }
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}

From source file: com.alectenharmsel.research.LineCountReducer.java

License: Apache License

public void reduce(Text key, Iterable<LongWritable> counts, Context context)
        throws IOException, InterruptedException {
    long total = 0;

    for (LongWritable tmp : counts) {
        total += tmp.get();
    }

    context.getCounter(LcCounters.NUM_LINES).increment(total);
    context.write(key, new LongWritable(total));
}

From source file: com.alectenharmsel.research.SrcTokReducer.java

License: Apache License

public void reduce(Text key, Iterable<LongWritable> counts, Context context)
        throws IOException, InterruptedException {
    long sum = 0;
    for (LongWritable tmp : counts) {
        sum += tmp.get();
    }

    context.write(key, new Text(String.valueOf(sum)));
}