Example usage for org.apache.hadoop.io LongWritable get

Introduction

On this page you can find example usage of org.apache.hadoop.io.LongWritable.get(), collected from open-source projects.

Prototype

public long get() 

Document

Return the value of this LongWritable.
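A minimal sketch of get() together with its set(long) counterpart, both part of the public LongWritable API:

import org.apache.hadoop.io.LongWritable;

public class LongWritableGetDemo {
    public static void main(String[] args) {
        LongWritable lw = new LongWritable(42L); // wrap a primitive long
        long raw = lw.get();                     // unwrap it again
        lw.set(raw + 1);                         // Writables are mutable and reusable
        System.out.println(lw.get());            // prints 43
    }
}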

Usage

From source file:com.jfolson.hive.serde.RTypedBytesWritableOutput.java

License:Apache License

public void writeRawLong(LongWritable lw) throws IOException {
    out.writeRawLong(lw.get());
}

From source file:com.jfolson.hive.serde.RTypedBytesWritableOutput.java

License:Apache License

public void writeLong(LongWritable lw) throws IOException {
    out.writeLong(lw.get());
}

From source file:com.kylinolap.cube.measure.LongMaxAggregator.java

License:Apache License

@Override
public void aggregate(LongWritable value) {
    if (max == null)
        max = new LongWritable(value.get());
    else if (max.get() < value.get())
        max.set(value.get());
}

From source file:com.kylinolap.cube.measure.LongMinAggregator.java

License:Apache License

@Override
public void aggregate(LongWritable value) {
    if (min == null)
        min = new LongWritable(value.get());
    else if (min.get() > value.get())
        min.set(value.get());
}

From source file:com.kylinolap.cube.measure.LongSerializer.java

License:Apache License

@Override
public void serialize(LongWritable value, ByteBuffer out) {
    BytesUtil.writeVLong(value.get(), out);
}
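
BytesUtil.writeVLong above is Kylin-internal. As a runnable sketch of the same idea, encoding the unwrapped long in a variable-length format, Hadoop's own WritableUtils can stand in; this is an illustration, not Kylin's actual code path:

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.WritableUtils;

public class VLongRoundTrip {
    public static void main(String[] args) throws IOException {
        LongWritable value = new LongWritable(1234567L);

        // Encode value.get() using Hadoop's variable-length long format.
        DataOutputBuffer out = new DataOutputBuffer();
        WritableUtils.writeVLong(out, value.get());

        // Decode it back and check the round trip.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        System.out.println(WritableUtils.readVLong(in) == value.get()); // true
    }
}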

From source file:com.kylinolap.cube.measure.LongSumAggregator.java

License:Apache License

@Override
public void aggregate(LongWritable value) {
    sum.set(sum.get() + value.get());
}
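
The three Kylin aggregators above share one pattern: unwrap each incoming value with get(), fold it into running state, and keep that state in a mutable LongWritable that is reused via set(). A self-contained sketch of the pattern (not Kylin's actual class; its field initialization is not shown on this page):

import org.apache.hadoop.io.LongWritable;

public class MaxAggregatorSketch {
    private LongWritable max = null;

    public void aggregate(LongWritable value) {
        if (max == null) {
            max = new LongWritable(value.get()); // first value seen
        } else if (max.get() < value.get()) {
            max.set(value.get()); // reuse the existing Writable instead of allocating
        }
    }

    public static void main(String[] args) {
        MaxAggregatorSketch agg = new MaxAggregatorSketch();
        for (long v : new long[] { 3, 9, 4 }) {
            agg.aggregate(new LongWritable(v));
        }
        System.out.println(agg.max.get()); // prints 9
    }
}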

From source file:com.kylinolap.job.hadoop.cube.RangeKeyDistributionMapperTest.java

License:Apache License

@SuppressWarnings("unchecked")
@Test
public void testMapperWithoutHeader() throws IOException {

    Text inputKey1 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey2 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 122, 1, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey3 = new Text(new byte[] { 2, 2, 2, 2, 2, 2, 2, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey4 = new Text(new byte[] { 3, 3, 3, 3, 3, 3, 3, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey5 = new Text(new byte[] { 4, 4, 4, 4, 4, 4, 4, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey6 = new Text(new byte[] { 5, 5, 5, 5, 5, 5, 5, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey7 = new Text(new byte[] { 6, 6, 6, 6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });

    mapDriver.addInput(inputKey1, new Text("abc"));
    mapDriver.addInput(inputKey2, new Text("abc"));
    mapDriver.addInput(inputKey3, new Text("abc"));
    mapDriver.addInput(inputKey4, new Text("abc"));
    mapDriver.addInput(inputKey5, new Text("abc"));
    mapDriver.addInput(inputKey6, new Text("abc"));
    mapDriver.addInput(inputKey7, new Text("abc"));

    List<Pair<Text, LongWritable>> result = mapDriver.run();

    assertEquals(1, result.size());

    byte[] key1 = result.get(0).getFirst().getBytes();
    LongWritable value1 = result.get(0).getSecond();
    assertArrayEquals(new byte[] { 6, 6, 6, 6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 }, key1);
    assertEquals(147, value1.get());
}

From source file:com.kylinolap.job.hadoop.cube.RangeKeyDistributionMapperTest.java

License:Apache License

@SuppressWarnings("unchecked")
@Test
public void testMapperWithHeader() throws IOException {

    Text inputKey1 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0,
            0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey2 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0,
            0, 0, 0, 0, 0, 127, 11, 122, 1, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey3 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 2, 2,
            2, 2, 2, 2, 2, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey4 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 3, 3,
            3, 3, 3, 3, 3, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey5 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 4, 4,
            4, 4, 4, 4, 4, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey6 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 5, 5,
            5, 5, 5, 5, 5, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
    Text inputKey7 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6,
            6, 6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });

    mapDriver.addInput(inputKey1, new Text("abc"));
    mapDriver.addInput(inputKey2, new Text("abc"));
    mapDriver.addInput(inputKey3, new Text("abc"));
    mapDriver.addInput(inputKey4, new Text("abc"));
    mapDriver.addInput(inputKey5, new Text("abc"));
    mapDriver.addInput(inputKey6, new Text("abc"));
    mapDriver.addInput(inputKey7, new Text("abc"));

    List<Pair<Text, LongWritable>> result = mapDriver.run();

    assertEquals(1, result.size());

    byte[] key1 = result.get(0).getFirst().getBytes();
    LongWritable value1 = result.get(0).getSecond();
    assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6, 6,
            6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 }, key1);
    assertEquals(273, value1.get());

}
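
Both tests rely on a mapDriver field whose setup is not shown on this page; it is an MRUnit MapDriver. A plausible sketch of that setup, in which the mapper class name and the type parameters are assumptions inferred from the test's file name, inputs, and outputs:

// Assumes org.apache.hadoop.mrunit.mapreduce.MapDriver and a
// RangeKeyDistributionMapper taken from the test's file name.
private MapDriver<Text, Text, Text, LongWritable> mapDriver;

@Before
public void setUp() {
    // MRUnit runs the mapper in memory; no Hadoop cluster is needed.
    mapDriver = MapDriver.newMapDriver(new RangeKeyDistributionMapper());
}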

From source file:com.kylinolap.job.hadoop.cube.RangeKeyDistributionReducer.java

License:Apache License

@Override
public void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
    for (LongWritable v : values) {
        bytesRead += v.get();
    }

    if (bytesRead >= ONE_GIGA_BYTES) {
        gbPoints.add(new Text(key));
        bytesRead = 0; // reset bytesRead
    }
}

From source file:com.kylinolap.job.hadoop.cube.RowKeyDistributionCheckerReducer.java

License:Apache License

@Override
public void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {

    long length = 0;
    for (LongWritable v : values) {
        length += v.get();
    }

    outputKey.set(length);
    context.write(key, outputKey);
}
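
A reducer of this shape can be exercised the same way the mapper tests above exercise their mapper, using MRUnit's ReduceDriver. A minimal sketch, in which the reducer's no-argument construction and type parameters are assumptions:

import java.util.Arrays;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;

// ... inside a @Test method:
ReduceDriver<Text, LongWritable, Text, LongWritable> reduceDriver =
        ReduceDriver.newReduceDriver(new RowKeyDistributionCheckerReducer());

reduceDriver.withInput(new Text("rowkey"), Arrays.asList(new LongWritable(10L), new LongWritable(32L)))
        .withOutput(new Text("rowkey"), new LongWritable(42L)) // the reducer sums 10 + 32 via get()
        .runTest();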