Example usage for org.apache.hadoop.io.LongWritable: the LongWritable(long value) constructor

List of usage examples for the org.apache.hadoop.io.LongWritable(long value) constructor.

Introduction

On this page you can find example usages of the org.apache.hadoop.io.LongWritable(long value) constructor.

Prototype

public LongWritable(long value) 
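
As a quick orientation before the collected examples, here is a minimal, self-contained sketch of how this constructor is typically used (the class name is just for illustration; the calls are the standard org.apache.hadoop.io.LongWritable API):

import org.apache.hadoop.io.LongWritable;

public class LongWritableExample {
    public static void main(String[] args) {
        // Box a primitive long so it can travel through Hadoop as a serializable key or value.
        LongWritable key = new LongWritable(42L);

        // Read the wrapped value back, then mutate the same instance;
        // Writables are routinely reused across records to avoid allocation.
        long raw = key.get();
        key.set(raw + 1);

        // LongWritable implements WritableComparable, so keys sort numerically.
        System.out.println(key.compareTo(new LongWritable(42L))); // prints 1, since 43 > 42
    }
}

The same pattern appears throughout the tests below as new LongWritable(1) or new LongWritable(0L), supplying the record key that Hadoop's input formats would normally hand to a mapper.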

Usage

From source file: co.nubetech.hiho.mapreduce.TestDBInputAvroMapper.java

License: Apache License

@Test
public void testGetValueRecord() {
    DBInputAvroMapper mapper = new DBInputAvroMapper();

    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ColumnInfo dateColumn = new ColumnInfo(1, Types.DATE, "dateColumn");
    ColumnInfo longColumn = new ColumnInfo(1, Types.BIGINT, "longColumn");
    ColumnInfo booleanColumn = new ColumnInfo(1, Types.BOOLEAN, "booleanColumn");
    ColumnInfo doubleColumn = new ColumnInfo(1, Types.DOUBLE, "doubleColumn");
    // ColumnInfo floatColumn = new ColumnInfo(1, Types.FLOAT,
    // "floatColumn");
    ColumnInfo charColumn = new ColumnInfo(1, Types.CHAR, "charColumn");
    ColumnInfo timeColumn = new ColumnInfo(1, Types.TIME, "timeColumn");
    ColumnInfo timeStampColumn = new ColumnInfo(1, Types.TIMESTAMP, "timeStampColumn");

    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();

    columns.add(intColumn);
    columns.add(stringColumn);
    columns.add(dateColumn);
    columns.add(longColumn);
    columns.add(booleanColumn);
    columns.add(doubleColumn);
    // columns.add(floatColumn);
    columns.add(charColumn);
    columns.add(timeColumn);
    columns.add(timeStampColumn);

    ArrayList values = new ArrayList();
    values.add(new Integer(12));
    values.add(new String("sam"));
    values.add(new Date());
    values.add(new Long(26564L));
    values.add(true);
    values.add(1.235);
    // values.add(new Float(1.0f));
    values.add('a');
    values.add(new Time(new Date().getTime()));
    values.add(new Time(new Date().getTime()));

    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);

    Schema pairSchema = DBMapper.getPairSchema(val.getColumns());
    Schema keySchema = Pair.getKeySchema(pairSchema);
    Schema valueSchema = Pair.getValueSchema(pairSchema);

    GenericRecord valueRecord = new GenericData.Record(valueSchema);
    List<Schema.Field> fieldSchemas = valueSchema.getFields();
    for (int i = 0; i < val.getValues().size(); ++i) {
        Schema.Type type = fieldSchemas.get(i).schema().getType();
        if (type.equals(Schema.Type.STRING)) {
            Utf8 utf8 = new Utf8(val.getValues().get(i).toString());
            valueRecord.put(i, utf8);
        } else {
            valueRecord.put(i, val.getValues().get(i));
        }
    }

    assertEquals(valueRecord, mapper.getValueRecord(valueSchema, val));
}

From source file: co.nubetech.hiho.mapreduce.TestDBInputDelimMapper.java

License: Apache License

@Test
public final void testMapperValidValues() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Configuration conf = new Configuration();
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, ",");
    when(context.getConfiguration()).thenReturn(conf);

    DBInputDelimMapper mapper = new DBInputDelimMapper();

    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ColumnInfo dateColumn = new ColumnInfo(1, Types.DATE, "dateColumn");
    ColumnInfo longColumn = new ColumnInfo(1, Types.BIGINT, "longColumn");
    ColumnInfo booleanColumn = new ColumnInfo(1, Types.BOOLEAN, "booleanColumn");
    ColumnInfo doubleColumn = new ColumnInfo(1, Types.DOUBLE, "doubleColumn");
    ColumnInfo charColumn = new ColumnInfo(1, Types.CHAR, "charColumn");
    ColumnInfo timeColumn = new ColumnInfo(1, Types.TIME, "timeColumn");
    ColumnInfo timeStampColumn = new ColumnInfo(1, Types.TIMESTAMP, "timeStampColumn");
    ColumnInfo floatColumn = new ColumnInfo(1, Types.FLOAT, "floatColumn");

    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    columns.add(intColumn);
    columns.add(stringColumn);
    columns.add(dateColumn);
    columns.add(longColumn);
    columns.add(booleanColumn);
    columns.add(doubleColumn);
    columns.add(charColumn);
    columns.add(timeColumn);
    columns.add(timeStampColumn);
    columns.add(floatColumn);

    ArrayList<Comparable> values = new ArrayList<Comparable>();
    values.add(new Integer(12));
    values.add(new String("sam"));
    values.add(new Date());
    values.add(new Long(26564L));
    values.add(true);
    values.add(1.235);
    values.add('a');
    values.add(new Time(new Date().getTime()));
    values.add(new Time(new Date().getTime()));
    values.add(new Float(1.0f));

    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);
    mapper.map(key, val, context);

    Text outkey = new Text();
    Text outval = new Text();
    StringBuilder builder = new StringBuilder();
    builder.append(new Integer(12) + "," + new String("sam") + "," + new Date() + "," + new Long(26564L) + ","
            + true + "," + 1.235 + "," + 'a' + "," + new Time(new Date().getTime()) + ","
            + new Time(new Date().getTime()) + "," + new Float(1.0f));

    outval.set(builder.toString());
    verify(context).write(outkey, outval);
}

From source file: co.nubetech.hiho.mapreduce.TestDBInputDelimMapper.java

License: Apache License

@Test
public final void testMapperValidValuesDelimiter() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Configuration conf = new Configuration();
    String delimiter = "DELIM";
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, delimiter);
    when(context.getConfiguration()).thenReturn(conf);

    DBInputDelimMapper mapper = new DBInputDelimMapper();

    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR, "stringColumn");
    ColumnInfo dateColumn = new ColumnInfo(1, Types.DATE, "dateColumn");
    ColumnInfo longColumn = new ColumnInfo(1, Types.BIGINT, "longColumn");
    ColumnInfo booleanColumn = new ColumnInfo(1, Types.BOOLEAN, "booleanColumn");
    ColumnInfo doubleColumn = new ColumnInfo(1, Types.DOUBLE, "doubleColumn");
    ColumnInfo charColumn = new ColumnInfo(1, Types.CHAR, "charColumn");
    ColumnInfo timeColumn = new ColumnInfo(1, Types.TIME, "timeColumn");
    ColumnInfo timeStampColumn = new ColumnInfo(1, Types.TIMESTAMP, "timeStampColumn");
    ColumnInfo floatColumn = new ColumnInfo(1, Types.FLOAT, "floatColumn");

    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    columns.add(intColumn);
    columns.add(stringColumn);
    columns.add(dateColumn);
    columns.add(longColumn);
    columns.add(booleanColumn);
    columns.add(doubleColumn);
    columns.add(charColumn);
    columns.add(timeColumn);
    columns.add(timeStampColumn);
    columns.add(floatColumn);

    ArrayList<Comparable> values = new ArrayList<Comparable>();
    values.add(new Integer(12));
    values.add(new String("sam"));
    values.add(new Date());
    values.add(new Long(26564L));
    values.add(true);
    values.add(1.235);
    values.add('a');
    values.add(new Time(new Date().getTime()));
    values.add(new Time(new Date().getTime()));
    values.add(new Float(1.0f));

    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);
    mapper.map(key, val, context);

    Text outkey = new Text();
    Text outval = new Text();
    StringBuilder builder = new StringBuilder();
    builder.append(new Integer(12) + delimiter + new String("sam") + delimiter + new Date() + delimiter
            + new Long(26564L) + delimiter + true + delimiter + 1.235 + delimiter + 'a' + delimiter
            + new Time(new Date().getTime()) + delimiter + new Time(new Date().getTime()) + delimiter
            + new Float(1.0f));

    outval.set(builder.toString());
    verify(context).write(outkey, outval);
}

From source file: co.nubetech.hiho.mapreduce.TestDBInputDelimMapper.java

License: Apache License

@Test
public final void testMapperNullValues() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Configuration conf = new Configuration();
    conf.set(HIHOConf.INPUT_OUTPUT_DELIMITER, ",");
    when(context.getConfiguration()).thenReturn(conf);

    DBInputDelimMapper mapper = new DBInputDelimMapper();
    ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
    ArrayList values = new ArrayList();
    GenericDBWritable val = new GenericDBWritable(columns, values);
    LongWritable key = new LongWritable(1);
    mapper.map(key, val, context);

    Text outkey = new Text();
    Text outval = new Text();
    verify(context).write(outkey, outval);
}

From source file: co.nubetech.hiho.mapreduce.TestGenericDBLoadDataMapper.java

License: Apache License

@Test
public final void testMapperWithValidValues() throws Exception {

    Mapper.Context context = mock(Mapper.Context.class);
    GenericDBLoadDataMapper mapper = new GenericDBLoadDataMapper();

    mapper.setDelimiter(",");
    ArrayList<ColumnInfo> tableInfo = new ArrayList<ColumnInfo>();

    ColumnInfo columnInfo1 = new ColumnInfo();
    columnInfo1.setIndex(0);
    columnInfo1.setName("id");
    columnInfo1.setType(Types.BIGINT);

    ColumnInfo columnInfo2 = new ColumnInfo();
    columnInfo2.setIndex(1);
    columnInfo2.setName("name");
    columnInfo2.setType(Types.VARCHAR);

    ColumnInfo columnInfo3 = new ColumnInfo();
    columnInfo3.setIndex(2);
    columnInfo3.setName("isValid");
    columnInfo3.setType(Types.BOOLEAN);

    /*ColumnInfo columnInfo4 = new ColumnInfo();
    columnInfo4.setIndex(3);
    columnInfo4.setName("date");
    columnInfo4.setType(Types.DATE);*/

    ColumnInfo columnInfo5 = new ColumnInfo();
    columnInfo5.setIndex(4);
    columnInfo5.setName("percent");
    columnInfo5.setType(Types.DOUBLE);

    tableInfo.add(columnInfo1);
    tableInfo.add(columnInfo2);
    tableInfo.add(columnInfo3);
    //tableInfo.add(columnInfo4);
    tableInfo.add(columnInfo5);

    mapper.setTableInfo(tableInfo);

    mapper.map(new LongWritable(0L), new Text("1,Sam,true,84.0"), context);

    ArrayList values = new ArrayList();
    values.add(1l);
    values.add("Sam");
    values.add(true);
    values.add(84.0);
    GenericDBWritable gdw = new GenericDBWritable(tableInfo, values);
    verify(context).write(gdw, null);

}

From source file: co.nubetech.hiho.mapreduce.TestGenericDBLoadDataMapper.java

License: Apache License

@Test
public final void testMapperWithNullValues() throws Exception {

    Mapper.Context context = mock(Mapper.Context.class);
    GenericDBLoadDataMapper mapper = new GenericDBLoadDataMapper();

    mapper.setDelimiter(",");
    ArrayList<ColumnInfo> tableInfo = new ArrayList<ColumnInfo>();

    ColumnInfo columnInfo1 = new ColumnInfo();
    columnInfo1.setIndex(0);
    columnInfo1.setName("id");
    columnInfo1.setType(Types.BIGINT);

    ColumnInfo columnInfo2 = new ColumnInfo();
    columnInfo2.setIndex(1);
    columnInfo2.setName("name");
    columnInfo2.setType(Types.VARCHAR);

    ColumnInfo columnInfo3 = new ColumnInfo();
    columnInfo3.setIndex(2);
    columnInfo3.setName("isValid");
    columnInfo3.setType(Types.BOOLEAN);

    /*ColumnInfo columnInfo4 = new ColumnInfo();
    columnInfo4.setIndex(3);
    columnInfo4.setName("date");
    columnInfo4.setType(Types.DATE);*/

    ColumnInfo columnInfo5 = new ColumnInfo();
    columnInfo5.setIndex(4);
    columnInfo5.setName("percent");
    columnInfo5.setType(Types.DOUBLE);

    tableInfo.add(columnInfo1);
    tableInfo.add(columnInfo2);
    tableInfo.add(columnInfo3);
    //tableInfo.add(columnInfo4);
    tableInfo.add(columnInfo5);

    mapper.setTableInfo(tableInfo);

    mapper.map(new LongWritable(0L), new Text("1, ,true,84.0"), context);

    ArrayList values = new ArrayList();
    values.add(1l);
    values.add(null);
    values.add(true);
    values.add(84.0);
    GenericDBWritable gdw = new GenericDBWritable(tableInfo, values);
    verify(context).write(gdw, null);

}

From source file: co.nubetech.hiho.mapreduce.TestGenericDBLoadDataMapper.java

License: Apache License

@Test(expected = IOException.class)
public final void testMapperWithUnequalLengthOfColumnInFileAndTable() throws Exception {

    Mapper.Context context = mock(Mapper.Context.class);
    GenericDBLoadDataMapper mapper = new GenericDBLoadDataMapper();

    mapper.setDelimiter(",");
    ArrayList<ColumnInfo> tableInfo = new ArrayList<ColumnInfo>();

    ColumnInfo columnInfo1 = new ColumnInfo();
    columnInfo1.setIndex(0);
    columnInfo1.setName("id");
    columnInfo1.setType(Types.BIGINT);

    ColumnInfo columnInfo2 = new ColumnInfo();
    columnInfo2.setIndex(1);
    columnInfo2.setName("name");
    columnInfo2.setType(Types.VARCHAR);

    ColumnInfo columnInfo3 = new ColumnInfo();
    columnInfo3.setIndex(2);
    columnInfo3.setName("isValid");
    columnInfo3.setType(Types.BOOLEAN);

    /*ColumnInfo columnInfo4 = new ColumnInfo();
    columnInfo4.setIndex(3);
    columnInfo4.setName("date");
    columnInfo4.setType(Types.DATE);*/

    ColumnInfo columnInfo5 = new ColumnInfo();
    columnInfo5.setIndex(4);
    columnInfo5.setName("percent");
    columnInfo5.setType(Types.DOUBLE);

    tableInfo.add(columnInfo1);
    tableInfo.add(columnInfo2);
    tableInfo.add(columnInfo3);
    //tableInfo.add(columnInfo4);
    tableInfo.add(columnInfo5);

    mapper.setTableInfo(tableInfo);

    mapper.map(new LongWritable(0L), new Text("1,Sam,true,84.0,42"), context);
}

From source file: co.nubetech.hiho.merge.TestMergeJob.java

License: Apache License

@Test
public void testMergeByCustomObjectKeyWithSequenceFileInputFormat() throws Exception {
    Student student1 = setStudent(new Text("Sam"), new Text("US"), new IntWritable(1),
            new LongWritable(9999999998L), new DoubleWritable(99.12));
    Student student2 = setStudent(new Text("John"), new Text("AUS"), new IntWritable(2),
            new LongWritable(9999999999L), new DoubleWritable(90.12));
    Student student3 = setStudent(new Text("Mary"), new Text("UK"), new IntWritable(3),
            new LongWritable(9999999988L), new DoubleWritable(69.12));
    Student student4 = setStudent(new Text("Kelvin"), new Text("UK"), new IntWritable(4),
            new LongWritable(9999998888L), new DoubleWritable(59.12));

    HashMap<Student, Text> inputData1 = new HashMap<Student, Text>();
    inputData1.put(student1, new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(student2, new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(student3, new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");

    HashMap<Student, Text> inputData2 = new HashMap<Student, Text>();
    inputData2.put(student2, new Text("Austin Farley,4794 Donec Ave,1-230-823-8164,13508"));
    inputData2.put(student3, new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(student4, new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");

    String[] args = new String[] { "-newPath", "/input1", "-oldPath", "/input2", "-mergeBy", "key",
            "-outputPath", "output", "-inputFormat",
            "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat", "-inputKeyClassName",
            "co.nubetech.hiho.testdata.Student", "-inputValueClassName", "org.apache.hadoop.io.Text" };
    MergeJob job = runMergeJobs(args);
    assertEquals(3, job.getTotalRecordsNew());
    assertEquals(3, job.getTotalRecordsOld());
    assertEquals(0, job.getBadRecords());
    assertEquals(4, job.getOutput());

    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "output/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<Student> expectedOutput = new ArrayList<Student>();
    expectedOutput.add(student1);
    expectedOutput.add(student2);
    expectedOutput.add(student3);
    expectedOutput.add(student4);
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
        logger.debug("key and value is: " + writableKey + ", " + writableValue);
        assertTrue("Matched output " + writableKey, expectedOutput.contains(writableKey));
        count++;
    }
    IOUtils.closeStream(reader);
    assertEquals(4, count);
}

From source file: co.nubetech.hiho.merge.TestMergeJob.java

License: Apache License

@Test
public void testMergeByLongWritableKeyWithSequenceFileInputFormat() throws Exception {
    HashMap<LongWritable, Text> inputData1 = new HashMap<LongWritable, Text>();
    inputData1.put(new LongWritable(1), new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(new LongWritable(2), new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(new LongWritable(3), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");

    HashMap<LongWritable, Text> inputData2 = new HashMap<LongWritable, Text>();
    inputData2.put(new LongWritable(1), new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(new LongWritable(2), new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    inputData2.put(new LongWritable(4), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");

    String[] args = new String[] { "-newPath", "/input1", "-oldPath", "/input2", "-mergeBy", "key",
            "-outputPath", "output", "-inputFormat",
            "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat", "-inputKeyClassName",
            "org.apache.hadoop.io.LongWritable", "-inputValueClassName", "org.apache.hadoop.io.Text" };
    MergeJob job = runMergeJobs(args);
    assertEquals(3, job.getTotalRecordsNew());
    assertEquals(3, job.getTotalRecordsOld());
    assertEquals(0, job.getBadRecords());
    assertEquals(4, job.getOutput());

    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "output/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<LongWritable> expectedOutput = new ArrayList<LongWritable>();
    expectedOutput.add(new LongWritable(1));
    expectedOutput.add(new LongWritable(2));
    expectedOutput.add(new LongWritable(3));
    expectedOutput.add(new LongWritable(4));
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
        logger.debug("key and value is: " + writableKey + ", " + writableValue);
        assertTrue("Matched output " + writableKey, expectedOutput.contains(writableKey));
        count++;
    }
    IOUtils.closeStream(reader);
    assertEquals(4, count);
}
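
The createSequenceFileInHdfs helper invoked in the two merge tests above is not part of these excerpts. As a sketch only (the class and method names here are hypothetical, not the actual HIHO code), a minimal implementation writing LongWritable/Text pairs could look like the following, assuming the test Configuration points at a reachable (local or mini-cluster) FileSystem:

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class SequenceFileTestUtil {
    // Hypothetical stand-in for createSequenceFileInHdfs: writes each
    // LongWritable/Text pair into a SequenceFile at dir/fileName.
    public static void writeSequenceFile(Map<LongWritable, Text> data, String dir, String fileName)
            throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path(dir, fileName);
        // Same pre-2.x SequenceFile API style that the tests above use for SequenceFile.Reader.
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path, LongWritable.class, Text.class);
        try {
            for (Map.Entry<LongWritable, Text> entry : data.entrySet()) {
                writer.append(entry.getKey(), entry.getValue());
            }
        } finally {
            writer.close();
        }
    }
}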

From source file: co.nubetech.hiho.merge.TestMergeKeyReducer.java

License: Apache License

@Test
public void testReducerForLongWritableKey() throws IOException, InterruptedException {
    LongWritable key = new LongWritable(Long.parseLong("123"));
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    HihoValue hihoValue1 = new HihoValue();
    HihoValue hihoValue2 = new HihoValue();
    Text value1 = new Text("value1");
    Text value2 = new Text("value2");
    hihoValue1.setVal(value1);
    hihoValue2.setVal(value2);
    hihoValue1.setIsOld(true);
    hihoValue2.setIsOld(false);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();
    values.add(hihoValue1);
    values.add(hihoValue2);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeKeyReducer mergeReducer = new MergeKeyReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(key, value2);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
}