Example usage for org.apache.hadoop.io IntWritable get

Introduction

On this page you can find example usages of org.apache.hadoop.io.IntWritable.get(), collected from open-source projects.

Prototype

public int get() 

Document

Return the value of this IntWritable.
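
Before the project-sourced examples below, here is a minimal, self-contained sketch of the set()/get() round trip; the class name is ours and the only dependency assumed is hadoop-common:

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetDemo {
    public static void main(String[] args) {
        IntWritable count = new IntWritable(42); // wrap a primitive int
        int value = count.get();                 // get() unwraps it again
        count.set(value + 1);                    // the instance is mutable and reusable
        System.out.println(count.get());         // prints 43
    }
}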

Usage

From source file:com.digitalpebble.behemoth.BehemothDocument.java

License:Apache License

public void readAnnotationFields(Annotation annot, DataInput in, List<String> types) throws IOException {
    IntWritable posType = new IntWritable();
    posType.readFields(in);
    annot.setType(types.get(posType.get()));
    annot.setStart(WritableUtils.readVLong(in));
    annot.setEnd(WritableUtils.readVLong(in));
    HashMap<String, String> features = null;
    int numFeatures = in.readInt();
    if (numFeatures > 0)
        features = new HashMap<String, String>(numFeatures);
    for (int i = 0; i < numFeatures; i++) {
        posType.readFields(in);
        String fname = types.get(posType.get());
        String fvalue = WritableUtils.readString(in);
        features.put(fname, fvalue);
    }
    annot.setFeatures(features);
}
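
The reader above implies a fixed byte layout: a type index, two vlongs for start and end, then a feature count followed by (name index, value) pairs. A hypothetical write-side counterpart, shown only to make that layout explicit; the Annotation getters are assumed accessors, not necessarily BehemothDocument's actual API:

public void writeAnnotationFields(Annotation annot, DataOutput out, List<String> types) throws IOException {
    new IntWritable(types.indexOf(annot.getType())).write(out); // assumed getter
    WritableUtils.writeVLong(out, annot.getStart());            // assumed getter
    WritableUtils.writeVLong(out, annot.getEnd());              // assumed getter
    Map<String, String> features = annot.getFeatures();         // assumed getter
    int numFeatures = (features == null) ? 0 : features.size();
    out.writeInt(numFeatures);
    if (features != null) {
        for (Map.Entry<String, String> feature : features.entrySet()) {
            new IntWritable(types.indexOf(feature.getKey())).write(out);
            WritableUtils.writeString(out, feature.getValue());
        }
    }
}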

From source file:com.elex.dmp.lda.CVB0Driver.java

License:Apache License

private static int getNumTerms(Configuration conf, Path dictionaryPath) throws IOException {
    FileSystem fs = dictionaryPath.getFileSystem(conf);
    Text key = new Text();
    IntWritable value = new IntWritable();
    int maxTermId = -1;
    for (FileStatus stat : fs.globStatus(dictionaryPath)) {
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, stat.getPath(), conf);
        while (reader.next(key, value)) {
            maxTermId = Math.max(maxTermId, value.get());
        }
    }
    return maxTermId + 1;
}
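
The SequenceFile.Reader constructor used above is deprecated in Hadoop 2.x, and the readers are never closed. A sketch of the same scan using the Reader option factory and try-with-resources, assuming Hadoop 2.x or later:

private static int getNumTerms(Configuration conf, Path dictionaryPath) throws IOException {
    FileSystem fs = dictionaryPath.getFileSystem(conf);
    Text key = new Text();
    IntWritable value = new IntWritable();
    int maxTermId = -1;
    for (FileStatus stat : fs.globStatus(dictionaryPath)) {
        try (SequenceFile.Reader reader = new SequenceFile.Reader(conf,
                SequenceFile.Reader.file(stat.getPath()))) {
            while (reader.next(key, value)) {
                maxTermId = Math.max(maxTermId, value.get());
            }
        }
    }
    return maxTermId + 1; // term ids are assumed dense and zero-based
}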

From source file:com.ema.hadoop.bestclient.BCReducer.java

@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    int sum = 0;
    for (IntWritable v : values) {
        sum += v.get();
    }
    context.write(new Text(key), new IntWritable(sum));
}
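
A hypothetical mapper that could feed this reducer, emitting a (client, 1) pair per input line; the assumption that the client identifier is the first whitespace-separated field is ours:

public class BCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final IntWritable ONE = new IntWritable(1);
    private final Text client = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        client.set(value.toString().split("\\s+")[0]); // assumed field position
        context.write(client, ONE);
    }
}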

From source file:com.facebook.hive.orc.lazy.OrcLazyIntObjectInspector.java

License:Open Source License

@Override
public Object getPrimitiveJavaObject(Object o) {
    IntWritable writable = (IntWritable) getPrimitiveWritableObject(o);
    return writable == null ? null : Integer.valueOf(writable.get());
}

From source file:com.facebook.hive.orc.TestOrcFile.java

License:Apache License

private void compareRowsWithoutNextIsNull(OrcStruct row, RandomRowInputs inputs, int rowNumber,
        NumberOfNulls numNulls, boolean usingPrimitives) throws Exception {

    ReallyBigRow expected = null;
    switch (numNulls) {
    case MANY:
    case SOME:
        expected = createRandomRowWithNulls(inputs.intValues, inputs.doubleValues, inputs.stringValues,
                inputs.byteValues, inputs.words, rowNumber, numNulls);
        break;
    case NONE:
        expected = createRandomRow(inputs.intValues, inputs.doubleValues, inputs.stringValues,
                inputs.byteValues, inputs.words, rowNumber);
        break;
    }

    OrcLazyBoolean lazyBoolean1 = (OrcLazyBoolean) row.getFieldValue(0);
    BooleanWritable boolean1 = (BooleanWritable) lazyBoolean1.materialize();
    if (boolean1 == null) {
        assertNull(expected.boolean1);
    } else {
        assertEquals(expected.boolean1.booleanValue(), boolean1.get());
    }

    ByteWritable byte1 = (ByteWritable) ((OrcLazyByte) row.getFieldValue(1)).materialize();
    if (byte1 == null) {
        assertNull(expected.byte1);
    } else {
        assertEquals(expected.byte1.byteValue(), byte1.get());
    }

    OrcLazyShort lazyShort1 = (OrcLazyShort) row.getFieldValue(2);
    ShortWritable short1 = (ShortWritable) lazyShort1.materialize();
    if (short1 == null) {
        assertNull(expected.short1);
    } else {
        assertEquals(expected.short1.shortValue(), short1.get());
    }

    OrcLazyInt lazyInt1 = (OrcLazyInt) row.getFieldValue(3);
    IntWritable int1 = (IntWritable) lazyInt1.materialize();
    if (int1 == null) {
        assertNull(expected.int1);
    } else {
        assertEquals(expected.int1.intValue(), int1.get());
    }

    OrcLazyLong lazyLong1 = (OrcLazyLong) row.getFieldValue(4);
    LongWritable long1 = (LongWritable) lazyLong1.materialize();
    if (long1 == null) {
        assertNull(expected.long1);
    } else {
        assertEquals(expected.long1.longValue(), long1.get());
    }

    OrcLazyShort lazyShort2 = (OrcLazyShort) row.getFieldValue(5);
    ShortWritable short2 = (ShortWritable) lazyShort2.materialize();
    if (short2 == null) {
        assertNull(expected.short2);
    } else {
        assertEquals(expected.short2.shortValue(), short2.get());
    }

    OrcLazyInt lazyInt2 = (OrcLazyInt) row.getFieldValue(6);
    IntWritable int2 = (IntWritable) lazyInt2.materialize();
    if (int2 == null) {
        assertNull(expected.int2);
    } else {
        assertEquals(expected.int2.intValue(), int2.get());
    }

    OrcLazyLong lazyLong2 = (OrcLazyLong) row.getFieldValue(7);
    LongWritable long2 = (LongWritable) lazyLong2.materialize();
    if (long2 == null) {
        assertNull(expected.long2);
    } else {
        assertEquals(expected.long2.longValue(), long2.get());
    }

    OrcLazyShort lazyShort3 = (OrcLazyShort) row.getFieldValue(8);
    ShortWritable short3 = (ShortWritable) lazyShort3.materialize();
    if (short3 == null) {
        assertNull(expected.short3);
    } else {
        assertEquals(expected.short3.shortValue(), short3.get());
    }

    OrcLazyInt lazyInt3 = (OrcLazyInt) row.getFieldValue(9);
    IntWritable int3 = (IntWritable) lazyInt3.materialize();
    if (int3 == null) {
        assertNull(expected.int3);
    } else {
        assertEquals(expected.int3.intValue(), int3.get());
    }

    OrcLazyLong lazyLong3 = (OrcLazyLong) row.getFieldValue(10);
    LongWritable long3 = (LongWritable) lazyLong3.materialize();
    if (long3 == null) {
        assertNull(expected.long3);
    } else {
        assertEquals(expected.long3.longValue(), long3.get());
    }

    OrcLazyFloat lazyFloat1 = (OrcLazyFloat) row.getFieldValue(11);
    FloatWritable float1 = (FloatWritable) lazyFloat1.materialize();
    if (float1 == null) {
        assertNull(expected.float1);
    } else {
        assertEquals(expected.float1.floatValue(), float1.get(), 0.0001);
    }

    OrcLazyDouble lazyDouble1 = (OrcLazyDouble) row.getFieldValue(12);
    DoubleWritable double1 = (DoubleWritable) lazyDouble1.materialize();
    if (double1 == null) {
        assertNull(expected.double1);
    } else {
        assertEquals(expected.double1.doubleValue(), double1.get(), 0.0001);
    }

    BytesWritable bytes1 = (BytesWritable) ((OrcLazyBinary) row.getFieldValue(13)).materialize();
    if (bytes1 == null) {
        assertNull(expected.bytes1);
    } else {
        assertEquals(expected.bytes1, bytes1);
    }

    Text string1 = (Text) ((OrcLazyString) row.getFieldValue(14)).materialize();
    if (string1 == null) {
        assertNull(expected.string1);
    } else {
        assertEquals(expected.string1, string1);
    }

    Text string2 = (Text) ((OrcLazyString) row.getFieldValue(15)).materialize();
    if (string2 == null) {
        assertNull(expected.string2);
    } else {
        assertEquals(expected.string2, string2);
    }

    Text string3 = (Text) ((OrcLazyString) row.getFieldValue(16)).materialize();
    if (string3 == null) {
        assertNull(expected.string3);
    } else {
        assertEquals(expected.string3, string3);
    }

    OrcStruct middle = (OrcStruct) ((OrcLazyStruct) row.getFieldValue(17)).materialize();
    if (middle == null) {
        assertNull(expected.middle);
    } else {
        final List<InnerStruct> expectedList = expected.middle.list;
        final List<OrcStruct> actualList = (List) middle.getFieldValue(0);
        compareListOfStructs(expectedList, actualList);
        final List<String> actualFieldNames = middle.getFieldNames();
        final List<String> expectedFieldNames = ImmutableList.of("list");
        compareLists(expectedFieldNames, actualFieldNames);
    }

    List list = (List) ((OrcLazyList) row.getFieldValue(18)).materialize();
    if (list == null) {
        assertNull(expected.list);
    } else {
        compareListOfStructs(expected.list, list);
    }

    Map map = (Map) ((OrcLazyMap) row.getFieldValue(19)).materialize();
    if (map == null) {
        assertNull(expected.map);
    } else {
        compareMap(expected.map, map);
    }

    if (usingPrimitives) {
        compareRowsUsingPrimitives(expected, lazyBoolean1, lazyShort1, lazyInt1, lazyLong1, lazyShort2,
                lazyInt2, lazyLong2, lazyShort3, lazyInt3, lazyLong3, lazyFloat1, lazyDouble1);
    }
}

From source file:com.facebook.hiveio.mapreduce.output.HiveTools.java

License:Apache License

/**
 * Map hive record
 *
 * @param conf Configuration
 * @param value data
 * @return hive record
 */
public static HiveWritableRecord mapToHiveRecord(Configuration conf, MapWritable value) {
    HiveTableSchema schema = HiveTableSchemas.lookup(conf, getHiveTableName());
    HiveWritableRecord record = HiveRecordFactory.newWritableRecord(schema);
    for (Map.Entry<Writable, Writable> entry : value.entrySet()) {
        IntWritable intKey = (IntWritable) entry.getKey();
        LongWritable longValue = (LongWritable) entry.getValue();
        record.set(intKey.get(), longValue.get());
    }
    return record;
}
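
A sketch of a call site for this method, building the (column index, value) MapWritable it consumes; the column positions and values are illustrative, not taken from HiveTools:

static HiveWritableRecord exampleRecord(Configuration conf) {
    MapWritable row = new MapWritable();
    row.put(new IntWritable(0), new LongWritable(42L));                        // illustrative column 0
    row.put(new IntWritable(1), new LongWritable(System.currentTimeMillis())); // illustrative column 1
    return HiveTools.mapToHiveRecord(conf, row);
}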

From source file:com.facebook.presto.hive.DwrfHiveRecordCursor.java

License:Apache License

private void parseLongColumn(int column) {
    // don't include column number in message because it causes boxing which is expensive here
    checkArgument(!isPartitionColumn[column], "Column is a partition key");

    loaded[column] = true;
    Object object = getMaterializedValue(column);
    if (object == null) {
        nulls[column] = true;
    } else {
        nulls[column] = false;

        HiveType type = hiveTypes[column];
        if (hiveTypes[column].equals(HIVE_SHORT)) {
            ShortWritable shortWritable = checkWritable(object, ShortWritable.class);
            longs[column] = shortWritable.get();
        } else if (hiveTypes[column].equals(HIVE_TIMESTAMP)) {
            TimestampWritable timestampWritable = (TimestampWritable) object;
            long seconds = timestampWritable.getSeconds();
            int nanos = timestampWritable.getNanos();
            longs[column] = (seconds * 1000) + (nanos / 1_000_000) + timeZoneCorrection;
        } else if (hiveTypes[column].equals(HIVE_BYTE)) {
            ByteWritable byteWritable = checkWritable(object, ByteWritable.class);
            longs[column] = byteWritable.get();
        } else if (hiveTypes[column].equals(HIVE_INT)) {
            IntWritable intWritable = checkWritable(object, IntWritable.class);
            longs[column] = intWritable.get();
        } else if (hiveTypes[column].equals(HIVE_LONG)) {
            LongWritable longWritable = checkWritable(object, LongWritable.class);
            longs[column] = longWritable.get();
        } else {
            throw new RuntimeException(String.format("%s is not a valid LONG type", type));
        }
    }
}
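
The branching above distilled into a standalone helper: every integer-family writable widens into one long slot. This is a sketch, assuming ShortWritable and ByteWritable are the Hive serde2 writables the cursor uses:

static long widenToLong(Object writable) {
    if (writable instanceof ByteWritable) {
        return ((ByteWritable) writable).get();  // byte -> long
    }
    if (writable instanceof ShortWritable) {
        return ((ShortWritable) writable).get(); // short -> long
    }
    if (writable instanceof IntWritable) {
        return ((IntWritable) writable).get();   // int -> long
    }
    if (writable instanceof LongWritable) {
        return ((LongWritable) writable).get();
    }
    throw new IllegalArgumentException(writable.getClass() + " is not an integer writable");
}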

From source file:com.facebook.presto.hive.orc.OrcHiveRecordCursor.java

License:Apache License

private void parseLongColumn(int column) {
    // don't include column number in message because it causes boxing which is expensive here
    checkArgument(!isPartitionColumn[column], "Column is a partition key");

    loaded[column] = true;
    Object object = getFieldValue(row, hiveColumnIndexes[column]);
    if (object == null) {
        nulls[column] = true;
    } else {
        nulls[column] = false;

        HiveType type = hiveTypes[column];
        if (hiveTypes[column].equals(HIVE_SHORT)) {
            ShortWritable shortWritable = (ShortWritable) object;
            longs[column] = shortWritable.get();
        } else if (hiveTypes[column].equals(HIVE_DATE)) {
            longs[column] = ((DateWritable) object).getDays();
        } else if (hiveTypes[column].equals(HIVE_TIMESTAMP)) {
            TimestampWritable timestampWritable = (TimestampWritable) object;
            long seconds = timestampWritable.getSeconds();
            int nanos = timestampWritable.getNanos();
            longs[column] = (seconds * 1000) + (nanos / 1_000_000) + timeZoneCorrection;
        } else if (hiveTypes[column].equals(HIVE_BYTE)) {
            ByteWritable byteWritable = (ByteWritable) object;
            longs[column] = byteWritable.get();
        } else if (hiveTypes[column].equals(HIVE_INT)) {
            IntWritable intWritable = (IntWritable) object;
            longs[column] = intWritable.get();
        } else if (hiveTypes[column].equals(HIVE_LONG)) {
            LongWritable longWritable = (LongWritable) object;
            longs[column] = longWritable.get();
        } else {
            throw new RuntimeException(String.format("%s is not a valid LONG type", type));
        }
    }
}

From source file:com.facebook.presto.hive.OrcHiveRecordCursor.java

License:Apache License

private void parseLongColumn(int column) {
    // don't include column number in message because it causes boxing which is expensive here
    checkArgument(!isPartitionColumn[column], "Column is a partition key");

    loaded[column] = true;
    Object object = getFieldValue(row, hiveColumnIndexes[column]);
    if (object == null) {
        nulls[column] = true;
    } else {
        nulls[column] = false;

        HiveType type = hiveTypes[column];
        if (hiveTypes[column].equals(HIVE_SHORT)) {
            ShortWritable shortWritable = (ShortWritable) object;
            longs[column] = shortWritable.get();
        } else if (hiveTypes[column].equals(HIVE_DATE)) {
            longs[column] = ((DateWritable) object).getDays() * MILLIS_IN_DAY;
        } else if (hiveTypes[column].equals(HIVE_TIMESTAMP)) {
            TimestampWritable timestampWritable = (TimestampWritable) object;
            long seconds = timestampWritable.getSeconds();
            int nanos = timestampWritable.getNanos();
            longs[column] = (seconds * 1000) + (nanos / 1_000_000) + timeZoneCorrection;
        } else if (hiveTypes[column].equals(HIVE_BYTE)) {
            ByteWritable byteWritable = (ByteWritable) object;
            longs[column] = byteWritable.get();
        } else if (hiveTypes[column].equals(HIVE_INT)) {
            IntWritable intWritable = (IntWritable) object;
            longs[column] = intWritable.get();
        } else if (hiveTypes[column].equals(HIVE_LONG)) {
            LongWritable longWritable = (LongWritable) object;
            longs[column] = longWritable.get();
        } else {
            throw new RuntimeException(String.format("%s is not a valid LONG type", type));
        }
    }
}

From source file:com.github.ygf.pagerank.InLinksReducer.java

License:Apache License

@Override
protected void reduce(IntWritable inKey, Iterable<IntWritable> inValues, Context context)
        throws IOException, InterruptedException {

    int sum = 0;

    for (IntWritable inValue : inValues) {
        sum += inValue.get();
    }

    context.write(inKey, new IntWritable(sum));
}
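
Since integer summation is associative and commutative, the same reducer class can safely double as a combiner. A hypothetical job wiring, with the job name assumed:

static Job configureInLinksJob(Configuration conf) throws IOException {
    Job job = Job.getInstance(conf, "in-links"); // job name assumed
    job.setCombinerClass(InLinksReducer.class);  // safe: sum is associative
    job.setReducerClass(InLinksReducer.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(IntWritable.class);
    return job;
}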