Example usage for org.apache.hadoop.io ShortWritable ShortWritable

Introduction

This page shows example usage of the org.apache.hadoop.io.ShortWritable constructor ShortWritable(short value).

Prototype

public ShortWritable(short value) 
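
Constructs a ShortWritable holding the given short. A minimal usage sketch (the class and variable names here are illustrative, not from any of the sources below):

import org.apache.hadoop.io.ShortWritable;

public class ShortWritableDemo {
    public static void main(String[] args) {
        ShortWritable w = new ShortWritable((short) 42); // wrap an initial value
        short s = w.get();                               // read it back: 42
        w.set((short) 7);                                // writables are mutable and reusable
        System.out.println(s + " -> " + w.get());        // prints "42 -> 7"
    }
}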

Usage

From source file: com.axiomine.largecollections.kryo.serializers.ShortWritableSerializer.java

License: Apache License

public ShortWritable read(Kryo kryo, Input input, Class<ShortWritable> type) {
    return new ShortWritable(input.readShort());
}
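
The matching write side is not shown on this page; a sketch of what it would look like in the same Kryo Serializer subclass (an assumption, the actual serializer may differ):

public void write(Kryo kryo, Output output, ShortWritable object) {
    output.writeShort(object.get()); // symmetric with input.readShort() above
}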

From source file: com.github.ygf.pagerank.PageRankIterationMapper.java

License: Apache License

@Override
public void map(ShortArrayWritable inKey, MatrixBlockWritable inValue, Context context)
        throws IOException, InterruptedException {

    // This task gets each block M_{i,j}, loads the corresponding stripe j
    // of the vector v_{k-1} and produces the partial result of the stripe i
    // of the vector v_k.

    Configuration conf = context.getConfiguration();
    int iter = Integer.parseInt(conf.get("pagerank.iteration"));
    int numPages = Integer.parseInt(conf.get("pagerank.num_pages"));
    short blockSize = Short.parseShort(conf.get("pagerank.block_size"));

    Writable[] blockIndexes = inKey.get();
    short i = ((ShortWritable) blockIndexes[0]).get();
    short j = ((ShortWritable) blockIndexes[1]).get();

    int vjSize = (j > numPages / blockSize) ? (numPages % blockSize) : blockSize;
    FloatWritable[] vj = new FloatWritable[vjSize];

    if (iter == 1) {
        // Initial PageRank vector with 1/n for all pages.
        for (int k = 0; k < vj.length; k++) {
            vj[k] = new FloatWritable(1.0f / numPages);
        }
    } else {
        // Load the stripe j of the vector v_{k-1} from the MapFiles.
        Path outputDir = MapFileOutputFormat.getOutputPath(context).getParent();
        Path vjDir = new Path(outputDir, "v" + (iter - 1));
        MapFile.Reader[] readers = MapFileOutputFormat.getReaders(vjDir, conf);
        Partitioner<ShortWritable, FloatArrayWritable> partitioner = new HashPartitioner<ShortWritable, FloatArrayWritable>();
        ShortWritable key = new ShortWritable(j);
        FloatArrayWritable value = new FloatArrayWritable();
        MapFileOutputFormat.getEntry(readers, partitioner, key, value);
        Writable[] writables = value.get();
        for (int k = 0; k < vj.length; k++) {
            vj[k] = (FloatWritable) writables[k];
        }
        for (MapFile.Reader reader : readers) {
            reader.close();
        }
    }

    // Initialize the partial result i of the vector v_k.
    int viSize = (i > numPages / blockSize) ? (numPages % blockSize) : blockSize;
    FloatWritable[] vi = new FloatWritable[viSize];
    for (int k = 0; k < vi.length; k++) {
        vi[k] = new FloatWritable(0);
    }

    // Multiply M_{i,j} by the stripe j of the vector v_{k-1} to obtain the
    // partial result i of the vector v_k.
    Writable[][] blockColumns = inValue.get();
    for (int k = 0; k < blockColumns.length; k++) {
        Writable[] blockColumn = blockColumns[k];
        if (blockColumn.length > 0) {
            int vDegree = ((ShortWritable) blockColumn[0]).get();
            for (int columnIndex = 1; columnIndex < blockColumn.length; columnIndex++) {
                int l = ((ShortWritable) blockColumn[columnIndex]).get();
                vi[l].set(vi[l].get() + (1.0f / vDegree) * vj[k].get());
            }
        }
    }

    context.write(new ShortWritable(i), new FloatArrayWritable(vi));
}
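
ShortWritable plays three roles above: the block indexes in the composite input key, the MapFile lookup key for stripe j, and the output key for stripe i. The lookup pattern, distilled into a hypothetical helper (names are illustrative):

static FloatArrayWritable readStripe(MapFile.Reader[] readers, short stripe) throws IOException {
    Partitioner<ShortWritable, FloatArrayWritable> partitioner = new HashPartitioner<ShortWritable, FloatArrayWritable>();
    FloatArrayWritable value = new FloatArrayWritable();
    MapFileOutputFormat.getEntry(readers, partitioner, new ShortWritable(stripe), value);
    return value;
}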

From source file: hydrograph.engine.cascading.scheme.hive.parquet.ParquetWritableUtils.java

License: Apache License

private static Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }

    switch (inspector.getPrimitiveCategory()) {
    case VOID:
        return null;
    case BOOLEAN:
        return new BooleanWritable(
                ((BooleanObjectInspector) inspector).get(new BooleanWritable((boolean) obj)));
    case BYTE:
        return new ByteWritable(((ByteObjectInspector) inspector).get(new ByteWritable((byte) obj)));
    case DOUBLE:
        return new DoubleWritable(((DoubleObjectInspector) inspector).get(new DoubleWritable((double) obj)));
    case FLOAT:
        return new FloatWritable(((FloatObjectInspector) inspector).get(new FloatWritable((float) obj)));
    case INT:
        return new IntWritable(((IntObjectInspector) inspector).get(new IntWritable((int) obj)));
    case LONG:
        return new LongWritable(((LongObjectInspector) inspector).get(new LongWritable((long) obj)));
    case SHORT:
        return new ShortWritable(((ShortObjectInspector) inspector).get(new ShortWritable((short) obj)));
    case STRING:
        String v;
        if (obj instanceof Long) {
            SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
            Date date = new Date((long) obj);
            v = df.format(date);
        } else if (obj instanceof BigDecimal) {
            BigDecimal bigDecimalObj = (BigDecimal) obj;
            v = bigDecimalObj.toString();
        } else {
            v = ((StringObjectInspector) inspector).getPrimitiveJavaObject(obj);
        }
        try {
            return new BytesWritable(v.getBytes("UTF-8"));
        } catch (UnsupportedEncodingException e) {
            throw new SerDeException("Failed to encode string in UTF-8", e);
        }
    case DECIMAL:
        HiveDecimal hd;
        if (obj instanceof Double) {
            hd = HiveDecimal.create(new BigDecimal((Double) obj));
        } else if (obj instanceof BigDecimal) {
            hd = HiveDecimal.create((BigDecimal) obj);
        } else {
            // If obj is neither a Double nor a BigDecimal but is still a
            // valid number, toString() yields its textual representation,
            // from which a BigDecimal is constructed.
            hd = HiveDecimal.create(new BigDecimal(obj.toString()));
        }
        return new HiveDecimalWritable(hd);
    case TIMESTAMP:
        return new TimestampWritable(((TimestampObjectInspector) inspector)
                .getPrimitiveJavaObject(new TimestampWritable(new Timestamp((long) obj))));
    case DATE:
        return new DateWritable(((DateObjectInspector) inspector)
                .getPrimitiveJavaObject(new DateWritable(new Date((long) obj))));
    case CHAR:
        String strippedValue = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(obj)
                .getStrippedValue();
        return new BytesWritable(Binary.fromString(strippedValue).getBytes());
    case VARCHAR:
        String value = ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(obj).getValue();
        return new BytesWritable(Binary.fromString(value).getBytes());
    default:
        throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
    }
}
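
In the SHORT branch, the inspector unwraps the incoming value and the result is re-wrapped in a ShortWritable. A minimal sketch using Hive's Java-primitive short inspector (a hypothetical inspector choice; the real call site supplies its own):

ShortObjectInspector oi = PrimitiveObjectInspectorFactory.javaShortObjectInspector;
short s = oi.get(Short.valueOf((short) 42)); // the inspector unboxes the value
Writable w = new ShortWritable(s);           // re-wrapped as ShortWritable(42)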

From source file: org.apache.orc.mapred.TestOrcOutputFormat.java

License: Apache License

@Test
public void testAllTypes() throws Exception {
    conf.set("mapreduce.task.attempt.id", "attempt_20160101_0001_m_000001_0");
    conf.setOutputCommitter(NullOutputCommitter.class);
    final String typeStr = "struct<b1:binary,b2:boolean,b3:tinyint,"
            + "c:char(10),d1:date,d2:decimal(20,5),d3:double,fff:float,int:int,"
            + "l:array<bigint>,map:map<smallint,string>,"
            + "str:struct<u:uniontype<timestamp,varchar(100)>>,ts:timestamp>";
    OrcConf.MAPRED_OUTPUT_SCHEMA.setString(conf, typeStr);
    FileOutputFormat.setOutputPath(conf, workDir);
    TypeDescription type = TypeDescription.fromString(typeStr);

    // build a row object
    OrcStruct row = (OrcStruct) OrcStruct.createValue(type);
    ((BytesWritable) row.getFieldValue(0)).set(new byte[] { 1, 2, 3, 4 }, 0, 4);
    ((BooleanWritable) row.getFieldValue(1)).set(true);
    ((ByteWritable) row.getFieldValue(2)).set((byte) 23);
    ((Text) row.getFieldValue(3)).set("aaabbbcccddd");
    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
    ((DateWritable) row.getFieldValue(4)).set(DateWritable.millisToDays(format.parse("2016-04-01").getTime()));
    ((HiveDecimalWritable) row.getFieldValue(5)).set(new HiveDecimalWritable("1.23"));
    ((DoubleWritable) row.getFieldValue(6)).set(1.5);
    ((FloatWritable) row.getFieldValue(7)).set(4.5f);
    ((IntWritable) row.getFieldValue(8)).set(31415);
    OrcList<LongWritable> longList = (OrcList<LongWritable>) row.getFieldValue(9);
    longList.add(new LongWritable(123));
    longList.add(new LongWritable(456));
    OrcMap<ShortWritable, Text> map = (OrcMap<ShortWritable, Text>) row.getFieldValue(10);
    map.put(new ShortWritable((short) 1000), new Text("aaaa"));
    map.put(new ShortWritable((short) 123), new Text("bbbb"));
    OrcStruct struct = (OrcStruct) row.getFieldValue(11);
    OrcUnion union = (OrcUnion) struct.getFieldValue(0);
    union.set((byte) 1, new Text("abcde"));
    ((OrcTimestamp) row.getFieldValue(12)).set("1996-12-11 15:00:00");
    NullWritable nada = NullWritable.get();
    RecordWriter<NullWritable, OrcStruct> writer = new OrcOutputFormat<OrcStruct>().getRecordWriter(fs, conf,
            "all.orc", Reporter.NULL);
    for (int r = 0; r < 10; ++r) {
        row.setFieldValue(8, new IntWritable(r * 10));
        writer.write(nada, row);
    }
    union.set((byte) 0, new OrcTimestamp("2011-12-25 12:34:56"));
    for (int r = 0; r < 10; ++r) {
        row.setFieldValue(8, new IntWritable(r * 10 + 100));
        writer.write(nada, row);
    }
    OrcStruct row2 = new OrcStruct(type);
    writer.write(nada, row2);
    row.setFieldValue(8, new IntWritable(210));
    writer.write(nada, row);
    writer.close(Reporter.NULL);

    FileSplit split = new FileSplit(new Path(workDir, "all.orc"), 0, 100000, new String[0]);
    RecordReader<NullWritable, OrcStruct> reader = new OrcInputFormat<OrcStruct>().getRecordReader(split, conf,
            Reporter.NULL);
    nada = reader.createKey();
    row = reader.createValue();
    for (int r = 0; r < 22; ++r) {
        assertEquals(true, reader.next(nada, row));
        if (r == 20) {
            for (int c = 0; c < 12; ++c) {
                assertEquals(null, row.getFieldValue(c));
            }
        } else {
            assertEquals(new BytesWritable(new byte[] { 1, 2, 3, 4 }), row.getFieldValue(0));
            assertEquals(new BooleanWritable(true), row.getFieldValue(1));
            assertEquals(new ByteWritable((byte) 23), row.getFieldValue(2));
            assertEquals(new Text("aaabbbcccd"), row.getFieldValue(3));
            assertEquals(new DateWritable(DateWritable.millisToDays(format.parse("2016-04-01").getTime())),
                    row.getFieldValue(4));
            assertEquals(new HiveDecimalWritable("1.23"), row.getFieldValue(5));
            assertEquals(new DoubleWritable(1.5), row.getFieldValue(6));
            assertEquals(new FloatWritable(4.5f), row.getFieldValue(7));
            assertEquals(new IntWritable(r * 10), row.getFieldValue(8));
            assertEquals(longList, row.getFieldValue(9));
            assertEquals(map, row.getFieldValue(10));
            if (r < 10) {
                union.set((byte) 1, new Text("abcde"));
            } else {
                union.set((byte) 0, new OrcTimestamp("2011-12-25 12:34:56"));
            }
            assertEquals("row " + r, struct, row.getFieldValue(11));
            assertEquals("row " + r, new OrcTimestamp("1996-12-11 15:00:00"), row.getFieldValue(12));
        }
    }
    assertEquals(false, reader.next(nada, row));
}
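
The map<smallint,string> field above materializes as an OrcMap keyed by ShortWritable; since OrcMap extends TreeMap and ShortWritable compares numerically, the key 123 sorts before 1000 when the row is written. A standalone construction sketch (assuming OrcMap's TypeDescription constructor):

TypeDescription mapType = TypeDescription.fromString("map<smallint,string>");
OrcMap<ShortWritable, Text> m = new OrcMap<ShortWritable, Text>(mapType);
m.put(new ShortWritable((short) 1), new Text("one"));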

From source file: org.apache.tajo.plan.util.WritableTypeConverter.java

License: Apache License

public static Writable convertDatum2Writable(Datum value) {
    switch (value.kind()) {
    case INT1:
        return new ByteWritable(value.asByte());
    case INT2:
        return new ShortWritable(value.asInt2());
    case INT4:
        return new IntWritable(value.asInt4());
    case INT8:
        return new LongWritable(value.asInt8());

    case FLOAT4:
        return new FloatWritable(value.asFloat4());
    case FLOAT8:
        return new DoubleWritable(value.asFloat8());

    // NOTE: value should be DateDatum
    case DATE:
        return new DateWritable(value.asInt4() - DateTimeConstants.UNIX_EPOCH_JDATE);

    // NOTE: value should be TimestampDatum
    case TIMESTAMP:
        TimestampWritable result = new TimestampWritable();
        result.setTime(DateTimeUtil.julianTimeToJavaTime(value.asInt8()));
        return result;

    case CHAR: {
        String str = value.asChars();
        return new HiveCharWritable(new HiveChar(str, str.length()));
    }
    case TEXT:
        return new Text(value.asChars());
    case VARBINARY:
        return new BytesWritable(value.asByteArray());

    case NULL_TYPE:
        return null;
    }

    throw new TajoRuntimeException(new NotImplementedException(TypeStringEncoder.encode(value.type())));
}
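
Going the other way for INT2, a two-byte Datum becomes a ShortWritable; a sketch, assuming Tajo's DatumFactory:

Datum d = DatumFactory.createInt2((short) 7);
ShortWritable w = (ShortWritable) WritableTypeConverter.convertDatum2Writable(d);
// w.get() == 7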

From source file: org.shaf.core.util.IOUtils.java

License: Apache License

/**
 * Writes an {@link Object} to the {@link DataOutput}.
 *
 * @param obj
 *            the object to write.
 * @param out
 *            the data output stream.
 * @throws IOException
 *             if I/O error occurs.
 */
public static final void writeObject(Object obj, DataOutput out) throws IOException {
    try {
        if (obj == null) {
            throw new IOException("Writing object is not defined: null.");
        } else if (ClassUtils.isBoolean(obj)) {
            (new BooleanWritable((boolean) obj)).write(out);
        } else if (ClassUtils.isByte(obj)) {
            (new ByteWritable((byte) obj)).write(out);
        } else if (ClassUtils.isShort(obj)) {
            (new ShortWritable((short) obj)).write(out);
        } else if (ClassUtils.isInteger(obj)) {
            (new IntWritable((int) obj)).write(out);
        } else if (ClassUtils.isLong(obj)) {
            (new LongWritable((long) obj)).write(out);
        } else if (ClassUtils.isFloat(obj)) {
            (new FloatWritable((float) obj)).write(out);
        } else if (ClassUtils.isDouble(obj)) {
            (new DoubleWritable((double) obj)).write(out);
        } else if (ClassUtils.isString(obj)) {
            Text.writeString(out, (String) obj);
        } else if (ClassUtils.isEnum(obj)) {
            (new IntWritable(((Enum<?>) obj).ordinal())).write(out);
        } else if (ClassUtils.isArray(obj)) {
            int length = Array.getLength(obj);
            writeObject(length, out);
            for (int j = 0; j < length; j++) {
                writeObject(Array.get(obj, j), out);
            }
        } else {
            ((Writable) obj).write(out);
        }
    } catch (IllegalArgumentException exc) {
        throw new IOException(exc);
    }
}
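
The read side for shorts mirrors this branch: a ShortWritable consumes the same two bytes via readFields. A sketch of that path (the actual readObject, exercised by the test below, may differ):

ShortWritable w = new ShortWritable((short) 0);
w.readFields(in);  // consumes the two bytes written by new ShortWritable(s).write(out)
short value = w.get();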

From source file: org.shaf.core.util.IOUtilsTest.java

License: Apache License

/**
 * Test reading of {@code short} value.
 */
@Test
public void testReadShort() {
    byte[] buf = null;

    short value = 123;
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(baos);) {
        new ShortWritable(value).write(out);
        buf = baos.toByteArray();
    } catch (IOException exc) {
        fail(exc.getMessage());
    }

    try (ByteArrayInputStream bais = new ByteArrayInputStream(buf);
            DataInputStream in = new DataInputStream(bais);) {
        assertEquals(value, (short) IOUtils.readObject(short.class, in));
    } catch (IOException exc) {
        fail(exc.getMessage());
    }
}
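
On the wire this round trip is exactly two bytes: ShortWritable.write delegates to DataOutput.writeShort, and IOUtils.readObject recovers the value through ShortWritable.readFields.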