Example usage for org.apache.hadoop.io BytesWritable getLength

Introduction

This page collects example usages of org.apache.hadoop.io.BytesWritable#getLength from open-source projects.

Prototype

@Override
public int getLength() 

Document

Get the current size of the buffer.
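
To make the contract concrete, here is a minimal, self-contained sketch (not taken from the projects below). getBytes() returns the backing buffer, which is valid only up to getLength() - 1 and may be padded beyond that, so callers bound every read or copy by getLength():

import java.util.Arrays;

import org.apache.hadoop.io.BytesWritable;

public class BytesWritableLengthDemo {
    public static void main(String[] args) {
        BytesWritable bw = new BytesWritable(new byte[] { 1, 2, 3 });
        bw.setSize(2); // shrink the valid region; the backing buffer keeps its capacity

        System.out.println(bw.getLength());         // 2 -- the valid byte count
        System.out.println(bw.getBytes().length);   // 3 -- the buffer capacity, not the size

        // Copy out exactly the valid bytes -- the idiom used throughout the examples below:
        byte[] valid = Arrays.copyOf(bw.getBytes(), bw.getLength());
        System.out.println(Arrays.toString(valid)); // [1, 2]
    }
}

Hadoop 2.x and later also provide BytesWritable#copyBytes(), which performs the same bounded copy.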

Usage

From source file: org.apache.hawq.pxf.plugins.hive.HiveResolver.java

License: Apache License

private void resolvePrimitive(Object o, PrimitiveObjectInspector oi, List<OneField> record, boolean toFlatten)
        throws IOException {
    Object val;
    switch (oi.getPrimitiveCategory()) {
    case BOOLEAN: {
        val = (o != null) ? ((BooleanObjectInspector) oi).get(o) : null;
        addOneFieldToRecord(record, BOOLEAN, val);
        break;
    }
    case SHORT: {
        val = (o != null) ? ((ShortObjectInspector) oi).get(o) : null;
        addOneFieldToRecord(record, SMALLINT, val);
        break;
    }
    case INT: {
        val = (o != null) ? ((IntObjectInspector) oi).get(o) : null;
        addOneFieldToRecord(record, INTEGER, val);
        break;
    }
    case LONG: {
        val = (o != null) ? ((LongObjectInspector) oi).get(o) : null;
        addOneFieldToRecord(record, BIGINT, val);
        break;
    }
    case FLOAT: {
        val = (o != null) ? ((FloatObjectInspector) oi).get(o) : null;
        addOneFieldToRecord(record, REAL, val);
        break;
    }
    case DOUBLE: {
        val = (o != null) ? ((DoubleObjectInspector) oi).get(o) : null;
        addOneFieldToRecord(record, FLOAT8, val);
        break;
    }
    case DECIMAL: {
        String sVal = null;
        if (o != null) {
            HiveDecimal hd = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o);
            if (hd != null) {
                BigDecimal bd = hd.bigDecimalValue();
                sVal = bd.toString();
            }
        }
        addOneFieldToRecord(record, NUMERIC, sVal);
        break;
    }
    case STRING: {
        val = (o != null) ? ((StringObjectInspector) oi).getPrimitiveJavaObject(o) : null;
        addOneFieldToRecord(record, TEXT, toFlatten ? String.format("\"%s\"", val) : val);
        break;
    }
    case VARCHAR:
        val = (o != null) ? ((HiveVarcharObjectInspector) oi).getPrimitiveJavaObject(o) : null;
        addOneFieldToRecord(record, VARCHAR, toFlatten ? String.format("\"%s\"", val) : val);
        break;
    case CHAR:
        val = (o != null) ? ((HiveCharObjectInspector) oi).getPrimitiveJavaObject(o) : null;
        addOneFieldToRecord(record, BPCHAR, toFlatten ? String.format("\"%s\"", val) : val);
        break;
    case BINARY: {
        byte[] toEncode = null;
        if (o != null) {
            BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
            toEncode = new byte[bw.getLength()];
            System.arraycopy(bw.getBytes(), 0, toEncode, 0, bw.getLength());
        }
        addOneFieldToRecord(record, BYTEA, toEncode);
        break;
    }
    case TIMESTAMP: {
        val = (o != null) ? ((TimestampObjectInspector) oi).getPrimitiveJavaObject(o) : null;
        addOneFieldToRecord(record, TIMESTAMP, val);
        break;
    }
    case DATE:
        val = (o != null) ? ((DateObjectInspector) oi).getPrimitiveJavaObject(o) : null;
        addOneFieldToRecord(record, DATE, val);
        break;
    case BYTE: { /* TINYINT */
        val = (o != null) ? Short.valueOf(((ByteObjectInspector) oi).get(o)) : null;
        addOneFieldToRecord(record, SMALLINT, val);
        break;
    }
    default: {
        throw new UnsupportedTypeException(
                oi.getTypeName() + " conversion is not supported by " + getClass().getSimpleName());
    }
    }
}
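
Note the BINARY branch above: rather than handing getBytes() straight to addOneFieldToRecord, it copies exactly bw.getLength() bytes, because the array that getBytes() returns is a backing buffer that may be longer than the valid data.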

From source file: org.apache.ignite.hadoop.io.BytesWritablePartiallyRawComparator.java

License: Apache License

/** {@inheritDoc} */
@Override
public int compare(BytesWritable val1, long val2Ptr, int val2Len) {
    return HadoopUtils.compareBytes(val1.getBytes(), val1.getLength(), val2Ptr + LEN_BYTES,
            val2Len - LEN_BYTES);
}
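
Here val2Ptr addresses a serialized BytesWritable held off-heap; the LEN_BYTES offset (presumably the 4-byte length header that BytesWritable#write emits) skips that header so the raw comparison covers the same payload that val1.getLength() measures on the deserialized side.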

From source file: org.apache.kylin.source.kafka.hadoop.KafkaFlatTableMapper.java

License: Apache License

@Override
public void doMap(LongWritable key, BytesWritable value, Context context)
        throws IOException, InterruptedException {
    outKey.set(Bytes.toBytes(key.get()));
    outValue.set(value.getBytes(), 0, value.getLength());
    context.write(outKey, outValue);
}

From source file: org.apache.orc.TestColumnStatistics.java

License: Apache License

void appendRow(VectorizedRowBatch batch, BytesWritable bytes, String str) {
    int row = batch.size++;
    if (bytes == null) {
        batch.cols[0].noNulls = false;
        batch.cols[0].isNull[row] = true;
    } else {
        ((BytesColumnVector) batch.cols[0]).setVal(row, bytes.getBytes(), 0, bytes.getLength());
    }
    if (str == null) {
        batch.cols[1].noNulls = false;
        batch.cols[1].isNull[row] = true;
    } else {
        ((BytesColumnVector) batch.cols[1]).setVal(row, str.getBytes());
    }
}

From source file: org.apache.orc.TestVectorOrcFile.java

License: Apache License

private static void setBigRow(VectorizedRowBatch batch, int rowId, Boolean b1, Byte b2, Short s1, Integer i1,
        Long l1, Float f1, Double d1, BytesWritable b3, String s2, MiddleStruct m1, List<InnerStruct> l2,
        Map<String, InnerStruct> m2) {
    ((LongColumnVector) batch.cols[0]).vector[rowId] = b1 ? 1 : 0;
    ((LongColumnVector) batch.cols[1]).vector[rowId] = b2;
    ((LongColumnVector) batch.cols[2]).vector[rowId] = s1;
    ((LongColumnVector) batch.cols[3]).vector[rowId] = i1;
    ((LongColumnVector) batch.cols[4]).vector[rowId] = l1;
    ((DoubleColumnVector) batch.cols[5]).vector[rowId] = f1;
    ((DoubleColumnVector) batch.cols[6]).vector[rowId] = d1;
    if (b3 != null) {
        ((BytesColumnVector) batch.cols[7]).setVal(rowId, b3.getBytes(), 0, b3.getLength());
    } else {
        batch.cols[7].isNull[rowId] = true;
        batch.cols[7].noNulls = false;
    }
    if (s2 != null) {
        ((BytesColumnVector) batch.cols[8]).setVal(rowId, s2.getBytes());
    } else {
        batch.cols[8].isNull[rowId] = true;
        batch.cols[8].noNulls = false;
    }
    setMiddleStruct((StructColumnVector) batch.cols[9], rowId, m1);
    setInnerList((ListColumnVector) batch.cols[10], rowId, l2);
    setInnerMap((MapColumnVector) batch.cols[11], rowId, m2);
}

From source file: org.apache.orc.TestVectorOrcFile.java

License: Apache License

private static void checkBigRow(VectorizedRowBatch batch, int rowInBatch, int rowId, boolean b1, byte b2,
        short s1, int i1, long l1, float f1, double d1, BytesWritable b3, String s2, MiddleStruct m1,
        List<InnerStruct> l2, Map<String, InnerStruct> m2) {
    assertEquals("row " + rowId, b1, getBoolean(batch, rowInBatch));
    assertEquals("row " + rowId, b2, getByte(batch, rowInBatch));
    assertEquals("row " + rowId, s1, getShort(batch, rowInBatch));
    assertEquals("row " + rowId, i1, getInt(batch, rowInBatch));
    assertEquals("row " + rowId, l1, getLong(batch, rowInBatch));
    assertEquals("row " + rowId, f1, getFloat(batch, rowInBatch), 0.0001);
    assertEquals("row " + rowId, d1, getDouble(batch, rowInBatch), 0.0001);
    if (b3 != null) {
        BytesColumnVector bytes = (BytesColumnVector) batch.cols[7];
        assertEquals("row " + rowId, b3.getLength(), bytes.length[rowInBatch]);
        for (int i = 0; i < b3.getLength(); ++i) {
            assertEquals("row " + rowId + " byte " + i, b3.getBytes()[i],
                    bytes.vector[rowInBatch][bytes.start[rowInBatch] + i]);
        }
    } else {
        assertEquals("row " + rowId, true, batch.cols[7].isNull[rowInBatch]);
        assertEquals("row " + rowId, false, batch.cols[7].noNulls);
    }
    if (s2 != null) {
        assertEquals("row " + rowId, s2, getText(batch, rowInBatch).toString());
    } else {
        assertEquals("row " + rowId, true, batch.cols[8].isNull[rowInBatch]);
        assertEquals("row " + rowId, false, batch.cols[8].noNulls);
    }
    checkMiddleStruct((StructColumnVector) batch.cols[9], rowId, rowInBatch, m1);
    checkInnerList((ListColumnVector) batch.cols[10], rowId, rowInBatch, l2);
    checkInnerMap((MapColumnVector) batch.cols[11], rowId, rowInBatch, m2);
}

From source file: org.apache.parquet.hadoop.thrift.ThriftBytesWriteSupport.java

License: Apache License

private TProtocol protocol(BytesWritable record) {
    TProtocol protocol = protocolFactory
            .getProtocol(new TIOStreamTransport(new ByteArrayInputStream(record.getBytes())));

    /* Reduce the chance of OOM when data is corrupted. When readBinary is called on TBinaryProtocol, it reads
     the length of the binary first, so corrupted data could yield a huge integer as the binary length and
     trigger an OOM. Currently this fix only applies to TBinaryProtocol, which has setReadLength defined.
     */
    if (IS_READ_LENGTH_SETABLE && protocol instanceof TBinaryProtocol) {
        ((TBinaryProtocol) protocol).setReadLength(record.getLength());
    }

    return protocol;
}
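
For contrast, a minimal sketch of building such a bounded protocol directly (assumptions: a Thrift version in which TBinaryProtocol still exposes setReadLength, as the reflection guard above implies, and a hypothetical helper name). Bounding both the input stream and the read length keeps a corrupted length prefix from forcing a huge allocation:

import java.io.ByteArrayInputStream;

import org.apache.hadoop.io.BytesWritable;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TIOStreamTransport;

class BoundedProtocols {
    /** Hypothetical helper: a TBinaryProtocol capped at the record's valid length. */
    static TProtocol boundedProtocol(BytesWritable record) {
        // Bound the stream to the valid region, not the (possibly padded) backing buffer.
        TProtocol protocol = new TBinaryProtocol.Factory().getProtocol(new TIOStreamTransport(
                new ByteArrayInputStream(record.getBytes(), 0, record.getLength())));
        // Cap readBinary allocations at the record length (this setter was removed in newer Thrift).
        ((TBinaryProtocol) protocol).setReadLength(record.getLength());
        return protocol;
    }
}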

From source file: org.apache.pig.builtin.TestOrcStorage.java

License: Apache License

@SuppressWarnings("rawtypes")
private void compareData(Object expected, Object actual) {
    if (expected instanceof Text) {
        assertEquals(String.class, actual.getClass());
        assertEquals(expected.toString(), actual);
    } else if (expected instanceof ShortWritable) {
        assertEquals(Integer.class, actual.getClass());
        assertEquals((int) ((ShortWritable) expected).get(), actual);
    } else if (expected instanceof IntWritable) {
        assertEquals(Integer.class, actual.getClass());
        assertEquals(((IntWritable) expected).get(), actual);
    } else if (expected instanceof LongWritable) {
        assertEquals(Long.class, actual.getClass());
        assertEquals(((LongWritable) expected).get(), actual);
    } else if (expected instanceof FloatWritable) {
        assertEquals(Float.class, actual.getClass());
        assertEquals(((FloatWritable) expected).get(), actual);
    } else if (expected instanceof HiveDecimalWritable) {
        assertEquals(BigDecimal.class, actual.getClass());
        assertEquals(((HiveDecimalWritable) expected).toString(), actual.toString());
    } else if (expected instanceof DoubleWritable) {
        assertEquals(Double.class, actual.getClass());
        assertEquals(((DoubleWritable) expected).get(), actual);
    } else if (expected instanceof BooleanWritable) {
        assertEquals(Boolean.class, actual.getClass());
        assertEquals(((BooleanWritable) expected).get(), actual);
    } else if (expected instanceof TimestampWritable) {
        assertEquals(DateTime.class, actual.getClass());
        assertEquals(((TimestampWritable) expected).getTimestamp().getTime(), ((DateTime) actual).getMillis());
    } else if (expected instanceof BytesWritable) {
        assertEquals(DataByteArray.class, actual.getClass());
        BytesWritable bw = (BytesWritable) expected;
        assertEquals(new DataByteArray(bw.getBytes(), 0, bw.getLength()), actual);
    } else if (expected instanceof ByteWritable) {
        assertEquals(Integer.class, actual.getClass());
        assertEquals((int) ((ByteWritable) expected).get(), actual);
    } else if (expected instanceof OrcStruct) {
        assertEquals(BinSedesTuple.class, actual.getClass());
        // TODO: compare actual values. No getters in OrcStruct
    } else if (expected instanceof ArrayList) {
        assertEquals(DefaultDataBag.class, actual.getClass());
        // TODO: compare actual values. No getters in OrcStruct
    } else if (expected instanceof HashMap) {
        assertEquals(HashMap.class, actual.getClass());
        assertEquals(((HashMap) expected).size(), ((HashMap) actual).size());
        // TODO: compare actual values. No getters in OrcStruct
    } else if (expected == null) {
        assertEquals(expected, actual);
    } else {
        Assert.fail("Unknown object type: " + expected.getClass().getName());
    }
}

From source file: org.apache.pig.impl.io.NullableBytesWritable.java

License: Apache License

public Object getValueAsPigType() {
    BytesWritable bw = (BytesWritable) mValue;
    return isNull() ? null : new DataByteArray(bw.getBytes(), 0, bw.getLength());
}

From source file: org.apache.pig.impl.util.orc.OrcUtils.java

License: Apache License

public static Object getPrimaryFromOrc(Object obj, PrimitiveObjectInspector poi) {
    Object result = null;
    if (obj == null) {
        return result;
    }
    switch (poi.getPrimitiveCategory()) {
    case FLOAT:
    case DOUBLE:
    case BOOLEAN:
    case INT:
    case LONG:
    case STRING:
        result = poi.getPrimitiveJavaObject(obj);
        break;
    case BYTE:
        result = (int) (Byte) poi.getPrimitiveJavaObject(obj);
        break;
    case SHORT:
        result = (int) (Short) poi.getPrimitiveJavaObject(obj);
        break;
    case BINARY:
        BytesWritable bw = (BytesWritable) obj;
        // Make a copy
        result = new DataByteArray(bw.getBytes(), 0, bw.getLength());
        break;
    case TIMESTAMP:
        java.sql.Timestamp origTimeStamp = (java.sql.Timestamp) poi.getPrimitiveJavaObject(obj);
        result = new DateTime(origTimeStamp.getTime());
        break;
    case DATE:
        java.sql.Date origDate = (java.sql.Date) poi.getPrimitiveJavaObject(obj);
        result = new DateTime(origDate.getTime());
        break;
    case DECIMAL:
        org.apache.hadoop.hive.common.type.HiveDecimal origDecimal = (org.apache.hadoop.hive.common.type.HiveDecimal) poi
                .getPrimitiveJavaObject(obj);
        result = origDecimal.bigDecimalValue();
        break;
    default:
        throw new IllegalArgumentException("Unknown primitive type " + (poi).getPrimitiveCategory());
    }
    return result;
}