Example usage for org.apache.thrift TDeserializer TDeserializer

List of usage examples for org.apache.thrift TDeserializer TDeserializer

Introduction

On this page you can find example usage of the org.apache.thrift TDeserializer constructor, TDeserializer(TProtocolFactory).

Prototype

public TDeserializer(TProtocolFactory protocolFactory) 

Document

Create a new TDeserializer.
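
For orientation, here is a minimal sketch of the round trip this constructor enables, assuming a Thrift-generated struct named MyStruct (a placeholder for any TBase class from your own IDL, not part of the Thrift library):

import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;

public class TDeserializerExample {
    public static void main(String[] args) throws TException {
        // MyStruct stands in for any Thrift-generated struct (hypothetical here).
        MyStruct original = new MyStruct();

        // Serialize with the binary protocol...
        TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
        byte[] bytes = serializer.serialize(original);

        // ...then deserialize into a fresh instance using the same protocol factory.
        TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
        MyStruct copy = new MyStruct();
        deserializer.deserialize(copy, bytes);
    }
}

Whatever protocol factory is passed to the constructor must match the one used when the data was serialized; the examples below pair TBinaryProtocol with hex-encoded byte arrays and TJSONProtocol with UTF-8 encoded JSON strings.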

Usage

From source file:org.apache.cassandra.hadoop2.pig.CassandraStorage.java

License:Apache License

/** Convert a hex-encoded string into a list of index expressions. */
private static List<IndexExpression> indexExpressionsFromString(String ie) {
    assert ie != null;
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    IndexClause indexClause = new IndexClause();
    try {
        deserializer.deserialize(indexClause, Hex.hexToBytes(ie));
    } catch (TException e) {
        throw new RuntimeException(e);
    }
    return indexClause.getExpressions();
}

From source file:org.apache.hadoop.hive.cassandra.CassandraPushdownPredicate.java

License:Apache License

/**
 * Deserialize a set of ColumnDefs for indexed columns, read
 * from the Job configuration.
 *
 * @param serialized hex-encoded, delimited column metadata
 * @return set of column metadata objects, which may be empty but never null
 */
public static Set<ColumnDef> deserializeIndexedColumns(String serialized) {
    Set<ColumnDef> columns = new HashSet<ColumnDef>();
    if (null == serialized) {
        return columns;
    }

    Iterable<String> strings = Splitter.on(AbstractCassandraSerDe.DELIMITER).omitEmptyStrings().trimResults()
            .split(serialized);
    TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
    for (String encoded : strings) {
        ColumnDef column = new ColumnDef();
        try {
            logger.info("Encoded column def: " + encoded);
            deserializer.deserialize(column, Hex.hexToBytes(encoded));
        } catch (TException e) {
            logger.warn("Error deserializing indexed column definition", e);
        }
        if (null == column.getName() || null == column.validation_class) {
            continue;
        }
        columns.add(column);
    }
    return columns;
}

From source file:org.apache.hadoop.hive.metastore.messaging.json.JSONMessageFactory.java

License:Apache License

public static Table getTableObj(ObjectNode jsonTree) throws Exception {
    TDeserializer deSerializer = new TDeserializer(new TJSONProtocol.Factory());
    Table tableObj = new Table();
    String tableJson = jsonTree.get("tableObjJson").asText();
    deSerializer.deserialize(tableObj, tableJson, "UTF-8");
    return tableObj;
}

From source file:org.apache.hadoop.hive.metastore.messaging.json.JSONMessageFactory.java

License:Apache License

public static List<Partition> getPartitionObjList(ObjectNode jsonTree) throws Exception {
    TDeserializer deSerializer = new TDeserializer(new TJSONProtocol.Factory());
    List<Partition> partitionObjList = new ArrayList<Partition>();
    Iterator<JsonNode> jsonArrayIterator = jsonTree.get("partitionListJson").iterator();
    while (jsonArrayIterator.hasNext()) {
        // Create a fresh Partition for each element so the list does not hold
        // repeated references to a single, repeatedly-overwritten object.
        Partition partitionObj = new Partition();
        deSerializer.deserialize(partitionObj, jsonArrayIterator.next().asText(), "UTF-8");
        partitionObjList.add(partitionObj);
    }
    return partitionObjList;
}

From source file:org.apache.hadoop.hive.metastore.messaging.json.JSONMessageFactory.java

License:Apache License

public static Function getFunctionObj(ObjectNode jsonTree) throws Exception {
    TDeserializer deSerializer = new TDeserializer(new TJSONProtocol.Factory());
    Function funcObj = new Function();
    String functionJson = jsonTree.get("functionObjJson").asText();
    deSerializer.deserialize(funcObj, functionJson, "UTF-8");
    return funcObj;
}

From source file:org.apache.hadoop.hive.metastore.messaging.json.JSONMessageFactory.java

License:Apache License

public static Index getIndexObj(ObjectNode jsonTree, String indexObjKey) throws Exception {
    TDeserializer deSerializer = new TDeserializer(new TJSONProtocol.Factory());
    Index indexObj = new Index();
    String indexJson = jsonTree.get(indexObjKey).asText();
    deSerializer.deserialize(indexObj, indexJson, "UTF-8");
    return indexObj;
}

From source file:org.apache.hadoop.hive.metastore.messaging.MessageBuilder.java

License:Apache License

public static TBase getTObj(String tSerialized, Class<? extends TBase> objClass) throws Exception {
    TDeserializer thriftDeSerializer = new TDeserializer(new TJSONProtocol.Factory());
    TBase obj = objClass.newInstance();
    thriftDeSerializer.deserialize(obj, tSerialized, "UTF-8");
    return obj;
}
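
As a rough illustration (not from the original file), this generic helper could be invoked as follows, assuming tableJson holds a Table serialized by a TSerializer configured with the same TJSONProtocol.Factory:

// Hypothetical call site; tableJson is assumed to hold a JSON-serialized Table.
Table table = (Table) MessageBuilder.getTObj(tableJson, Table.class);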

From source file:org.apache.hadoop.hive.ql.parse.EximUtil.java

License:Apache License

public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath)
        throws IOException, SemanticException {
    FSDataInputStream mdstream = null;
    try {
        mdstream = fs.open(metadataPath);
        byte[] buffer = new byte[1024];
        ByteArrayOutputStream sb = new ByteArrayOutputStream();
        int read = mdstream.read(buffer);
        while (read != -1) {
            sb.write(buffer, 0, read);
            read = mdstream.read(buffer);
        }
        String md = new String(sb.toByteArray(), "UTF-8");
        JSONObject jsonContainer = new JSONObject(md);
        String version = jsonContainer.getString("version");
        String fcversion = getJSONStringEntry(jsonContainer, "fcversion");
        checkCompatibility(version, fcversion);
        String tableDesc = getJSONStringEntry(jsonContainer, "table");
        Table table = null;
        List<Partition> partitionsList = null;
        if (tableDesc != null) {
            table = new Table();
            TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
            deserializer.deserialize(table, tableDesc, "UTF-8");
            // TODO : jackson-streaming-iterable-redo this
            JSONArray jsonPartitions = new JSONArray(jsonContainer.getString("partitions"));
            partitionsList = new ArrayList<Partition>(jsonPartitions.length());
            for (int i = 0; i < jsonPartitions.length(); ++i) {
                String partDesc = jsonPartitions.getString(i);
                Partition partition = new Partition();
                deserializer.deserialize(partition, partDesc, "UTF-8");
                partitionsList.add(partition);
            }
        }

        return new ReadMetaData(table, partitionsList, readReplicationSpec(jsonContainer));
    } catch (JSONException e) {
        throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in serializing metadata"), e);
    } catch (TException e) {
        throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg("Error in serializing metadata"), e);
    } finally {
        if (mdstream != null) {
            mdstream.close();
        }
    }
}

From source file:org.apache.hadoop.hive.ql.parse.repl.load.MetadataJson.java

License:Apache License

public MetadataJson(String message) throws JSONException, SemanticException {
    deserializer = new TDeserializer(new TJSONProtocol.Factory());
    json = new JSONObject(message);
    checkCompatibility();
    tableDesc = jsonEntry(TableSerializer.FIELD_NAME);
}

From source file:org.apache.hcatalog.hbase.snapshot.ZKUtil.java

License:Apache License

/**
 * This method deserializes the given byte array into the TBase object.
 *
 * @param obj The TBase instance to populate; receives the deserialized data.
 * @param data The serialized byte array to read from.
 * @throws IOException if deserialization fails
 */
static void deserialize(TBase obj, byte[] data) throws IOException {
    if (data == null || data.length == 0)
        return;
    try {
        TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
        deserializer.deserialize(obj, data);
    } catch (Exception e) {
        throw new IOException("Deserialization error: " + e.getMessage(), e);
    }
}
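
For context, the inverse helper would use org.apache.thrift.TSerializer with the same TBinaryProtocol.Factory. The sketch below is an illustrative counterpart under that assumption, not code taken from ZKUtil:

static byte[] serialize(TBase obj) throws IOException {
    try {
        // Mirror of deserialize(): the same protocol factory must be used on both sides.
        TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
        return serializer.serialize(obj);
    } catch (Exception e) {
        throw new IOException("Serialization error: " + e.getMessage(), e);
    }
}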