Example usage for org.apache.hadoop.io ObjectWritable ObjectWritable

List of usage examples for org.apache.hadoop.io ObjectWritable ObjectWritable

Introduction

This page collects example usages of the org.apache.hadoop.io.ObjectWritable constructor ObjectWritable(Object instance).

Prototype

public ObjectWritable(Object instance) 
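
A minimal, self-contained sketch of the round trip this constructor enables. The buffer handling and variable names below are illustrative only and are not taken from any of the source files listed further down.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;

public class ObjectWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        // Wrap a value in an ObjectWritable; the class name is recorded with the instance.
        ObjectWritable written = new ObjectWritable(new Text("hello"));

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        written.write(new DataOutputStream(bytes));

        // Read it back through the no-arg constructor; a Configuration lets
        // ObjectWritable re-instantiate the wrapped Writable reflectively.
        ObjectWritable read = new ObjectWritable();
        read.setConf(new Configuration());
        read.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(read.get()); // prints "hello"
    }
}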

Source Link

Usage

From source file:co.cask.cdap.etl.batch.mapreduce.TaggedWritable.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    Text.writeString(out, stageName);
    ObjectWritable recordWritable = new ObjectWritable(record);
    recordWritable.write(out);
}
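
The write above stores the stage name followed by the wrapped record. A hedged sketch of the matching readFields side of this pattern follows; the field names and the conf reference are assumptions, not code from the CDAP source.

@Override
public void readFields(DataInput in) throws IOException {
    this.stageName = Text.readString(in);
    // Mirror the write path: rehydrate the record through an empty ObjectWritable.
    ObjectWritable recordWritable = new ObjectWritable();
    recordWritable.setConf(conf); // assumes the class keeps a Configuration around
    recordWritable.readFields(in);
    this.record = recordWritable.get();
}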

From source file:com.bah.culvert.constraints.Constraint.java

License:Apache License

/**
 * Write a given constraint to the output stream
 * @param constraint to write
 * @param out to write to
 * @throws IOException on failure to write
 */
public static void write(Constraint constraint, DataOutput out) throws IOException {
    new ObjectWritable(constraint).write(out);
}
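
Only the write helper appears in this file's snippet. Below is a minimal sketch of a symmetric read helper, assuming a Configuration is available to re-instantiate the concrete Constraint class; the method name and signature are illustrative, not Culvert's actual API.

/**
 * Hypothetical counterpart to write(Constraint, DataOutput).
 * @param in stream to read from
 * @param conf used by ObjectWritable to instantiate the constraint class
 */
public static Constraint read(DataInput in, Configuration conf) throws IOException {
    ObjectWritable ow = new ObjectWritable();
    ow.setConf(conf);
    ow.readFields(in);
    return (Constraint) ow.get();
}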

From source file:com.bah.culvert.constraints.filter.ResultFilter.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {

    if (this.subConstraint == null)
        out.writeBoolean(false);
    else {
        out.writeBoolean(true);
        // write out the subconstraint
        Constraint.write(this.subConstraint, out);
    }

    // write out the columns
    ow.set(this.columns);
    ow.write(out);

    // write out the range
    this.range.write(out);

    if (this.table == null)
        out.writeBoolean(false);
    else {
        out.writeBoolean(true);
        // write out the table
        new ObjectWritable(this.table).write(out);
    }
}
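
The boolean values written here act as presence flags for the nullable subConstraint and table fields, so a symmetric readFields would call in.readBoolean() before attempting to deserialize each optional member. The ow used for the columns is presumably an ObjectWritable held as a field and reused across calls rather than allocated per write.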

From source file:com.bah.culvert.constraints.IndexRangeConstraint.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    range.write(out);
    new ObjectWritable(this.index).write(out);
}

From source file:com.bah.culvert.constraints.join.IndexedJoin.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    new ObjectWritable(this.rightIndex).write(out);
}

From source file:com.bah.culvert.constraints.Join.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    // write the left table
    ObjectWritable ow = new ObjectWritable(this.leftTable);
    ow.write(out);

    // write left constraint
    ow.set(this.left);
    ow.write(out);

    this.leftColumn.write(out);

    Text.writeString(out, this.rightTable);

    // write out the database
    ow.set(this.database);
    ow.write(out);
}
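
Here a single ObjectWritable is constructed once and then repointed at successive fields via set(), avoiding a fresh wrapper allocation for every value written; a read side would mirror this by calling readFields() and get() once per field in the same order.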

From source file:com.bah.culvert.constraints.RetrieveColumns.java

License:Apache License

@Override
public void write(DataOutput out) throws IOException {
    out.writeInt(this.columns.length);
    for (int i = 0; i < this.columns.length; i++) {
        this.columns[i].write(out);
    }
    Constraint.write(this.subConstraint, out);
    new ObjectWritable(this.table).write(out);
}

From source file:org.apache.hama.bsp.message.DiskQueue.java

License:Apache License

@Override
public final void add(M item) {
    size++;
    try {
        new ObjectWritable(item).write(writer);
    } catch (IOException e) {
        LOG.error(e);
    }
}

From source file:org.apache.hive.storage.jdbc.JdbcRecordReader.java

License:Apache License

@Override
public boolean next(LongWritable key, MapWritable value) throws IOException {
    try {
        LOGGER.debug("JdbcRecordReader.next called");
        if (dbAccessor == null) {
            dbAccessor = DatabaseAccessorFactory.getAccessor(conf);
            iterator = dbAccessor.getRecordIterator(conf, split.getLimit(), split.getOffset());
        }

        if (iterator.hasNext()) {
            LOGGER.debug("JdbcRecordReader has more records to read.");
            key.set(pos);
            pos++;
            Map<String, Object> record = iterator.next();
            if ((record != null) && (!record.isEmpty())) {
                for (Entry<String, Object> entry : record.entrySet()) {
                    value.put(new Text(entry.getKey()), entry.getValue() == null ? NullWritable.get()
                            : new ObjectWritable(entry.getValue()));
                }
                return true;
            } else {
                LOGGER.debug("JdbcRecordReader got null record.");
                return false;
            }
        } else {
            LOGGER.debug("JdbcRecordReader has no more records to read.");
            return false;
        }
    } catch (Exception e) {
        LOGGER.error("An error occurred while reading the next record from DB.", e);
        return false;
    }
}
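
Each column value lands in the MapWritable either as NullWritable.get() or wrapped in an ObjectWritable, so a downstream consumer has to unwrap it before use. A hedged illustration follows; the column name is made up.

// Hypothetical consumer-side unwrapping of a value produced by this record reader.
Writable wrapped = value.get(new Text("some_column"));
Object columnValue = (wrapped instanceof ObjectWritable) ? ((ObjectWritable) wrapped).get() : null;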

From source file:org.apache.hive.streaming.HiveRecordWriter.java

License:Apache License

public Object encode(Record record) throws SerializationError {
    try {
        ObjectWritable blob = new ObjectWritable(record);
        return serde.deserialize(blob);
    } catch (SerDeException e) {
        throw new SerializationError("Unable to convert Record into Object", e);
    }
}