Example usage for org.apache.hadoop.io BytesWritable getBytes

Introduction

On this page you can find usage examples for the org.apache.hadoop.io.BytesWritable.getBytes() method.

Prototype

@Override
public byte[] getBytes() 

Document

Get the data backing the BytesWritable.
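
Note that getBytes() returns the backing buffer, which may be longer than the valid data: only the first getLength() bytes are meaningful. The examples below therefore pair getBytes() with getLength(). As a minimal sketch (this helper is not part of Hadoop's API), an exact-length copy can be obtained like this:

public static byte[] exactBytes(BytesWritable w) {
    // getBytes() exposes the backing buffer, which may be longer than the
    // valid data; only the first getLength() bytes are meaningful
    return java.util.Arrays.copyOf(w.getBytes(), w.getLength());
    // equivalently: return w.copyBytes();  (available since Hadoop 0.23)
}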

Usage

From source file:io.bfscan.data.VByteDocVector.java

License:Apache License

public static void fromBytesWritable(BytesWritable bytes, VByteDocVector doc) {
    try {
        ByteArrayInputStream bytesIn = new ByteArrayInputStream(bytes.getBytes());
        DataInputStream data = new DataInputStream(bytesIn);

        int length = WritableUtils.readVInt(data);
        doc.termids = new int[length];
        for (int i = 0; i < length; i++) {
            doc.termids[i] = WritableUtils.readVInt(data);
        }
    } catch (IOException e) {
        doc.termids = new int[0];
    }
}
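
The corresponding writer (a sketch; the project's actual serializer is not shown here) would emit the same layout, a VInt count followed by one VInt per term id:

public static void toBytesWritable(BytesWritable bytes, int[] termids) throws IOException {
    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    DataOutputStream data = new DataOutputStream(bytesOut);

    // mirror the read path: a VInt count, then one VInt per term id
    WritableUtils.writeVInt(data, termids.length);
    for (int termid : termids) {
        WritableUtils.writeVInt(data, termid);
    }
    data.flush();

    byte[] arr = bytesOut.toByteArray();
    bytes.set(arr, 0, arr.length);
}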

From source file:io.covert.binary.analysis.BinaryAnalysisMapper.java

License:Apache License

protected void writeToFile(BytesWritable value, File binaryFile, Context context) throws IOException {
    long fileCreationOverheadMS = System.currentTimeMillis();

    FileOutputStream fileOut = new FileOutputStream(binaryFile);
    fileOut.write(value.getBytes(), 0, value.getLength());
    fileOut.close();
    fileCreationOverheadMS = System.currentTimeMillis() - fileCreationOverheadMS;
    context.getCounter(STATS, FILE_CREATION_OVERHEAD_MS_COUNTER).increment(fileCreationOverheadMS);
}

From source file:io.covert.dns.parse.ParseMapper.java

License:Apache License

protected void map(Text key, BytesWritable value, Context context)
        throws java.io.IOException, InterruptedException {
    try {
        // getBytes() may return a padded buffer; the DNS wire format is
        // self-delimiting, so trailing bytes past getLength() are ignored
        Message msg = new Message(value.getBytes());

        int[] sections = { Section.ANSWER, Section.ADDITIONAL, Section.AUTHORITY };
        for (int section : sections) {
            for (Record record : msg.getSectionArray(section)) {
                String json = JsonUtils.toJson(record, ignoreTTL);
                outKey.set(json);
                context.write(outKey, outVal);
            }
        }
    } catch (Exception e) {
        context.getCounter(getClass().getSimpleName(), "PARSE_FAIL").increment(1);
        e.printStackTrace();
    }
}

From source file:io.covert.dns.util.DumpResponses.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf();
    FileSystem fs = FileSystem.get(conf);

    Text key = new Text();
    BytesWritable val = new BytesWritable();

    FileStatus[] listing;
    Path inpath = new Path(args[0]);
    if (fs.getFileStatus(inpath) != null && fs.getFileStatus(inpath).isDir())
        listing = fs.listStatus(inpath);
    else
        listing = fs.globStatus(inpath);

    for (FileStatus f : listing) {
        if (f.isDir() || f.getPath().getName().startsWith("_"))
            continue;

        System.out.println("Opennning " + f.getPath() + " ...");
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, f.getPath(), conf);

        while (reader.next(key, val)) {
            Message msg = new Message(val.getBytes());
            System.out.println(key + ": " + msg);
            System.out.println("---");
        }
        reader.close();
    }
    return 0;
}

From source file:io.druid.indexer.HadoopyStringInputRowParser.java

License:Apache License

@Override
public InputRow parse(Object input) {
    if (input instanceof Text) {
        return parser.parse(((Text) input).toString());
    } else if (input instanceof BytesWritable) {
        BytesWritable valueBytes = (BytesWritable) input;
        return parser.parse(ByteBuffer.wrap(valueBytes.getBytes(), 0, valueBytes.getLength()));
    } else {
        throw new IAE("can't convert type [%s] to InputRow", input.getClass().getName());
    }
}

From source file:io.druid.indexer.InputRowSerde.java

License:Apache License

public static final InputRow fromBytes(byte[] data, AggregatorFactory[] aggs) {
    try {
        DataInput in = ByteStreams.newDataInput(data);

        //Read timestamp
        long timestamp = in.readLong();

        //Read dimensions
        StringArrayWritable sw = new StringArrayWritable();
        sw.readFields(in);
        List<String> dimensions = Arrays.asList(sw.toStrings());

        MapWritable mw = new MapWritable();
        mw.readFields(in);

        Map<String, Object> event = Maps.newHashMap();

        for (String d : dimensions) {
            Writable v = mw.get(new Text(d));

            if (v == null) {
                continue;
            }

            if (v instanceof Text) {
                event.put(d, ((Text) v).toString());
            } else if (v instanceof StringArrayWritable) {
                event.put(d, Arrays.asList(((StringArrayWritable) v).toStrings()));
            } else {
                throw new ISE("unknown dim value type %s", v.getClass().getName());
            }
        }

        //Read metrics
        for (AggregatorFactory aggFactory : aggs) {
            String k = aggFactory.getName();
            Writable v = mw.get(new Text(k));

            if (v == null) {
                continue;
            }

            String t = aggFactory.getTypeName();

            if (t.equals("float")) {
                event.put(k, ((FloatWritable) v).get());
            } else if (t.equals("long")) {
                event.put(k, ((LongWritable) v).get());
            } else {
                // it's a complex metric
                ComplexMetricSerde serde = getComplexMetricSerde(t);
                BytesWritable bw = (BytesWritable) v;
                event.put(k, serde.fromBytes(bw.getBytes(), 0, bw.getLength()));
            }
        }

        return new MapBasedInputRow(timestamp, dimensions, event);
    } catch (IOException ex) {
        throw Throwables.propagate(ex);
    }
}

From source file:it.crs4.pydoop.mapreduce.pipes.BinaryProtocol.java

License:Apache License

/**
 * Write the given object to the stream. If it is a Text or BytesWritable,
 * write it directly. Otherwise, write it to a buffer and then write the
 * length and data to the stream.
 * @param obj the object to write
 * @throws IOException
 */
private void writeObject(Writable obj) throws IOException {
    // For Text and BytesWritable, encode them directly, so that they end up
    // in C++ as the natural translations.
    if (obj instanceof Text) {
        Text t = (Text) obj;
        int len = t.getLength();
        WritableUtils.writeVInt(stream, len);
        stream.write(t.getBytes(), 0, len);
    } else if (obj instanceof BytesWritable) {
        BytesWritable b = (BytesWritable) obj;
        int len = b.getLength();
        WritableUtils.writeVInt(stream, len);
        stream.write(b.getBytes(), 0, len);
    } else if (obj == null) {
        // write a zero length string
        WritableUtils.writeVInt(stream, 0);
    } else {
        buffer.reset();
        obj.write(buffer);
        int length = buffer.getLength();
        WritableUtils.writeVInt(stream, length);
        stream.write(buffer.getData(), 0, length);
    }
}
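
The read side of this framing is not shown here, but it follows directly from the write path: a VInt length followed by that many raw bytes. A hypothetical decoder sketch:

private static byte[] readFrame(DataInput in) throws IOException {
    int len = WritableUtils.readVInt(in); // zero length encodes a null object
    byte[] data = new byte[len];
    in.readFully(data);
    return data;
}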

From source file:it.crs4.pydoop.mapreduce.pipes.CommonStub.java

License:Apache License

protected void writeObject(Writable obj, DataOutputStream stream) throws IOException {
    // For Text and BytesWritable, encode them directly, so that they end up
    // in C++ as the natural translations.
    System.err.println("obj: " + obj);

    DataOutputBuffer buffer = new DataOutputBuffer();
    if (obj instanceof Text) {
        Text t = (Text) obj;
        int len = t.getLength();
        WritableUtils.writeVLong(stream, len); // writeVInt delegates to writeVLong, so the encodings match
        stream.flush();

        stream.write(t.getBytes(), 0, len);
        stream.flush();
        System.err.println("len: " + len);

    } else if (obj instanceof BytesWritable) {
        BytesWritable b = (BytesWritable) obj;
        int len = b.getLength();
        WritableUtils.writeVLong(stream, len);
        stream.write(b.getBytes(), 0, len);
        System.err.println("len: " + len);
    } else {
        buffer.reset();
        obj.write(buffer);
        int length = buffer.getLength();
        WritableUtils.writeVInt(stream, length);
        stream.write(buffer.getData(), 0, length);
        System.err.println("len: " + length);
    }
    stream.flush();

}

From source file:it.crs4.pydoop.pipes.BinaryProtocol.java

License:Apache License

/**
 * Write the given object to the stream. If it is a Text or BytesWritable,
 * write it directly. Otherwise, write it to a buffer and then write the
 * length and data to the stream.
 * @param obj the object to write
 * @throws IOException
 */
private void writeObject(Writable obj) throws IOException {
    // For Text and BytesWritable, encode them directly, so that they end up
    // in C++ as the natural translations.
    if (obj instanceof Text) {
        Text t = (Text) obj;
        int len = t.getLength();
        WritableUtils.writeVInt(stream, len);
        stream.write(t.getBytes(), 0, len);
    } else if (obj instanceof BytesWritable) {
        BytesWritable b = (BytesWritable) obj;
        int len = b.getLength();
        WritableUtils.writeVInt(stream, len);
        stream.write(b.getBytes(), 0, len);
    } else {
        buffer.reset();
        obj.write(buffer);
        int length = buffer.getLength();
        WritableUtils.writeVInt(stream, length);
        stream.write(buffer.getData(), 0, length);
    }
}

From source file:kafka.bridge.hadoop.KafkaRecordWriter.java

License:Apache License

@Override
public void write(NullWritable key, BytesWritable value) throws IOException, InterruptedException {
    Message msg = new Message(value.getBytes());
    msgList.add(msg);
    totalSize += msg.size();

    if (totalSize > queueSize)
        sendMsgList();
}
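
Note that this constructs the Kafka Message from the entire backing array, so any padding past value.getLength() would become part of the payload. A safer variant (a sketch against the same old kafka.message.Message API) copies only the valid bytes:

@Override
public void write(NullWritable key, BytesWritable value) throws IOException, InterruptedException {
    // copy only the first getLength() bytes; getBytes() may return a padded buffer
    Message msg = new Message(java.util.Arrays.copyOf(value.getBytes(), value.getLength()));
    msgList.add(msg);
    totalSize += msg.size();

    if (totalSize > queueSize)
        sendMsgList();
}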