Example usage for org.apache.hadoop.io BytesWritable getLength

Introduction

On this page you can find example usages of the org.apache.hadoop.io.BytesWritable#getLength() method.

Prototype

@Override
public int getLength() 

Document

Get the current size of the buffer.
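
The sketch below is not taken from the sources on this page; it is a minimal, self-contained illustration (the class name GetLengthSketch is made up) of why getLength() matters: getBytes() returns the whole backing buffer, which can be larger than the valid data, so callers copy or read only the first getLength() bytes.

import java.util.Arrays;

import org.apache.hadoop.io.BytesWritable;

public class GetLengthSketch {
    public static void main(String[] args) {
        // Wrap three bytes: the logical size and the backing array match at first.
        BytesWritable bw = new BytesWritable(new byte[] { 1, 2, 3 });
        System.out.println(bw.getLength());       // 3
        System.out.println(bw.getBytes().length); // 3

        // Growing the capacity enlarges the backing array but not the logical size.
        bw.setCapacity(16);
        System.out.println(bw.getLength());       // still 3
        System.out.println(bw.getBytes().length); // 16

        // Hence the idiom used throughout the examples below: read or copy
        // only the first getLength() bytes of getBytes().
        byte[] exact = Arrays.copyOfRange(bw.getBytes(), 0, bw.getLength());
        System.out.println(exact.length);         // 3
    }
}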

Usage

From source file: hydrograph.engine.cascading.scheme.avro.CustomCascadingToAvro.java

License: Apache License

protected static Object toAvroFixed(Object obj, Schema schema) {
    BytesWritable bytes = (BytesWritable) obj;
    return new Fixed(schema, Arrays.copyOfRange(bytes.getBytes(), 0, bytes.getLength()));
}

From source file: hydrograph.engine.cascading.scheme.avro.CustomCascadingToAvro.java

License: Apache License

protected static Object toAvroBytes(Object obj) {
    BytesWritable inBytes = (BytesWritable) obj;
    return ByteBuffer.wrap(Arrays.copyOfRange(inBytes.getBytes(), 0, inBytes.getLength()));
}

From source file: io.amient.kafka.hadoop.testutils.MyJsonTimestampExtractor.java

License: Apache License

@Override
public Long extract(MsgMetadataWritable key, BytesWritable value) throws IOException {
    if (value.getLength() > 0) {
        JsonNode json = jsonMapper.readValue(value.getBytes(), 0, value.getLength(), JsonNode.class);
        if (json.has("timestamp")) {
            return json.get("timestamp").getLongValue();
        }
    }
    return null;
}

From source file: io.aos.hdfs.BytesWritableTest.java

License: Apache License

@Test
public void test() throws IOException {
    // vv BytesWritableTest
    BytesWritable b = new BytesWritable(new byte[] { 3, 5 });
    byte[] bytes = serialize(b);
    assertThat(StringUtils.byteToHexString(bytes), is("000000020305"));
    // ^^ BytesWritableTest

    // vv BytesWritableTest-Capacity
    b.setCapacity(11);
    assertThat(b.getLength(), is(2));
    assertThat(b.getBytes().length, is(11));
    // ^^ BytesWritableTest-Capacity
}

From source file: io.covert.binary.analysis.BinaryAnalysisMapper.java

License: Apache License

protected void writeToFile(BytesWritable value, File binaryFile, Context context) throws IOException {
    long fileCreationOverheadMS = System.currentTimeMillis();

    FileOutputStream fileOut = new FileOutputStream(binaryFile);
    fileOut.write(value.getBytes(), 0, value.getLength());
    fileOut.close();
    fileCreationOverheadMS = System.currentTimeMillis() - fileCreationOverheadMS;
    context.getCounter(STATS, FILE_CREATION_OVERHEAD_MS_COUNTER).increment(fileCreationOverheadMS);
}

From source file: io.druid.indexer.HadoopyStringInputRowParser.java

License: Apache License

@Override
public InputRow parse(Object input) {
    if (input instanceof Text) {
        return parser.parse(((Text) input).toString());
    } else if (input instanceof BytesWritable) {
        BytesWritable valueBytes = (BytesWritable) input;
        return parser.parse(ByteBuffer.wrap(valueBytes.getBytes(), 0, valueBytes.getLength()));
    } else {
        throw new IAE("can't convert type [%s] to InputRow", input.getClass().getName());
    }
}

From source file: io.druid.indexer.InputRowSerde.java

License: Apache License

public static final InputRow fromBytes(byte[] data, AggregatorFactory[] aggs) {
    try {
        DataInput in = ByteStreams.newDataInput(data);

        //Read timestamp
        long timestamp = in.readLong();

        //Read dimensions
        StringArrayWritable sw = new StringArrayWritable();
        sw.readFields(in);
        List<String> dimensions = Arrays.asList(sw.toStrings());

        MapWritable mw = new MapWritable();
        mw.readFields(in);

        Map<String, Object> event = Maps.newHashMap();

        for (String d : dimensions) {
            Writable v = mw.get(new Text(d));

            if (v == null) {
                continue;
            }

            if (v instanceof Text) {
                event.put(d, ((Text) v).toString());
            } else if (v instanceof StringArrayWritable) {
                event.put(d, Arrays.asList(((StringArrayWritable) v).toStrings()));
            } else {
                throw new ISE("unknown dim value type %s", v.getClass().getName());
            }
        }

        //Read metrics
        for (AggregatorFactory aggFactory : aggs) {
            String k = aggFactory.getName();
            Writable v = mw.get(new Text(k));

            if (v == null) {
                continue;
            }

            String t = aggFactory.getTypeName();

            if (t.equals("float")) {
                event.put(k, ((FloatWritable) v).get());
            } else if (t.equals("long")) {
                event.put(k, ((LongWritable) v).get());
            } else {
                // it's a complex metric
                ComplexMetricSerde serde = getComplexMetricSerde(t);
                BytesWritable bw = (BytesWritable) v;
                event.put(k, serde.fromBytes(bw.getBytes(), 0, bw.getLength()));
            }
        }

        return new MapBasedInputRow(timestamp, dimensions, event);
    } catch (IOException ex) {
        throw Throwables.propagate(ex);
    }
}

From source file: io.warp10.standalone.StandaloneChunkedMemoryStore.java

License: Apache License

public void dump(String path) throws IOException {

    long nano = System.nanoTime();
    int gts = 0;
    long bytes = 0L;

    Configuration conf = new Configuration();

    conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

    BytesWritable key = new BytesWritable();
    BytesWritable value = new BytesWritable();

    CompressionCodec codec = new DefaultCodec();
    SequenceFile.Writer writer = null;
    SequenceFile.Writer.Option optPath = SequenceFile.Writer.file(new Path(path));
    SequenceFile.Writer.Option optKey = SequenceFile.Writer.keyClass(key.getClass());
    SequenceFile.Writer.Option optVal = SequenceFile.Writer.valueClass(value.getClass());
    SequenceFile.Writer.Option optCom = SequenceFile.Writer.compression(CompressionType.RECORD, codec);

    writer = SequenceFile.createWriter(conf, optPath, optKey, optVal, optCom);

    TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());

    try {
        for (Entry<BigInteger, InMemoryChunkSet> entry : this.series.entrySet()) {
            gts++;
            Metadata metadata = this.directoryClient.getMetadataById(entry.getKey());

            List<GTSDecoder> decoders = entry.getValue().getDecoders();

            //GTSEncoder encoder = entry.getValue().fetchEncoder(now, this.chunkcount * this.chunkspan);

            for (GTSDecoder decoder : decoders) {
                GTSWrapper wrapper = new GTSWrapper(metadata);

                wrapper.setBase(decoder.getBaseTimestamp());
                wrapper.setCount(decoder.getCount());

                byte[] data = serializer.serialize(wrapper);
                key.set(data, 0, data.length);

                ByteBuffer bb = decoder.getBuffer();

                ByteBuffer rwbb = ByteBuffer.allocate(bb.remaining());
                rwbb.put(bb);
                rwbb.rewind();
                value.set(rwbb.array(), rwbb.arrayOffset(), rwbb.remaining());

                bytes += key.getLength() + value.getLength();

                writer.append(key, value);
            }
        }
    } catch (IOException ioe) {
        ioe.printStackTrace();
        throw ioe;
    } catch (Exception e) {
        e.printStackTrace();
        throw new IOException(e);
    }

    writer.close();

    nano = System.nanoTime() - nano;

    System.out.println("Dumped " + gts + " GTS (" + bytes + " bytes) in " + (nano / 1000000.0D) + " ms.");
}

From source file: io.warp10.standalone.StandaloneChunkedMemoryStore.java

License: Apache License

private void load(String path) throws IOException {

    long nano = System.nanoTime();
    int gts = 0;
    long bytes = 0L;

    Configuration conf = new Configuration();

    conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

    BytesWritable key = new BytesWritable();
    BytesWritable value = new BytesWritable();

    TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());

    SequenceFile.Reader.Option optPath = SequenceFile.Reader.file(new Path(path));

    SequenceFile.Reader reader = null;

    boolean failsafe = "true".equals(
            properties.getProperty(io.warp10.continuum.Configuration.STANDALONE_MEMORY_STORE_LOAD_FAILSAFE));

    try {
        reader = new SequenceFile.Reader(conf, optPath);

        System.out.println("Loading '" + path + "' back in memory.");

        while (reader.next(key, value)) {
            gts++;
            GTSWrapper wrapper = new GTSWrapper();
            deserializer.deserialize(wrapper, key.copyBytes());
            GTSEncoder encoder = new GTSEncoder(0L, null, value.copyBytes());
            encoder.setCount(wrapper.getCount());

            bytes += value.getLength() + key.getLength();
            encoder.safeSetMetadata(wrapper.getMetadata());
            store(encoder);
            if (null != this.directoryClient) {
                this.directoryClient.register(wrapper.getMetadata());
            }
        }
    } catch (FileNotFoundException fnfe) {
        System.err.println("File '" + path + "' was not found, skipping.");
        return;
    } catch (IOException ioe) {
        if (!failsafe) {
            throw ioe;
        } else {
            System.err.println("Ignoring exception " + ioe.getMessage() + ".");
        }
    } catch (Exception e) {
        if (!failsafe) {
            throw new IOException(e);
        } else {
            System.err.println("Ignoring exception " + e.getMessage() + ".");
        }
    }

    reader.close();

    nano = System.nanoTime() - nano;

    System.out.println("Loaded " + gts + " GTS (" + bytes + " bytes) in " + (nano / 1000000.0D) + " ms.");
}

From source file: io.warp10.standalone.StandaloneMemoryStore.java

License: Apache License

public void dump(String path) throws IOException {

    long nano = System.nanoTime();
    int gts = 0;
    long bytes = 0L;

    Configuration conf = new Configuration();

    conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

    BytesWritable key = new BytesWritable();
    BytesWritable value = new BytesWritable();

    CompressionCodec codec = new DefaultCodec();
    SequenceFile.Writer writer = null;
    SequenceFile.Writer.Option optPath = SequenceFile.Writer.file(new Path(path));
    SequenceFile.Writer.Option optKey = SequenceFile.Writer.keyClass(key.getClass());
    SequenceFile.Writer.Option optVal = SequenceFile.Writer.valueClass(value.getClass());
    SequenceFile.Writer.Option optCom = SequenceFile.Writer.compression(CompressionType.RECORD, codec);

    writer = SequenceFile.createWriter(conf, optPath, optKey, optVal, optCom);

    TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());

    try {
        for (Entry<BigInteger, GTSEncoder> entry : this.series.entrySet()) {
            gts++;
            Metadata metadata = this.directoryClient.getMetadataById(entry.getKey());

            GTSWrapper wrapper = new GTSWrapper(metadata);

            GTSEncoder encoder = entry.getValue();

            wrapper.setBase(encoder.getBaseTimestamp());
            wrapper.setCount(encoder.getCount());

            byte[] data = serializer.serialize(wrapper);
            key.set(data, 0, data.length);

            data = encoder.getBytes();
            value.set(data, 0, data.length);

            bytes += key.getLength() + value.getLength();

            writer.append(key, value);
        }
        /*      
              for (Entry<BigInteger,Metadata> entry: this.metadatas.entrySet()) {
                gts++;
                byte[] data = serializer.serialize(entry.getValue());
                key.set(data, 0, data.length);
                        
                GTSEncoder encoder = this.series.get(entry.getKey());
                data = encoder.getBytes();
                value.set(data, 0, data.length);
                
                bytes += key.getLength() + value.getLength();
                        
                writer.append(key, value);
              }
        */
    } catch (IOException ioe) {
        ioe.printStackTrace();
        throw ioe;
    } catch (Exception e) {
        e.printStackTrace();
        throw new IOException(e);
    }

    writer.close();

    nano = System.nanoTime() - nano;

    System.out.println("Dumped " + gts + " GTS (" + bytes + " bytes) in " + (nano / 1000000.0D) + " ms.");
}