Example usage for java.nio ByteBuffer arrayOffset

Introduction

On this page you can find example usages of java.nio.ByteBuffer.arrayOffset() collected from open source projects.

Prototype

public final int arrayOffset() 

Document

Returns the offset within this buffer's backing byte array of the first element of the buffer, if the buffer is backed by an accessible array.
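
Before the project examples, here is a minimal, self-contained sketch (not taken from any project below) of why arrayOffset() matters: a buffer produced by slice() shares its parent's backing array, so absolute indices into array() must be shifted by arrayOffset().

import java.nio.ByteBuffer;

public class ArrayOffsetDemo {
    public static void main(String[] args) {
        ByteBuffer parent = ByteBuffer.wrap(new byte[] { 10, 20, 30, 40 });
        parent.position(2);
        ByteBuffer slice = parent.slice(); // shares the backing array, arrayOffset() == 2

        if (slice.hasArray()) { // array()/arrayOffset() throw if no accessible array exists
            byte first = slice.array()[slice.arrayOffset() + slice.position()];
            System.out.println(first); // prints 30, not 10
        }
    }
}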

Usage

From source file: org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

private int[] readIntegers(int chunkSize, ByteBuffer input) throws IOException {
    int[] vals = Snappy.uncompressIntArray(input.array(), input.arrayOffset() + input.position(), chunkSize);
    input.position(input.position() + chunkSize);
    return vals;
}

From source file: org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

private long[] readLongs(int chunkSize, ByteBuffer input) throws IOException {
    long[] vals = Snappy.uncompressLongArray(input.array(), input.arrayOffset() + input.position(), chunkSize);
    input.position(input.position() + chunkSize);
    return vals;
}

From source file: org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

private short[] readShorts(int chunkSize, ByteBuffer input) throws IOException {
    short[] vals = Snappy.uncompressShortArray(input.array(), input.arrayOffset() + input.position(),
            chunkSize);
    input.position(input.position() + chunkSize);
    return vals;
}

From source file: org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

/**
 * Read a chunk from a ByteBuffer and advance the buffer position.
 * @param chunkSize The number of bytes to decompress starting at the current position.
 * @param input The buffer to read from.
 * @return An array of primitives.
 * @throws IOException
 */
private byte[] readBytes(int chunkSize, ByteBuffer input) throws IOException {
    byte[] vals = new byte[Snappy.uncompressedLength(input.array(), input.arrayOffset() + input.position(),
            chunkSize)];
    Snappy.uncompress(input.array(), input.arrayOffset() + input.position(), chunkSize, vals, 0);
    input.position(input.position() + chunkSize);
    return vals;
}
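
A hedged sketch of how such a chunk reader might be driven (the length-prefixed framing here is an assumption for illustration, not necessarily SnappyCompDe's actual wire format):

private byte[][] readAllChunks(ByteBuffer input, int chunkCount) throws IOException {
    byte[][] chunks = new byte[chunkCount][];
    for (int i = 0; i < chunkCount; i++) {
        int chunkSize = input.getInt();          // advances past the hypothetical length prefix
        chunks[i] = readBytes(chunkSize, input); // advances past the chunk itself
    }
    return chunks;
}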

From source file: org.apache.hadoop.hbase.io.HalfStoreFileReader.java

@Override
public HFileScanner getScanner(final boolean cacheBlocks, final boolean pread, final boolean isCompaction) {
    final HFileScanner s = super.getScanner(cacheBlocks, pread, isCompaction);
    return new HFileScanner() {
        final HFileScanner delegate = s;
        public boolean atEnd = false;

        public ByteBuffer getKey() {
            if (atEnd)
                return null;
            return delegate.getKey();
        }

        public String getKeyString() {
            if (atEnd)
                return null;

            return delegate.getKeyString();
        }

        public ByteBuffer getValue() {
            if (atEnd)
                return null;

            return delegate.getValue();
        }

        public String getValueString() {
            if (atEnd)
                return null;

            return delegate.getValueString();
        }

        public Cell getKeyValue() {
            if (atEnd)
                return null;

            return delegate.getKeyValue();
        }

        public boolean next() throws IOException {
            if (atEnd)
                return false;

            boolean b = delegate.next();
            if (!b) {
                return b;
            }
            // constrain the bottom.
            if (!top) {
                ByteBuffer bb = getKey();
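                // The compare addresses the backing array directly, so it is
                // shifted by arrayOffset() in case the key buffer is a slice
                // of a larger block buffer.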
                if (getComparator().compareFlatKey(bb.array(), bb.arrayOffset(), bb.limit(), splitkey, 0,
                        splitkey.length) >= 0) {
                    atEnd = true;
                    return false;
                }
            }
            return true;
        }

        @Override
        public boolean seekBefore(byte[] key) throws IOException {
            return seekBefore(key, 0, key.length);
        }

        @Override
        public boolean seekBefore(byte[] key, int offset, int length) throws IOException {
            return seekBefore(new KeyValue.KeyOnlyKeyValue(key, offset, length));
        }

        @Override
        public boolean seekTo() throws IOException {
            if (top) {
                int r = this.delegate.seekTo(new KeyValue.KeyOnlyKeyValue(splitkey, 0, splitkey.length));
                if (r == HConstants.INDEX_KEY_MAGIC) {
                    return true;
                }
                if (r < 0) {
                    // midkey is < first key in file
                    return this.delegate.seekTo();
                }
                if (r > 0) {
                    return this.delegate.next();
                }
                return true;
            }

            boolean b = delegate.seekTo();
            if (!b) {
                return b;
            }
            // Check key.
            ByteBuffer k = this.delegate.getKey();
            return this.delegate.getReader().getComparator().compareFlatKey(k.array(), k.arrayOffset(),
                    k.limit(), splitkey, 0, splitkey.length) < 0;
        }

        @Override
        public int seekTo(byte[] key) throws IOException {
            return seekTo(key, 0, key.length);
        }

        @Override
        public int seekTo(byte[] key, int offset, int length) throws IOException {
            return seekTo(new KeyValue.KeyOnlyKeyValue(key, offset, length));
        }

        @Override
        public int reseekTo(byte[] key) throws IOException {
            return reseekTo(key, 0, key.length);
        }

        @Override
        public int reseekTo(byte[] key, int offset, int length) throws IOException {
            //This function is identical to the corresponding seekTo function except
            //that we call reseekTo (and not seekTo) on the delegate.
            return reseekTo(new KeyValue.KeyOnlyKeyValue(key, offset, length));
        }

        public org.apache.hadoop.hbase.io.hfile.HFile.Reader getReader() {
            return this.delegate.getReader();
        }

        public boolean isSeeked() {
            return this.delegate.isSeeked();
        }

        @Override
        public int seekTo(Cell key) throws IOException {
            if (top) {
                if (getComparator().compareOnlyKeyPortion(key, splitCell) < 0) {
                    return -1;
                }
            } else {
                if (getComparator().compareOnlyKeyPortion(key, splitCell) >= 0) {
                    // we would place the scanner in the second half.
                    // it might be an error to return false here ever...
                    boolean res = delegate.seekBefore(splitCell);
                    if (!res) {
                        throw new IOException(
                                "Seeking for a key in bottom of file, but key exists in top of file, "
                                        + "failed on seekBefore(midkey)");
                    }
                    return 1;
                }
            }
            return delegate.seekTo(key);
        }

        @Override
        public int reseekTo(Cell key) throws IOException {
            // This function is identical to the corresponding seekTo function
            // except
            // that we call reseekTo (and not seekTo) on the delegate.
            if (top) {
                if (getComparator().compareOnlyKeyPortion(key, splitCell) < 0) {
                    return -1;
                }
            } else {
                if (getComparator().compareOnlyKeyPortion(key, splitCell) >= 0) {
                    // we would place the scanner in the second half.
                    // it might be an error to return false here ever...
                    boolean res = delegate.seekBefore(splitCell);
                    if (!res) {
                        throw new IOException("Seeking for a key in bottom of file, but"
                                + " key exists in top of file, failed on seekBefore(midkey)");
                    }
                    return 1;
                }
            }
            if (atEnd) {
                // skip the 'reseek' and just return 1.
                return 1;
            }
            return delegate.reseekTo(key);
        }

        @Override
        public boolean seekBefore(Cell key) throws IOException {
            if (top) {
                Cell fk = new KeyValue.KeyOnlyKeyValue(getFirstKey(), 0, getFirstKey().length);
                if (getComparator().compareOnlyKeyPortion(key, fk) <= 0) {
                    return false;
                }
            } else {
                // The equals sign isn't strictly necessary; it's just here to
                // be consistent with seekTo.
                if (getComparator().compareOnlyKeyPortion(key, splitCell) >= 0) {
                    return this.delegate.seekBefore(splitCell);
                }
            }
            return this.delegate.seekBefore(key);
        }
    };
}

From source file: org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

private double[] readDoubles(int chunkSize, ByteBuffer input) throws IOException {
    byte[] doubleBytes = new byte[chunkSize];
    System.arraycopy(input.array(), input.arrayOffset() + input.position(), doubleBytes, 0, chunkSize);
    double[] vals = Snappy.uncompressDoubleArray(doubleBytes);
    input.position(input.position() + chunkSize);
    return vals;
}
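
Note that readDoubles, unlike the other readers above, first copies the arrayOffset()-adjusted region into a temporary array; presumably the uncompressDoubleArray overload used here accepts only a whole byte[], not an (array, offset, length) triple.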

From source file: org.apache.nutch.store.readable.StoreReadable.java

public Parse getParse(String url, WebPage page) {
    HTMLMetaTags metaTags = new HTMLMetaTags();
    System.out.println("[STORE-READABLE]getParse-------------------------------------------------------------");
    String baseUrl = TableUtil.toString(page.getBaseUrl());
    URL base;
    try {
        base = new URL(baseUrl);
    } catch (MalformedURLException e) {
        return ParseStatusUtils.getEmptyParse(e, getConf());
    }

    String text = "";
    String title = "";
    Outlink[] outlinks = new Outlink[0];

    // parse the content
    DocumentFragment root;
    try {
        ByteBuffer contentInOctets = page.getContent();
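        // Stream the raw page bytes without copying: start at arrayOffset() +
        // position() within the backing array and read remaining() bytes.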
        InputSource input = new InputSource(new ByteArrayInputStream(contentInOctets.array(),
                contentInOctets.arrayOffset() + contentInOctets.position(), contentInOctets.remaining()));

        EncodingDetector detector = new EncodingDetector(conf);
        detector.autoDetectClues(page, true);
        detector.addClue(sniffCharacterEncoding(contentInOctets), "sniffed");
        String encoding = detector.guessEncoding(page, defaultCharEncoding);

        page.getMetadata().put(new Utf8(Metadata.ORIGINAL_CHAR_ENCODING),
                ByteBuffer.wrap(Bytes.toBytes(encoding)));
        page.getMetadata().put(new Utf8(Metadata.CHAR_ENCODING_FOR_CONVERSION),
                ByteBuffer.wrap(Bytes.toBytes(encoding)));

        input.setEncoding(encoding);
        if (LOG.isTraceEnabled()) {
            LOG.trace("Parsing...");
        }
        root = parse(input);
    } catch (IOException e) {
        LOG.error("Failed with the following IOException: ", e);
        return ParseStatusUtils.getEmptyParse(e, getConf());
    } catch (DOMException e) {
        LOG.error("Failed with the following DOMException: ", e);
        return ParseStatusUtils.getEmptyParse(e, getConf());
    } catch (SAXException e) {
        LOG.error("Failed with the following SAXException: ", e);
        return ParseStatusUtils.getEmptyParse(e, getConf());
    } catch (Exception e) {
        LOG.error("Failed with the following Exception: ", e);
        return ParseStatusUtils.getEmptyParse(e, getConf());
    }

    // get meta directives
    HTMLMetaProcessor.getMetaTags(metaTags, root, base);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Meta tags for " + base + ": " + metaTags.toString());
    }
    // check meta directives
    if (!metaTags.getNoIndex()) { // okay to index
        StringBuilder sb = new StringBuilder();
        if (LOG.isTraceEnabled()) {
            LOG.trace("Getting text...");
        }
        utils.getText(sb, root); // extract text
        text = sb.toString();
        sb.setLength(0);
        if (LOG.isTraceEnabled()) {
            LOG.trace("Getting title...");
        }
        utils.getTitle(sb, root); // extract title
        title = sb.toString().trim();
    }

    if (!metaTags.getNoFollow()) { // okay to follow links
        ArrayList<Outlink> l = new ArrayList<Outlink>(); // extract outlinks
        URL baseTag = utils.getBase(root);
        if (LOG.isTraceEnabled()) {
            LOG.trace("Getting links...");
        }
        utils.getOutlinks(baseTag != null ? baseTag : base, l, root);
        outlinks = l.toArray(new Outlink[l.size()]);
        if (LOG.isTraceEnabled()) {
            LOG.trace("found " + outlinks.length + " outlinks in " + url);
        }
    }

    ParseStatus status = ParseStatus.newBuilder().build();
    status.setMajorCode((int) ParseStatusCodes.SUCCESS);
    if (metaTags.getRefresh()) {
        status.setMinorCode((int) ParseStatusCodes.SUCCESS_REDIRECT);
        status.getArgs().add(new Utf8(metaTags.getRefreshHref().toString()));
        status.getArgs().add(new Utf8(Integer.toString(metaTags.getRefreshTime())));
    }

    String strJo = addJsonToPage(url, page);

    //        storeJsonToSchema(url, page ,strJo);
    page.setReadable(new Utf8(strJo));

    Parse parse = new Parse(text, title, outlinks, status, strJo);
    parse = htmlParseFilters.filter(url, page, parse, metaTags, root);

    if (metaTags.getNoCache()) { // not okay to cache
        page.getMetadata().put(new Utf8(Nutch.CACHING_FORBIDDEN_KEY),
                ByteBuffer.wrap(Bytes.toBytes(cachingPolicy)));
    }
    parse.setJsonRead(strJo);

    return parse;
}

From source file: org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

/**
 * Write compressed data to the output ByteBuffer and update the position of the buffer.
 * @param primitives An array of primitive data types.
 * @param output The buffer to write into.
 * @return The number of bytes written.
 * @throws IOException
 */
private int writePrimitives(byte[] primitives, ByteBuffer output) throws IOException {
    int bytesWritten = Snappy.rawCompress(primitives, 0, primitives.length, output.array(),
            output.arrayOffset() + output.position());
    output.position(output.position() + bytesWritten);
    return bytesWritten;
}
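
A hypothetical caller (not part of SnappyCompDe) showing how the output buffer might be sized; Snappy.maxCompressedLength() bounds the worst-case compressed size:

private ByteBuffer compressToBuffer(byte[] primitives) throws IOException {
    ByteBuffer output = ByteBuffer.allocate(Snappy.maxCompressedLength(primitives.length));
    writePrimitives(primitives, output);
    output.flip(); // limit = bytes written, position = 0, ready for reading
    return output;
}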

From source file: cn.iie.haiep.hbase.value.Bytes.java

/**
 * Converts the given byte buffer, from its array offset to its limit, to
 * a string. The position and the mark are ignored.
 *
 * @param buf a byte buffer
 * @return a string representation of the buffer's binary contents
 */
public static String toStringBinary(ByteBuffer buf) {
    if (buf == null)
        return "null";
    return toStringBinary(buf.array(), buf.arrayOffset(), buf.limit());
}
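
A minimal usage sketch (the wrapped bytes are made up for illustration):

ByteBuffer buf = ByteBuffer.wrap(new byte[] { 'h', 'i', 0x01 });
System.out.println(Bytes.toStringBinary(buf)); // prints hi\x01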

From source file: org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java

private int writePrimitives(long[] primitives, ByteBuffer output) throws IOException {
    int bytesWritten = Snappy.rawCompress(primitives, 0, primitives.length * Long.SIZE / Byte.SIZE,
            output.array(), output.arrayOffset() + output.position());
    output.position(output.position() + bytesWritten);
    return bytesWritten;
}
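
Note that the length passed to Snappy.rawCompress is given in bytes (primitives.length * Long.SIZE / Byte.SIZE): the snappy-java raw API measures primitive-array input in bytes regardless of the element type.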