Example usage for java.nio ByteBuffer clear

List of usage examples for java.nio ByteBuffer clear

Introduction

On this page you can find example usages for java.nio ByteBuffer clear.

Prototype

public final Buffer clear() 

Source Link

Document

Clears this buffer.

Usage

From source file:org.apache.kylin.engine.mr.common.CubeStatsWriter.java

/**
 * Serializes per-cuboid HLL statistics into a Hadoop sequence file under
 * {@code outputPath}. Reserved keys hold job-level metadata: -1 = mapper
 * overlap ratio, -2 = mapper number, 0 = sampling percentage; every positive
 * key is a cuboid id mapped to its serialized HLL registers.
 *
 * @param conf Hadoop configuration used to create the writer
 * @param outputPath directory receiving the statistics file
 * @param cuboidHLLMap cuboid id to HLL counter
 * @param samplingPercentage sampling percentage recorded at key 0
 * @param mapperNumber mapper count recorded at key -2
 * @param mapperOverlapRatio overlap ratio recorded at key -1
 * @throws IOException if writing the sequence file fails
 */
public static void writeCuboidStatistics(Configuration conf, Path outputPath, //
        Map<Long, HLLCounter> cuboidHLLMap, int samplingPercentage, int mapperNumber, double mapperOverlapRatio)
        throws IOException {
    final Path seqFilePath = new Path(outputPath, BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME);

    // Emit cuboids in ascending id order so the output is deterministic.
    final List<Long> sortedCuboids = new ArrayList<Long>(cuboidHLLMap.keySet());
    Collections.sort(sortedCuboids);

    final ByteBuffer valueBuf = ByteBuffer.allocate(BufferedMeasureCodec.DEFAULT_BUFFER_SIZE);
    final SequenceFile.Writer writer = SequenceFile.createWriter(conf, SequenceFile.Writer.file(seqFilePath),
            SequenceFile.Writer.keyClass(LongWritable.class),
            SequenceFile.Writer.valueClass(BytesWritable.class));
    try {
        // Metadata rows first, at reserved (non-cuboid) keys.
        writer.append(new LongWritable(-1), new BytesWritable(Bytes.toBytes(mapperOverlapRatio)));
        writer.append(new LongWritable(-2), new BytesWritable(Bytes.toBytes(mapperNumber)));
        writer.append(new LongWritable(0L), new BytesWritable(Bytes.toBytes(samplingPercentage)));

        for (long cuboidId : sortedCuboids) {
            // Reuse one buffer per record: reset, serialize registers, flip for reading.
            valueBuf.clear();
            cuboidHLLMap.get(cuboidId).writeRegisters(valueBuf);
            valueBuf.flip();
            writer.append(new LongWritable(cuboidId), new BytesWritable(valueBuf.array(), valueBuf.limit()));
        }
    } finally {
        IOUtils.closeQuietly(writer);
    }
}

From source file:hivemall.fm.FactorizationMachineUDTF.java

/**
 * Persists the filled portion of {@code srcBuf} into the destination segment
 * and resets the buffer so the caller can refill it.
 *
 * @param srcBuf source buffer; its content from 0 to position() is written
 * @param dst backing file segment receiving the bytes
 * @throws HiveException when the underlying I/O operation fails
 */
private static void writeBuffer(@Nonnull ByteBuffer srcBuf, @Nonnull NioStatefullSegment dst)
        throws HiveException {
    srcBuf.flip(); // switch the buffer from fill mode to drain mode
    try {
        dst.write(srcBuf);
    } catch (final IOException e) {
        throw new HiveException("Exception causes while writing a buffer to file", e);
    }
    srcBuf.clear(); // make the buffer writable again for the caller
}

From source file:hivemall.mf.OnlineMatrixFactorizationUDTF.java

/**
 * Flushes the accumulated records in {@code srcBuf} to the fixed segment at
 * the given file position, then clears the buffer so it can be refilled.
 *
 * @param srcBuf buffer holding serialized records up to position()
 * @param dst fixed-size segment backing file
 * @param lastWritePos file position at which the records are written
 * @throws HiveException when writing records to the segment fails
 */
protected static void writeBuffer(@Nonnull final ByteBuffer srcBuf, @Nonnull final NioFixedSegment dst,
        final long lastWritePos) throws HiveException {
    // TODO asynchronous write in the background
    srcBuf.flip(); // expose [0, position) for reading
    try {
        dst.writeRecords(lastWritePos, srcBuf);
    } catch (final IOException e) {
        throw new HiveException("Exception causes while writing records to : " + lastWritePos, e);
    }
    srcBuf.clear(); // reset for the next batch of records
}

From source file:org.jtrfp.trcl.core.Texture.java

/**
 * Extracts one tile of a square RGBA8888 texture. The texture is treated as a
 * {@code 2^quadDepth x 2^quadDepth} grid of equally sized tiles and the tile
 * at column {@code x}, row {@code y} is copied into a new direct buffer.
 *
 * <p>Side effects: the {@code input} buffer's position and limit are clobbered
 * by the row-wise windowing. The returned buffer is NOT rewound; its position
 * equals its capacity when the method returns (callers relying on this must
 * rewind/flip themselves).</p>
 *
 * @param input source texture, 4 bytes per pixel, assumed square — TODO confirm callers only pass square textures
 * @param quadDepth number of quad-split levels; the texture is split into 2^quadDepth tiles per side
 * @param x tile column index (0-based)
 * @param y tile row index (0-based)
 * @return a newly allocated direct buffer containing the tile's pixels
 */
public static ByteBuffer fragmentRGBA(ByteBuffer input, int quadDepth, int x, int y) {
    final int bytesPerPixel = 4; // RGBA8888
    final int originalSideLen = (int) Math.sqrt(input.capacity() / bytesPerPixel);
    // Exact integer arithmetic instead of Math.pow: 2^quadDepth tiles per side.
    final int splitAmount = 1 << quadDepth;
    final int newSideLen = originalSideLen / splitAmount;
    ByteBuffer result = ByteBuffer.allocateDirect(newSideLen * newSideLen * bytesPerPixel);
    for (int row = y * newSideLen; row < (y + 1) * newSideLen; row++) {
        // Window the source buffer to this row's horizontal slice of the tile.
        input.clear();
        input.limit((x + 1) * newSideLen * bytesPerPixel + row * originalSideLen * bytesPerPixel);
        input.position(x * newSideLen * bytesPerPixel + row * originalSideLen * bytesPerPixel);
        result.put(input);
    }
    return result;
}

From source file:org.jtrfp.trcl.core.Texture.java

/**
 * Expands 8-bit palette-indexed pixels into an RGBA8888 direct buffer.
 * Each source byte is an unsigned index into {@code palette}; the matching
 * color's red, green, blue, and alpha components are written in that order.
 * The result is rewound so callers can read it from position 0.
 *
 * @param indexedPixels palette indices, consumed from the current position
 * @param palette lookup table of up to 256 colors
 * @return a new direct buffer of {@code capacity * 4} bytes, position 0
 */
public static ByteBuffer indexed2RGBA8888(ByteBuffer indexedPixels, Color[] palette) {
    ByteBuffer rgba = ByteBuffer.allocateDirect(indexedPixels.capacity() * 4);
    final int numPixels = indexedPixels.capacity();
    for (int i = 0; i < numPixels; i++) {
        // Mask to treat the index byte as unsigned.
        final Color c = palette[indexedPixels.get() & 0xFF];
        rgba.put((byte) c.getRed()).put((byte) c.getGreen()).put((byte) c.getBlue()).put((byte) c.getAlpha());
    }
    rgba.clear(); // rewind: limit already equals capacity, so this only resets position
    return rgba;
}

From source file:hivemall.GeneralLearnerBaseUDTF.java

/**
 * Drains the readable contents of {@code srcBuf} into the stateful segment
 * and prepares the buffer for subsequent writes by the caller.
 *
 * @param srcBuf buffer whose region [0, position) is persisted
 * @param dst destination segment that receives the bytes
 * @throws HiveException if the underlying write fails
 */
private static void writeBuffer(@Nonnull ByteBuffer srcBuf, @Nonnull NioStatefulSegment dst)
        throws HiveException {
    srcBuf.flip(); // read mode: limit = old position, position = 0
    try {
        dst.write(srcBuf);
    } catch (final IOException e) {
        throw new HiveException("Exception causes while writing a buffer to file", e);
    }
    srcBuf.clear(); // back to fill mode for the next round
}

From source file:com.sunchenbin.store.feilong.core.io.IOReaderUtil.java

/**
 * Reads the entire content of {@code file} and returns it decoded with
 * {@code charsetName} (falls back to {@code DEFAULT_CHARSET_NAME} when the
 * charset name is null or empty).
 *
 * <p>Decoding is done incrementally with a {@link java.nio.charset.CharsetDecoder}
 * so that a multi-byte character split across two channel reads is decoded
 * correctly; decoding each chunk independently with {@code Charset#decode}
 * (as this method previously did) corrupts such characters. Malformed and
 * unmappable input is replaced, matching {@code Charset#decode} semantics.</p>
 *
 * @param file
 *            the file to read; must not be null
 * @param charsetName
 *            charset name; when null or empty, {@code DEFAULT_CHARSET_NAME} is used
 * @return the file content
 * @see org.apache.commons.io.FileUtils#readFileToString(File, Charset)
 */
public static String getFileContent(File file, String charsetName) {
    if (Validator.isNullOrEmpty(file)) {
        throw new NullPointerException("the file is null or empty!");
    }
    // Chunk size for channel reads; also used to presize the result builder.
    final int capacity = 186140;
    ByteBuffer byteBuffer = ByteBuffer.allocateDirect(capacity);
    StringBuilder sb = new StringBuilder(capacity);

    FileInputStream fileInputStream = null;
    try {
        fileInputStream = new FileInputStream(file);

        // Closing fileInputStream (in finally) also closes this channel.
        FileChannel fileChannel = fileInputStream.getChannel();
        String useCharsetName = Validator.isNullOrEmpty(charsetName) ? DEFAULT_CHARSET_NAME : charsetName;
        // REPLACE mirrors Charset#decode's handling of malformed/unmappable bytes.
        java.nio.charset.CharsetDecoder decoder = Charset.forName(useCharsetName).newDecoder()
                .onMalformedInput(java.nio.charset.CodingErrorAction.REPLACE)
                .onUnmappableCharacter(java.nio.charset.CodingErrorAction.REPLACE);
        CharBuffer charBuffer = CharBuffer.allocate(capacity);
        java.nio.charset.CoderResult result;
        while (fileChannel.read(byteBuffer) != -1) {
            byteBuffer.flip();
            // endOfInput=false: an incomplete trailing byte sequence stays in
            // byteBuffer (preserved by compact) and is completed next read.
            do {
                result = decoder.decode(byteBuffer, charBuffer, false);
                drain(charBuffer, sb);
            } while (result.isOverflow());
            byteBuffer.compact();
        }
        // EOF: decode the buffered tail and flush the decoder's internal state.
        byteBuffer.flip();
        do {
            result = decoder.decode(byteBuffer, charBuffer, true);
            drain(charBuffer, sb);
        } while (result.isOverflow());
        do {
            result = decoder.flush(charBuffer);
            drain(charBuffer, sb);
        } while (result.isOverflow());
        return sb.toString();

    } catch (IOException e) {
        // FileNotFoundException is an IOException, so one catch covers both
        // cases the previous code handled separately.
        throw new UncheckedIOException(e);
    } finally {
        IOUtils.closeQuietly(fileInputStream);
    }
}

/** Appends the decoded chars to {@code sb} and resets the buffer for reuse. */
private static void drain(CharBuffer charBuffer, StringBuilder sb) {
    charBuffer.flip();
    sb.append(charBuffer);
    charBuffer.clear();
}

From source file:Main.java

/**
 * Decodes the specified URL as per RFC 3986, i.e. transforms
 * percent-encoded octets to characters by decoding with the UTF-8 character
 * set. This function is primarily intended for usage with
 * {@link URL} which unfortunately does not enforce proper URLs. As
 * such, this method will leniently accept invalid characters or malformed
 * percent-encoded octets and simply pass them literally through to the
 * result string. Except for rare edge cases, this will make unencoded URLs
 * pass through unaltered./* ww w  . jav a2  s  . c  o  m*/
 *
 * @param url The URL to decode, may be {@code null}.
 * @return The decoded URL or {@code null} if the input was
 * {@code null}.
 */
static String decodeUrl(String url) {
    String decoded = url;
    if (url != null && url.indexOf('%') >= 0) {
        int n = url.length();
        StringBuffer buffer = new StringBuffer();
        ByteBuffer bytes = ByteBuffer.allocate(n);
        for (int i = 0; i < n;) {
            if (url.charAt(i) == '%') {
                try {
                    do {
                        byte octet = (byte) Integer.parseInt(url.substring(i + 1, i + 3), 16);
                        bytes.put(octet);
                        i += 3;
                    } while (i < n && url.charAt(i) == '%');
                    continue;
                } catch (RuntimeException e) {
                    // malformed percent-encoded octet, fall through and
                    // append characters literally
                } finally {
                    if (bytes.position() > 0) {
                        bytes.flip();
                        buffer.append(UTF8.decode(bytes).toString());
                        bytes.clear();
                    }
                }
            }
            buffer.append(url.charAt(i++));
        }
        decoded = buffer.toString();
    }
    return decoded;
}

From source file:hivemall.recommend.SlimUDTF.java

/**
 * Writes everything accumulated in {@code srcBuf} to the stateful segment,
 * then empties the buffer so the caller can keep filling it.
 *
 * @param srcBuf buffer whose bytes in [0, position) are written out
 * @param dst segment file that stores the bytes
 * @throws HiveException if an I/O error occurs during the write
 */
private static void writeBuffer(@Nonnull final ByteBuffer srcBuf, @Nonnull final NioStatefulSegment dst)
        throws HiveException {
    srcBuf.flip(); // prepare the buffer for draining
    try {
        dst.write(srcBuf);
    } catch (final IOException e) {
        throw new HiveException("Exception causes while writing a buffer to file", e);
    }
    srcBuf.clear(); // prepare the buffer for refilling
}

From source file:Main.java

/**
 * Discards data from the buffer up to the first SPS, where {@code data.position()} is interpreted
 * as the length of the buffer.
 * <p>
 * When the method returns, {@code data.position()} will contain the new length of the buffer. If
 * the buffer is not empty it is guaranteed to start with an SPS.
 *
 * @param data Buffer containing start code delimited NAL units.
 */
public static void discardToSps(ByteBuffer data) {
    int length = data.position();
    int consecutiveZeros = 0;
    int offset = 0;
    // Scan for a 00 00 00 01 start code whose following NAL header byte has
    // nal_unit_type (the low 5 bits) equal to NAL_UNIT_TYPE_SPS.
    while (offset + 1 < length) {
        int value = data.get(offset) & 0xFF;
        if (consecutiveZeros == 3) {
            if (value == 1 && (data.get(offset + 1) & 0x1F) == NAL_UNIT_TYPE_SPS) {
                // Copy from this NAL unit onwards to the start of the buffer.
                // offset - 3 points at the first zero of the start code prefix.
                ByteBuffer offsetData = data.duplicate();
                offsetData.position(offset - 3);
                offsetData.limit(length);
                data.position(0);
                data.put(offsetData);
                return;
            }
        } else if (value == 0) {
            consecutiveZeros++;
        }
        // Note: when consecutiveZeros == 3 and value == 0 neither branch above
        // runs, so the count intentionally stays at 3 — a longer zero run
        // (e.g. 00 00 00 00 01) still matches at its last three zeros.
        if (value != 0) {
            consecutiveZeros = 0;
        }
        offset++;
    }
    // Empty the buffer if the SPS NAL unit was not found.
    data.clear();
}