Example usage for java.nio.ByteBuffer.limit()

List of usage examples for java.nio.ByteBuffer.limit()

Introduction

On this page you can find example usages of java.nio.ByteBuffer.limit().

Prototype

public final int limit() 

Document

Returns the limit of this buffer.
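
Before the examples, here is a minimal, self-contained sketch (not taken from any of the sources below; the LimitDemo class name is illustrative) of what limit() reports. After flip(), the limit marks the end of the readable data, which is why several of the examples below size their byte arrays with it:

import java.nio.ByteBuffer;

public class LimitDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16); // position=0, limit=16 (capacity)
        buf.put(new byte[] { 1, 2, 3 });          // position=3, limit=16
        buf.flip();                               // position=0, limit=3
        byte[] out = new byte[buf.limit()];       // size the array from the limit
        buf.get(out);                             // drains exactly limit() bytes
        System.out.println(out.length);           // prints 3
    }
}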

Usage

From source file:org.apache.eagle.alert.metric.MetricSystemTest.java

private SimpleConsumer assertMsgFromKafka(KafkaEmbedded kafkaEmbedded) throws IOException {
    SimpleConsumer consumer = new SimpleConsumer("localhost", kafkaEmbedded.getPort(), 100000, 64 * 1024,
            clientName);
    long readOffset = getLastOffset(consumer, TOPIC, 0, kafka.api.OffsetRequest.EarliestTime(), clientName);
    FetchRequest req = new FetchRequestBuilder().clientId(clientName).addFetch(TOPIC, 0, readOffset, 100000)
            .build();
    FetchResponse fetchResponse = consumer.fetch(req);
    Map<Integer, Map<String, String>> resultCollector = new HashMap<>();
    int count = 1;
    for (MessageAndOffset messageAndOffset : fetchResponse.messageSet(TOPIC, 0)) {
        long currentOffset = messageAndOffset.offset();
        if (currentOffset < readOffset) {
            System.out.println("found an old offset: " + currentOffset + " expecting: " + readOffset);
            continue;
        }

        readOffset = messageAndOffset.nextOffset();
        ByteBuffer payload = messageAndOffset.message().payload();

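        // payload() returns a buffer positioned at 0, so limit() equals the message length here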
        byte[] bytes = new byte[payload.limit()];
        payload.get(bytes);
        String message = new String(bytes, "UTF-8");
        Map<String, String> covertedMsg = JsonUtils.mapper.readValue(message, Map.class);
        covertedMsg.remove("timestamp");
        resultCollector.put(count, covertedMsg);
        count++;
    }
    Assert.assertEquals(
            "{1={name=heap.committed, value=175636480}, 2={name=heap.init, value=262144000}, 3={name=heap.max, value=3704094720}, 4={name=heap.usage, value=0.01570181876990446}, 5={name=heap.used, value=58491576}, 6={name=name, value=testname}, 7={name=non-heap.committed, value=36405248}, 8={name=non-heap.init, value=2555904}, 9={name=non-heap.max, value=-1}, 10={name=non-heap.usage, value=-3.5588712E7}, 11={name=non-heap.used, value=35596496}, 12={name=pools.Code-Cache.usage, value=0.020214080810546875}, 13={name=pools.Compressed-Class-Space.usage, value=0.0035556256771087646}, 14={name=pools.Metaspace.usage, value=0.9777212526244751}, 15={name=pools.PS-Eden-Space.usage, value=0.03902325058129612}, 16={name=pools.PS-Old-Gen.usage, value=0.001959359247654333}, 17={name=pools.PS-Survivor-Space.usage, value=0.0}, 18={name=total.committed, value=212107264}, 19={name=total.init, value=264699904}, 20={name=total.max, value=3704094719}, 21={name=total.used, value=94644240}, 22={name=uptime, value=testuptime}, 23={name=vendor, value=testvendor}}",
            resultCollector.toString());
    return consumer;
}

From source file:com.jonbanjo.cups.operations.HttpPoster.java

static OperationResult sendRequest(URL url, ByteBuffer ippBuf, InputStream documentStream, final AuthInfo auth)
        throws IOException {

    final OperationResult opResult = new OperationResult();

    if (ippBuf == null) {
        return null;
    }

    if (url == null) {
        return null;
    }

    DefaultHttpClient client = new DefaultHttpClient();

    // will not work with older versions of CUPS!
    client.getParams().setParameter("http.protocol.version", HttpVersion.HTTP_1_1);
    client.getParams().setParameter("http.socket.timeout", SOCKET_TIMEOUT);
    client.getParams().setParameter("http.connection.timeout", CONNECTION_TIMEOUT);
    client.getParams().setParameter("http.protocol.content-charset", "UTF-8");
    client.getParams().setParameter("http.method.response.buffer.warnlimit", Integer.valueOf(8092));
    // probably not working with older CUPS versions
    client.getParams().setParameter("http.protocol.expect-continue", Boolean.TRUE);

    HttpPost httpPost;

    try {
        httpPost = new HttpPost(url.toURI());
    } catch (Exception e) {
        System.out.println(e.toString());
        return null;
    }

    httpPost.getParams().setParameter("http.socket.timeout", SOCKET_TIMEOUT);

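    // ippBuf is assumed to be flipped by the caller, so limit() marks the end of the IPP header bytes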
    byte[] bytes = new byte[ippBuf.limit()];
    ippBuf.get(bytes);

    ByteArrayInputStream headerStream = new ByteArrayInputStream(bytes);
    // If we need to send a document, concatenate InputStreams
    InputStream inputStream = headerStream;
    if (documentStream != null) {
        inputStream = new SequenceInputStream(headerStream, documentStream);
    }

    // set length to -1 to advise the entity to read until EOF
    InputStreamEntity requestEntity = new InputStreamEntity(inputStream, -1);

    requestEntity.setContentType(IPP_MIME_TYPE);
    httpPost.setEntity(requestEntity);

    if (auth.reason == AuthInfo.AUTH_REQUESTED) {
        AuthHeader.makeAuthHeader(httpPost, auth);
        if (auth.reason == AuthInfo.AUTH_OK) {
            httpPost.addHeader(auth.getAuthHeader());
        }
    }

    ResponseHandler<byte[]> handler = new ResponseHandler<byte[]>() {
        @Override
        public byte[] handleResponse(HttpResponse response) throws ClientProtocolException, IOException {
            if (response.getStatusLine().getStatusCode() == 401) {
                auth.setHttpHeader(response.getFirstHeader("WWW-Authenticate"));
            } else {
                auth.reason = AuthInfo.AUTH_OK;
            }
            HttpEntity entity = response.getEntity();
            opResult.setHttResult(response.getStatusLine().toString());
            if (entity != null) {
                return EntityUtils.toByteArray(entity);
            } else {
                return null;
            }
        }
    };

    if (url.getProtocol().equals("https")) {

        Scheme scheme = JfSSLScheme.getScheme();
        if (scheme == null)
            return null;
        client.getConnectionManager().getSchemeRegistry().register(scheme);
    }

    byte[] result = client.execute(httpPost, handler);
    IppResponse ippResponse = new IppResponse();

    opResult.setIppResult(ippResponse.getResponse(ByteBuffer.wrap(result)));
    opResult.setAuthInfo(auth);
    client.getConnectionManager().shutdown();
    return opResult;
}

From source file:nextflow.fs.dx.DxUploadOutputStream.java

private void dequeueAndSubmit() {
    log.trace("Entering received loop");

    while (!closed || queue.size() > 0) {
        ByteBuffer buffer;
        try {
            buffer = queue.poll(1, TimeUnit.SECONDS);
            if (buffer == null) {
                // poll timed out with no new chunk; re-check the loop condition
                continue;
            }
            log.trace("File: {} > Received a buffer -- limit: {}", fileId, buffer.limit());
            executor.submit(consumeBuffer0(buffer, ++chunkCount));
        } catch (InterruptedException e) {
            log.trace("File: {} > Got an interrupted exception while waiting new chunk to upload -- cause: {}",
                    fileId, e.getMessage());
        }
    }

    log.trace("Exiting received loop");
}

From source file:net.kungfoo.grizzly.proxy.impl.ConnectingHandler.java

public void outputReady(final NHttpClientConnection conn, final ContentEncoder encoder) {
    System.out.println(conn + " [proxy->origin] output ready");

    HttpContext context = conn.getContext();
    ProxyProcessingInfo proxyTask = (ProxyProcessingInfo) context.getAttribute(ProxyProcessingInfo.ATTRIB);

    synchronized (proxyTask) {
        ConnState connState = proxyTask.getOriginState();
        if (connState != ConnState.REQUEST_SENT && connState != ConnState.REQUEST_BODY_STREAM) {
            throw new IllegalStateException("Illegal target connection state: " + connState);
        }

        try {

            // TODO: proper handling of POST
            ByteBuffer src = proxyTask.getInBuffer();
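            // the staging buffer is expected to be cleared here, so limit() reflects its full writable extent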
            final int srcSize = src.limit();
            if (src.position() != 0) {
                System.out.println(conn + " [proxy->origin] buff not consumed yet");
                return;
            }
            ByteChunk chunk = new ByteChunk(srcSize);
            Request originalRequest = proxyTask.getOriginalRequest();
            int read;
            int encRead = 0;
            long bytesWritten = 0;
            while ((read = originalRequest.doRead(chunk)) != -1) {
                System.out.println(conn + " [proxy->origin] " + read + " bytes read");
                if (read > srcSize) {
                    src = ByteBuffer.wrap(chunk.getBytes(), chunk.getOffset(), read);
                } else {
                    src.put(chunk.getBytes(), chunk.getOffset(), read);
                }
                src.flip();
                encRead = encoder.write(src);
                bytesWritten += encRead;
                src.compact();
                chunk.reset();
                if (encRead == 0) {
                    System.out.println(conn + " [proxy->origin] encoder refused to consume more");
                    break;
                } else {
                    System.out.println(conn + " [proxy->origin] " + encRead + " consumed by encoder");
                }
            }
            System.out.println(conn + " [proxy->origin] " + bytesWritten + " bytes written");
            System.out.println(conn + " [proxy->origin] " + encoder);
            src.compact();

            if (src.position() == 0 && encRead != 0) {
                encoder.complete();
            }
            // Update connection state
            if (encoder.isCompleted()) {
                System.out.println(conn + " [proxy->origin] request body sent");
                proxyTask.setOriginState(ConnState.REQUEST_BODY_DONE);
            } else {
                proxyTask.setOriginState(ConnState.REQUEST_BODY_STREAM);
            }

        } catch (IOException ex) {
            shutdownConnection(conn);
        }
    }
}

From source file:org.apache.hadoop.hbase.io.hfile.TestHFileSeek.java

public void seekTFile() throws IOException {
    int miss = 0;
    long totalBytes = 0;
    FSDataInputStream fsdis = fs.open(path);
    Reader reader = HFile.createReaderFromStream(path, fsdis, fs.getFileStatus(path).getLen(),
            new CacheConfig(conf), conf);
    reader.loadFileInfo();
    KeySampler kSampler = new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(), keyLenGen);
    HFileScanner scanner = reader.getScanner(false, USE_PREAD);
    BytesWritable key = new BytesWritable();
    timer.reset();
    timer.start();
    for (int i = 0; i < options.seekCount; ++i) {
        kSampler.next(key);
        byte[] k = new byte[key.getLength()];
        System.arraycopy(key.getBytes(), 0, k, 0, key.getLength());
        if (scanner.seekTo(KeyValue.createKeyValueFromKey(k)) >= 0) {
            ByteBuffer bbkey = scanner.getKey();
            ByteBuffer bbval = scanner.getValue();
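            // the scanner returns buffers sized to the current cell, so limit() is the key/value byte length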
            totalBytes += bbkey.limit();
            totalBytes += bbval.limit();
        } else {
            ++miss;
        }
    }
    timer.stop();
    System.out.printf("time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n", timer.toString(),
            NanoTimer.nanoTimeToString(timer.read() / options.seekCount), options.seekCount - miss, miss,
            (double) totalBytes / 1024 / (options.seekCount - miss));

}

From source file:org.dbmfs.custom.ApiFilesystem.java

public int read(String path, Object fh, ByteBuffer buf, long offset) throws FuseException {
    log.info("read:" + path + " offset:" + offset + " buf.limit:" + buf.limit());
    if (fh == null)
        return Errno.EBADE;
    try {
        // read up to buf.limit() bytes from the file, starting at the given offset
        path = DbmfsUtil.convertRealPath(path.trim());

        int readLen = dbmfsCore.readValue(path, offset, buf.limit(), buf);
    } catch (FuseException fe) {
        throw fe;
    } catch (Exception e) {
        throw new FuseException(e);
    }
    return 0;
}

From source file:org.alfresco.patch.PatchServiceImpl.java

@SuppressWarnings("resource")
@Override
public PatchDocument getPatch(MultiPart resource) throws IOException {
    Integer blockSize = null;
    Integer matchCount = null;

    List<Integer> matchedBlocks = null;
    List<Patch> patches = new LinkedList<>();

    // This will iterate the individual parts of the multipart response
    for (BodyPart bodyPart : resource.getBodyParts()) {
        if (bodyPart instanceof FormDataMultiPart) {
            System.out.printf("Multipart Body Part [Mime Type: %s]\n", bodyPart.getMediaType());

            InputStream is = null;
            Integer size = null;
            Integer lastMatchIndex = null;

            FormDataMultiPart mp = (FormDataMultiPart) bodyPart;
            for (BodyPart bodyPart1 : mp.getBodyParts()) {
                ContentDisposition contentDisposition = bodyPart1.getContentDisposition();
                if (contentDisposition instanceof FormDataContentDisposition) {
                    FormDataContentDisposition cd = (FormDataContentDisposition) contentDisposition;
                    String name = cd.getName();

                    if (name.equals("p_size")) {
                        size = Integer.parseInt((String) bodyPart1.getEntity());
                    } else if (name.equals("p_last_match_idx")) {
                        lastMatchIndex = Integer.parseInt((String) bodyPart1.getEntity());
                    } else if (name.equals("p_stream")) {
                        is = (InputStream) bodyPart1.getEntity();
                    }
                }
            }

            ByteBuffer bb = ByteBuffer.allocate(1024 * 20); // TODO
            ReadableByteChannel channel = Channels.newChannel(is);
            channel.read(bb);
            bb.flip();
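            // after flip(), limit() is the number of bytes actually read from the channel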
            byte[] buffer = new byte[bb.limit()];
            bb.get(buffer);
            Patch patch = new Patch(lastMatchIndex, size, buffer);
            patches.add(patch);
        } else {
            System.out.printf("Embedded Body Part [Mime Type: %s, Length: %s]\n", bodyPart.getMediaType(),
                    bodyPart.getContentDisposition().getSize());

            ContentDisposition contentDisposition = bodyPart.getContentDisposition();
            if (contentDisposition instanceof FormDataContentDisposition) {
                FormDataContentDisposition cd = (FormDataContentDisposition) contentDisposition;
                String name = cd.getName();

                if (name.equals("p_block_size")) {
                    blockSize = Integer.parseInt((String) bodyPart.getEntity());
                } else if (name.equals("p_match_count")) {
                    matchCount = Integer.parseInt((String) bodyPart.getEntity());
                } else if (name.equals("p_matched_blocks")) {
                    String matchedBlocksStr = (String) bodyPart.getEntity();
                    List<String> l = Arrays.asList(matchedBlocksStr.split(","));
                    matchedBlocks = l.stream().filter(s -> s != null && !s.equals(""))
                            .map(s -> Integer.parseInt(s)).collect(Collectors.toList());
                }
            }
        }
    }

    PatchDocument patchDocument = new PatchDocument(blockSize, matchedBlocks, patches);
    return patchDocument;
}

From source file:org.red5.stream.http.servlet.TransportSegmentFeeder.java

/**
 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
 *      response)
 */
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    log.debug("Segment feed requested");
    // get red5 context and segmenter
    if (service == null) {
        ApplicationContext appCtx = (ApplicationContext) getServletContext()
                .getAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE);
        service = (SegmenterService) appCtx.getBean("segmenter.service");
    }
    // get the requested stream / segment
    String servletPath = request.getServletPath();
    String streamName = servletPath.split("\\.")[0];
    log.debug("Stream name: {}", streamName);
    if (service.isAvailable(streamName)) {
        response.setContentType("video/MP2T");
        // data segment
        Segment segment = null;
        // setup buffers and output stream
        byte[] buf = new byte[188];
        ByteBuffer buffer = ByteBuffer.allocate(188);
        ServletOutputStream sos = response.getOutputStream();
        // loop segments
        while ((segment = service.getSegment(streamName)) != null) {
            do {
                buffer = segment.read(buffer);
                // log.trace("Limit - position: {}", (buffer.limit() - buffer.position()));
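                // limit() - position() is the buffer's remaining() count; a full MPEG-TS packet is 188 bytes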
                if ((buffer.limit() - buffer.position()) == 188) {
                    buffer.get(buf);
                    // write down the output stream
                    sos.write(buf);
                } else {
                    log.info("Segment result has indicated a problem");
                    // verifies the currently requested stream segment
                    // number against the currently active segment
                    if (service.getSegment(streamName) == null) {
                        log.debug("Requested segment is no longer available");
                        break;
                    }
                }
                buffer.clear();
            } while (segment.hasMoreData());
            log.trace("Segment {} had no more data", segment.getIndex());
            // flush
            sos.flush();
            // segment had no more data
            segment.cleanupThreadLocal();
        }
        buffer.clear();
        buffer = null;
    } else {
        // let requester know that stream segment is not available
        response.sendError(404, "Requested segmented stream not found");
    }
}

From source file:org.apache.hadoop.hive.serde2.lazy.fast.LazySimpleSerializeWrite.java

@Override
public void writeDouble(double v) throws IOException {
    beginPrimitive();
    ByteBuffer b = Text.encode(String.valueOf(v));
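    // Text.encode() returns a flipped buffer: position 0, limit = number of encoded bytes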
    output.write(b.array(), 0, b.limit());
    finishPrimitive();
}

From source file:org.apache.hadoop.hive.serde2.lazy.fast.LazySimpleSerializeWrite.java

@Override
public void writeFloat(float vf) throws IOException {
    beginPrimitive();
    ByteBuffer b = Text.encode(String.valueOf(vf));
    output.write(b.array(), 0, b.limit());
    finishPrimitive();
}