Example usage for com.google.common.io ByteStreams read

Introduction

On this page you can find usage examples for com.google.common.io.ByteStreams.read.

Prototype

public static int read(InputStream in, byte[] b, int off, int len) throws IOException 

Document

Reads some bytes from an input stream and stores them into the buffer array b. Unlike a single InputStream.read(byte[], int, int) call, this method keeps reading until len bytes have been stored or the end of the stream is reached, and it returns the number of bytes actually read. The result is therefore less than len only when the stream is exhausted, and the method never returns -1.
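
The examples below all key off this return value: a short count signals that the stream ended. A minimal, self-contained sketch of that contract (the class name and the in-memory stream are illustrative, not taken from any of the projects below):

import com.google.common.io.ByteStreams;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ByteStreamsReadSketch {
    public static void main(String[] args) throws IOException {
        // A 5-byte stream read into an 8-byte buffer.
        InputStream in = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5 });
        byte[] buffer = new byte[8];
        // Blocks until 8 bytes arrive or the stream ends; here it returns 5.
        int n = ByteStreams.read(in, buffer, 0, buffer.length);
        if (n < buffer.length) {
            System.out.println("stream ended after " + n + " bytes");
        }
    }
}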

Usage

From source file:com.spotify.docker.client.LogReader.java

public LogMessage nextMessage() throws IOException {

    // Read header
    final byte[] headerBytes = new byte[HEADER_SIZE];
    final int n = ByteStreams.read(stream, headerBytes, 0, HEADER_SIZE);
    if (n == 0) {
        // ByteStreams.read returns 0 at a clean end of stream
        return null;
    }
    if (n != HEADER_SIZE) {
        throw new EOFException();
    }
    final ByteBuffer header = ByteBuffer.wrap(headerBytes);
    final int streamId = header.get();
    header.position(FRAME_SIZE_OFFSET);
    final int frameSize = header.getInt();

    // Read frame
    final byte[] frame = new byte[frameSize];
    ByteStreams.readFully(stream, frame);
    return new LogMessage(streamId, ByteBuffer.wrap(frame));
}

From source file:org.eclipse.che.plugin.docker.client.LogMessagePumper.java

@Override
void start() throws IOException {
    final byte[] buf = new byte[MAX_LINE_LENGTH];
    StringBuilder lineBuf = null;
    boolean endOfLine = false;
    LogMessage.Type logMessageType = LogMessage.Type.DOCKER;
    for (;;) {
        int r = ByteStreams.read(source, buf, 0, STREAM_HEADER_LENGTH);
        if (r != STREAM_HEADER_LENGTH) {
            // ByteStreams.read returns 0, never -1, once the end of the stream is reached
            if (r != 0) {
                LOG.debug(
                        "Invalid stream, can't read header. Header of each frame must contain 8 bytes but got {}",
                        r);
            }
            if (lineBuf != null && lineBuf.length() > 0) {
                target.process(new LogMessage(logMessageType, lineBuf.toString()));
                lineBuf.setLength(0);
            }
            break;
        }
        logMessageType = getLogMessageType(buf);
        int remaining = getPayloadLength(buf);
        while (remaining > 0) {
            r = source.read(buf, 0, Math.min(remaining, buf.length));
            if (r < 0) {
                break; // the stream ended before the frame payload was fully read
            }
            int offset = 0;
            int lineLength = lineBuf != null ? lineBuf.length() : 0;
            for (int i = 0; i < r; i++, lineLength++) {
                endOfLine = false;
                if (buf[i] == '\n' || buf[i] == '\r' || lineLength > MAX_LINE_LENGTH) {
                    int length = i - offset;
                    boolean isLineFeedFollowed = false;
                    if (buf[i] == '\r') {
                        int nextIndex = i + 1;
                        isLineFeedFollowed = nextIndex < MAX_LINE_LENGTH && nextIndex < r
                                && buf[nextIndex] == '\n';
                        if (!isLineFeedFollowed) {
                            length += 1; // include <CR> char in log message
                        }
                    }
                    if (lineBuf != null && lineBuf.length() > 0) {
                        lineBuf.append(new String(buf, offset, length));
                        target.process(new LogMessage(logMessageType, lineBuf.toString()));
                        lineBuf.setLength(0);
                    } else {
                        target.process(new LogMessage(logMessageType, new String(buf, offset, length)));
                    }

                    if (isLineFeedFollowed) {
                        i++;
                    }
                    offset = i + 1;
                    lineLength = 0;
                    endOfLine = true;
                }
            }
            if (!endOfLine) {
                if (lineBuf == null) {
                    lineBuf = new StringBuilder(MAX_LINE_LENGTH);
                }
                lineBuf.append(new String(buf, offset, r - offset));
            }
            remaining -= r;
        }
    }
}

From source file:org.eclipse.packagedrone.repo.adapter.npm.aspect.NpmExtractor.java

private void perform(final Path file, final Map<String, String> metadata) throws IOException {
    try (final GZIPInputStream gis = new GZIPInputStream(new FileInputStream(file.toFile()));
            final TarArchiveInputStream tis = new TarArchiveInputStream(gis)) {
        TarArchiveEntry entry;
        while ((entry = tis.getNextTarEntry()) != null) {
            if (entry.getName().equals("package/package.json")) {
                final byte[] data = new byte[(int) entry.getSize()];
                // ByteStreams.read fills the whole buffer unless the entry ends early
                ByteStreams.read(tis, data, 0, data.length);

                final String str = StandardCharsets.UTF_8.decode(ByteBuffer.wrap(data)).toString();

                try {
                    // test parse
                    new JsonParser().parse(str);
                    // store
                    metadata.put("package.json", str);
                } catch (final JsonParseException e) {
                    // ignore
                }

                break; // stop parsing the archive
            }
        }

    }
}

From source file:org.jenkinsci.plugins.dockerbuildstep.log.container.DockerLogStreamReader.java

public DockerLogMessage nextMessage() throws IOException {
    // Read header
    // TODO: the header is sent only when TTY is disabled; when TTY is enabled, Docker sends a raw stream without headers
    final byte[] headerBytes = new byte[HEADER_SIZE];
    final int n = ByteStreams.read(stream, headerBytes, 0, HEADER_SIZE);
    if (n == 0) {
        return null;
    }
    if (n != HEADER_SIZE) {
        throw new EOFException();
    }
    final ByteBuffer header = ByteBuffer.wrap(headerBytes);
    final int streamId = header.get();
    header.position(FRAME_SIZE_OFFSET);
    final int frameSize = header.getInt();
    // Read frame
    final byte[] frame = new byte[frameSize];
    ByteStreams.readFully(stream, frame);
    return new DockerLogMessage(streamId, ByteBuffer.wrap(frame));
}

From source file:com.streamsets.pipeline.lib.http.NopHttpRequestFragmenter.java

List<byte[]> fragmentInternal(InputStream is, int fragmentSizeB, int maxSizeB) throws IOException {
    if (fragmentSizeB != maxSizeB) {
        throw new IOException(
                Utils.format("Invalid configuration, fragmentSize '{}' and maxSize '{}' should be the same",
                        fragmentSizeB, maxSizeB));
    }
    byte[] buffer = new byte[fragmentSizeB];
    int read = ByteStreams.read(is, buffer, 0, fragmentSizeB);
    if (is.read() > -1) {
        throw new IOException(Utils.format("Maximum data size '{}' exceeded", maxSizeB));
    }
    byte[] data = buffer;
    if (read < buffer.length) {
        data = new byte[read];
        System.arraycopy(buffer, 0, data, 0, read);
    }
    return ImmutableList.of(data);
}

From source file:com.sismics.reader.core.dao.file.rss.XmlReader.java

/**
 * @param in Input stream
 * @param defaultEnc Default encoding
 * @throws IOException If an I/O error occurs
 */
public XmlReader(InputStream in, String defaultEnc) throws IOException {
    // Read ahead four bytes and check for BOM marks. Extra bytes are unread
    // back to the stream; only BOM bytes are skipped.
    String encoding = defaultEnc;
    byte header[] = new byte[HEADER_SIZE];
    int n, unread;

    PushbackInputStream pushbackStream = new PushbackInputStream(in, HEADER_SIZE);
    n = ByteStreams.read(in, header, 0, header.length);

    if ((header[0] == (byte) 0xEF) && (header[1] == (byte) 0xBB) && (header[2] == (byte) 0xBF)) {
        encoding = "UTF-8";
        unread = n - 3;
    } else if ((header[0] == (byte) 0xFE) && (header[1] == (byte) 0xFF)) {
        encoding = "UTF-16BE";
        unread = n - 2;
    } else if ((header[0] == (byte) 0xFF) && (header[1] == (byte) 0xFE)) {
        encoding = "UTF-16LE";
        unread = n - 2;
    } else if ((header[0] == (byte) 0x00) && (header[1] == (byte) 0x00) && (header[2] == (byte) 0xFE)
            && (header[3] == (byte) 0xFF)) {
        encoding = "UTF-32BE";
        unread = n - 4;
    } else if ((header[0] == (byte) 0xFF) && (header[1] == (byte) 0xFE) && (header[2] == (byte) 0x00)
            && (header[3] == (byte) 0x00)) {
        encoding = "UTF-32LE";
        unread = n - 4;
    } else {
        // Unicode BOM mark not found, unread all bytes and search in the XML header
        unread = n;
        Pattern pattern = Pattern.compile("encoding=\"(.*?)\"");
        Matcher matcher = pattern.matcher(new String(header));
        if (matcher.find()) {
            String enc = matcher.group(1);
            try {
                Charset.forName(enc);
                encoding = enc;
            } catch (Exception e) {
                // Fallback to default encoding if the encoding in the XML header is invalid
            }
        }
    }

    if (unread > 0) {
        pushbackStream.unread(header, (n - unread), unread);
    }

    // Use given encoding
    internalInputStreamReader = new InputStreamReader(pushbackStream, encoding);
}

From source file:com.github.harmanpa.jrecon.io.HttpRandomAccessResource.java

public int read(long location, byte[] bytes) throws IOException {
    HttpGet get = new HttpGet(uri);
    get.addHeader("Range",
            "bytes=" + Long.toString(location) + "-" + Integer.toString((int) location + bytes.length - 1));
    HttpClient client = new DefaultHttpClient();
    HttpResponse response = client.execute(get);
    if (response.getStatusLine().getStatusCode() == 206) {
        HttpEntity entity = response.getEntity();
        return ByteStreams.read(entity.getContent(), bytes, 0, bytes.length);
    }
    throw new IOException(response.getStatusLine().getReasonPhrase());
}

From source file:com.orange.clara.cloud.servicedbdumper.dbdumper.s3.UploadS3StreamImpl.java

@Override
public String upload(InputStream content, Blob blob) throws IOException {
    String key = blob.getMetadata().getName();
    ContentMetadata metadata = blob.getMetadata().getContentMetadata();
    ObjectMetadataBuilder builder = ObjectMetadataBuilder.create().key(key)
            .contentType(MediaType.OCTET_STREAM.toString()).contentDisposition(key)
            .contentEncoding(metadata.getContentEncoding()).contentLanguage(metadata.getContentLanguage())
            .userMetadata(blob.getMetadata().getUserMetadata());
    String uploadId = this.s3Client.initiateMultipartUpload(bucketName, builder.build());
    Integer partNum = 1;
    Payload part = null;
    int bytesRead = 0;
    boolean shouldContinue = true;
    try {
        SortedMap<Integer, String> etags = Maps.newTreeMap();
        while (shouldContinue) {
            byte[] chunk = new byte[CHUNK_SIZE];
            bytesRead = ByteStreams.read(content, chunk, 0, chunk.length);
            if (bytesRead != chunk.length) {
                // A short (possibly zero-length) read means the stream is exhausted
                shouldContinue = false;
                chunk = Arrays.copyOf(chunk, bytesRead);
            }
            part = new ByteArrayPayload(chunk);
            prepareUploadPart(bucketName, key, uploadId, partNum, part, etags);
            partNum++;
        }
        return this.s3Client.completeMultipartUpload(bucketName, key, uploadId, etags);
    } catch (RuntimeException ex) {
        this.s3Client.abortMultipartUpload(bucketName, key, uploadId);
        throw ex;
    }
}

From source file:com.orange.clara.cloud.poc.s3.upload.UploadS3StreamImpl.java

@Override
public String upload(InputStream content, Blob blob) throws IOException {
    String bucketName = this.blobStoreContext.getSpringCloudBlobStore().getBucketName();
    String key = blob.getMetadata().getName();
    ContentMetadata metadata = blob.getMetadata().getContentMetadata();
    ObjectMetadataBuilder builder = ObjectMetadataBuilder.create().key(key)
            .contentType(MediaType.OCTET_STREAM.toString()).contentDisposition(key)
            .contentEncoding(metadata.getContentEncoding()).contentLanguage(metadata.getContentLanguage())
            .userMetadata(blob.getMetadata().getUserMetadata());
    String uploadId = this.s3Client.initiateMultipartUpload(bucketName, builder.build());
    Integer partNum = 1;
    Payload part = null;
    int bytesRead = 0;
    boolean shouldContinue = true;
    try {
        SortedMap<Integer, String> etags = Maps.newTreeMap();
        while (shouldContinue) {
            byte[] chunk = new byte[CHUNK_SIZE];
            bytesRead = ByteStreams.read(content, chunk, 0, chunk.length);
            if (bytesRead != chunk.length) {
                // A short (possibly zero-length) read means the stream is exhausted
                shouldContinue = false;
                chunk = Arrays.copyOf(chunk, bytesRead);
            }
            part = new ByteArrayPayload(chunk);
            prepareUploadPart(bucketName, key, uploadId, partNum, part, etags);
            partNum++;
        }
        return this.s3Client.completeMultipartUpload(bucketName, key, uploadId, etags);
    } catch (RuntimeException ex) {
        this.s3Client.abortMultipartUpload(bucketName, key, uploadId);
        throw ex;
    }
}

From source file:com.orange.clara.cloud.servicedbdumper.filer.s3uploader.UploadS3StreamImpl.java

@Override
public String upload(InputStream content, Blob blob) throws IOException {
    String key = blob.getMetadata().getName();
    String bucketName = this.blobStoreContext.getBucketName();
    ContentMetadata metadata = blob.getMetadata().getContentMetadata();
    ObjectMetadataBuilder builder = ObjectMetadataBuilder.create().key(key)
            .contentType(MediaType.OCTET_STREAM.toString()).contentDisposition(key)
            .contentEncoding(metadata.getContentEncoding()).contentLanguage(metadata.getContentLanguage())
            .userMetadata(blob.getMetadata().getUserMetadata());
    String uploadId = this.s3Client.initiateMultipartUpload(bucketName, builder.build());
    Integer partNum = 1;
    Payload part = null;
    int bytesRead = 0;
    byte[] chunk = null;
    boolean shouldContinue = true;
    SortedMap<Integer, String> etags = Maps.newTreeMap();
    try {
        while (shouldContinue) {
            chunk = new byte[chunkSize];
            bytesRead = ByteStreams.read(content, chunk, 0, chunk.length);
            if (bytesRead != chunk.length) {
                shouldContinue = false;
                chunk = Arrays.copyOf(chunk, bytesRead);
                if (chunk.length == 0) {
                    // Workaround for an apparent JVM memory leak: drop the references
                    // and request a GC. Please don't replicate this at home!
                    chunk = null;
                    part = null;
                    System.gc();
                    break;
                }
            }
            part = new ByteArrayPayload(chunk);
            prepareUploadPart(bucketName, key, uploadId, partNum, part, etags);
            partNum++;
            // Same workaround as above: drop the references and request a GC.
            chunk = null;
            part = null;
            System.gc();
        }
        return this.completeMultipartUpload(bucketName, key, uploadId, etags);
    } catch (RuntimeException ex) {
        this.s3Client.abortMultipartUpload(bucketName, key, uploadId);
        throw ex;
    }
}