Example usage for com.google.common.io ByteSource ByteSource

List of usage examples for com.google.common.io ByteSource ByteSource

Introduction

On this page you can find example usages of the com.google.common.io ByteSource constructor.

Prototype

protected ByteSource() 

Source Link

Document

Constructor for use by subclasses.

Usage

From source file:io.druid.storage.oss.OssTaskLogs.java

/**
 * Streams the task log for {@code taskid} from OSS. A positive {@code offset}
 * starts that many bytes into the log, a negative one tails from the end, and
 * an out-of-range offset streams the whole object.
 */
@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) {

    final String taskKey = getTaskLogKey(taskid);

    // NOTE(review): getObject appears to open a content stream that is never read
    // or closed here -- if the OSS SDK offers getObjectMetadata, prefer it to
    // avoid leaking the connection. TODO confirm against the SDK in use.
    final OSSObject ossObject = ossClient.getObject(config.getLogBucket(), taskKey);

    return Optional.<ByteSource>of(new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            try {
                // Fetch the length once; the original re-queried the metadata in
                // every branch of the offset computation.
                final long length = ossObject.getObjectMetadata().getContentLength();
                final long end = length - 1;
                final long start;

                if (offset > 0 && offset < length) {
                    // Positive offset: start that many bytes from the beginning.
                    start = offset;
                } else if (offset < 0 && -offset < length) {
                    // Negative offset: tail the last |offset| bytes.
                    start = length + offset;
                } else {
                    // Zero or out-of-range offset: stream the whole log.
                    start = 0;
                }

                GetObjectRequest gor = new GetObjectRequest(config.getLogBucket(), taskKey);
                gor.setRange(start, end);

                return ossClient.getObject(gor).getObjectContent();
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
    });
}

From source file:io.druid.storage.s3.S3TaskLogs.java

/**
 * Streams the task log stored in S3 for {@code taskid}. A positive
 * {@code offset} starts that many bytes into the log, a negative one tails
 * from the end, and an out-of-range offset streams the whole object. Absent
 * when the key or bucket does not exist.
 */
@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException {
    final String taskKey = getTaskLogKey(taskid);

    try {
        final StorageObject objectDetails = service.getObjectDetails(config.getS3Bucket(), taskKey, null, null,
                null, null);

        return Optional.<ByteSource>of(new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                try {
                    final long length = objectDetails.getContentLength();
                    final long lastByte = length - 1;

                    // Translate the offset into the first byte of the ranged read.
                    final long firstByte;
                    if (offset > 0 && offset < length) {
                        firstByte = offset;
                    } else if (offset < 0 && -offset < length) {
                        firstByte = length + offset;
                    } else {
                        firstByte = 0;
                    }

                    return service
                            .getObject(config.getS3Bucket(), taskKey, null, null,
                                    new String[] { objectDetails.getETag() }, null, firstByte, lastByte)
                            .getDataInputStream();
                } catch (ServiceException e) {
                    throw new IOException(e);
                }
            }
        });
    } catch (ServiceException e) {
        if (404 == e.getResponseCode() || "NoSuchKey".equals(e.getErrorCode())
                || "NoSuchBucket".equals(e.getErrorCode())) {
            return Optional.absent();
        } else {
            throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e);
        }
    }
}

From source file:com.facebook.buck.io.filesystem.impl.DefaultProjectFilesystemDelegate.java

/**
 * Computes the SHA-1 of the file at the given path (relative to the project
 * root, or already absolute), reading through java.nio.
 */
@Override
public Sha1HashCode computeSha1(Path pathRelativeToProjectRootOrJustAbsolute) throws IOException {
    final Path resolved = getPathForRelativePath(pathRelativeToProjectRootOrJustAbsolute);
    try {
        // `Files.hash(resolved.toFile(), Hashing.sha1())` would be the obvious
        // route, but Jimfs-backed paths throw UnsupportedOperationException from
        // toFile(); opening via java.nio.file.Files.newInputStream() avoids that.
        final ByteSource byteSource = new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                // ByteSource reads via ByteStreams.copy(), which already buffers,
                // so no BufferedInputStream wrapper is needed.
                return Files.newInputStream(resolved);
            }
        };
        return Sha1HashCode.fromHashCode(byteSource.hash(Hashing.sha1()));
    } catch (IOException e) {
        String msg = String.format("Error computing Sha1 for %s: %s", resolved.toString(), e.getMessage());

        throw new IOException(msg, e);
    }
}

From source file:io.druid.storage.hdfs.tasklog.HdfsTaskLogs.java

/**
 * Streams the HDFS task log for {@code taskId}. A negative {@code offset}
 * seeks relative to the end of the file (clamped at 0); a non-negative one
 * seeks from the beginning. Absent when no log file exists.
 */
@Override
public Optional<ByteSource> streamTaskLog(final String taskId, final long offset) throws IOException {
    final Path path = getTaskLogFileFromId(taskId);
    final FileSystem fs = path.getFileSystem(hadoopConfig);
    if (!fs.exists(path)) {
        return Optional.absent();
    }
    return Optional.<ByteSource>of(new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            log.info("Reading task log from: %s", path);
            // Negative offsets count back from the end of the file; the file
            // status is only fetched when it is actually needed.
            final long seekPos = offset < 0
                    ? Math.max(0, fs.getFileStatus(path).getLen() + offset)
                    : offset;
            final FSDataInputStream inputStream = fs.open(path);
            inputStream.seek(seekPos);
            log.info("Read task log from: %s (seek = %,d)", path, seekPos);
            return inputStream;
        }
    });
}

From source file:io.druid.storage.google.GoogleTaskLogs.java

/**
 * Streams the task log for {@code taskid} from Google Cloud Storage. A
 * positive {@code offset} starts that many bytes into the log, a negative one
 * tails from the end, and an out-of-range offset streams the whole object.
 */
@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException {
    final String taskKey = getTaskLogKey(taskid);

    try {
        if (!storage.exists(config.getBucket(), taskKey)) {
            return Optional.absent();
        }

        final long length = storage.size(config.getBucket(), taskKey);

        return Optional.<ByteSource>of(new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                try {
                    final long start;

                    if (offset > 0 && offset < length) {
                        start = offset;
                    } else if (offset < 0 && (-1 * offset) < length) {
                        start = length + offset;
                    } else {
                        start = 0;
                    }

                    InputStream stream = new GoogleByteSource(storage, config.getBucket(), taskKey)
                            .openStream();
                    // InputStream.skip() may skip fewer bytes than requested; the
                    // original ignored its return value and could stream from the
                    // wrong position. Loop until the start offset is reached.
                    long remaining = start;
                    while (remaining > 0) {
                        final long skipped = stream.skip(remaining);
                        if (skipped <= 0) {
                            stream.close();
                            throw new IOException("Unexpected end of stream while skipping to offset " + start);
                        }
                        remaining -= skipped;
                    }

                    return stream;
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }
        });
    } catch (IOException e) {
        throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e);
    }
}

From source file:com.epam.ta.reportportal.util.ResourceCopierBean.java

/**
 * Sets the source as a Spring {@link org.springframework.core.io.Resource}
 * identified by the given location string.
 *
 * @param from resource location understood by the configured resource loader
 */
public void setSource(final String from) {
    final ByteSource resourceSource = new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            // Resolved on each read, not at configuration time.
            return resourceLoader.getResource(from).getInputStream();
        }
    };
    this.source = resourceSource;
}

From source file:io.druid.storage.azure.AzureTaskLogs.java

/**
 * Streams the task log for {@code taskid} from Azure blob storage. A positive
 * {@code offset} starts that many bytes into the log, a negative one tails
 * from the end, and an out-of-range offset streams the whole blob.
 */
@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException {
    final String container = config.getContainer();
    final String taskKey = getTaskLogKey(taskid);

    try {
        if (!azureStorage.getBlobExists(container, taskKey)) {
            return Optional.absent();
        }

        return Optional.<ByteSource>of(new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                try {
                    final long start;
                    final long length = azureStorage.getBlobLength(container, taskKey);

                    if (offset > 0 && offset < length) {
                        start = offset;
                    } else if (offset < 0 && (-1 * offset) < length) {
                        start = length + offset;
                    } else {
                        start = 0;
                    }

                    InputStream stream = azureStorage.getBlobInputStream(container, taskKey);
                    // InputStream.skip() may skip fewer bytes than requested; the
                    // original ignored its return value and could stream from the
                    // wrong position. Loop until the start offset is reached.
                    long remaining = start;
                    while (remaining > 0) {
                        final long skipped = stream.skip(remaining);
                        if (skipped <= 0) {
                            stream.close();
                            throw new IOException("Unexpected end of stream while skipping to offset " + start);
                        }
                        remaining -= skipped;
                    }

                    return stream;
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }
        });
    } catch (StorageException | URISyntaxException e) {
        throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e);
    }
}

From source file:org.apache.druid.storage.s3.S3TaskLogs.java

/**
 * Opens the S3 object {@code taskKey} as a {@link ByteSource}, honoring
 * {@code offset}: positive starts that many bytes in, negative tails from the
 * end, out-of-range streams the whole object. Absent when the key or bucket
 * does not exist.
 */
private Optional<ByteSource> streamTaskFile(final long offset, String taskKey) throws IOException {
    try {
        final ObjectMetadata objectMetadata = service.getObjectMetadata(config.getS3Bucket(), taskKey);

        return Optional.of(new ByteSource() {
            @Override
            public InputStream openStream() throws IOException {
                try {
                    final long length = objectMetadata.getContentLength();
                    final long lastByte = length - 1;

                    // Translate the offset into the first byte of the ranged read.
                    final long firstByte;
                    if (offset > 0 && offset < length) {
                        firstByte = offset;
                    } else if (offset < 0 && -offset < length) {
                        firstByte = length + offset;
                    } else {
                        firstByte = 0;
                    }

                    final GetObjectRequest request = new GetObjectRequest(config.getS3Bucket(), taskKey)
                            .withMatchingETagConstraint(objectMetadata.getETag()).withRange(firstByte, lastByte);

                    return service.getObject(request).getObjectContent();
                } catch (AmazonServiceException e) {
                    throw new IOException(e);
                }
            }
        });
    } catch (AmazonS3Exception e) {
        if (404 == e.getStatusCode() || "NoSuchKey".equals(e.getErrorCode())
                || "NoSuchBucket".equals(e.getErrorCode())) {
            return Optional.absent();
        } else {
            throw new IOE(e, "Failed to stream logs from: %s", taskKey);
        }
    }
}

From source file:com.eclipsesource.connect.mvc.internal.StaticResourceHandler.java

/**
 * Builds the asset cache: each entry maps a request path to the asset's raw
 * (or, when enabled and applicable, compressed) bytes, loaded lazily on first
 * access via the assets finder.
 */
private LoadingCache<String, byte[]> createCache() {
    return CacheBuilder.newBuilder().build(new CacheLoader<String, byte[]>() {

        @Override
        public byte[] load(String path) throws Exception {
            AssetsResult result = assetsFinder.find("/" + path);
            if (result == null) {
                throw new IllegalStateException(path + " not found");
            }
            try (InputStream stream = result.getStream()) {
                if (stream == null) {
                    throw new IllegalStateException(path + " not found");
                }
                if (configuration.useCompress() && isCompressable(path)) {
                    return compress(path, stream).getBytes(UTF_8);
                }
                // Drain the stream into a byte[]. The anonymous ByteSource is
                // read() exactly once, so returning the already-open stream from
                // openStream() is safe in this one-shot usage.
                return new ByteSource() {

                    @Override
                    public InputStream openStream() throws IOException {
                        return stream;
                    }
                }.read();
            }
        }

    });
}

From source file:org.apache.druid.indexing.common.tasklogs.FileTaskLogs.java

/**
 * Streams the local log file for {@code taskid}, delegating offset handling
 * to {@code LogUtils.streamFile}. Absent when no log file exists for the task.
 */
@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) {
    final File file = fileForTask(taskid, "log");
    if (!file.exists()) {
        return Optional.absent();
    }
    return Optional.of(new ByteSource() {
        @Override
        public InputStream openStream() throws IOException {
            return LogUtils.streamFile(file, offset);
        }
    });
}