Example usage for org.apache.commons.compress.utils BoundedInputStream BoundedInputStream

Introduction

On this page you can find example usages of the org.apache.commons.compress.utils.BoundedInputStream constructor.

Prototype

public BoundedInputStream(final InputStream in, final long size) 

Document

Creates a stream that will read at most the given number of bytes from the given stream.
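
As a minimal sketch of the constructor in isolation (the file name data/sample.bin and the 1024-byte limit are illustrative assumptions, not taken from the examples below): the wrapper simply reports end-of-stream once the byte budget is exhausted, regardless of how much data the underlying stream still holds.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.compress.utils.BoundedInputStream;

public class BoundedInputStreamSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical input file; any InputStream can be wrapped.
        // The outer try-with-resources closes the underlying stream.
        try (InputStream raw = new FileInputStream("data/sample.bin")) {
            InputStream bounded = new BoundedInputStream(raw, 1024);
            byte[] buffer = new byte[256];
            int total = 0;
            int read;
            // read() returns -1 once 1024 bytes have been consumed,
            // even if the underlying stream has more data.
            while ((read = bounded.read(buffer)) != -1) {
                total += read;
            }
            System.out.println("Read " + total + " bytes (at most 1024).");
        }
    }
}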

Usage

From source file:com.facebook.buck.rules.HttpArtifactCache.java
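
Buck's HTTP artifact cache reads a length prefix from the response body, then wraps the remaining data in a BoundedInputStream of exactly that length so the artifact can be hashed and copied without consuming the trailing checksum.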

public CacheResult fetchImpl(RuleKey ruleKey, File file) throws IOException {
    Request request = createRequestBuilder(ruleKey.toString()).get().build();
    Response response = fetchCall(request);

    if (response.code() == HttpURLConnection.HTTP_NOT_FOUND) {
        LOGGER.info("fetch(%s): cache miss", ruleKey);
        return CacheResult.MISS;
    }

    if (response.code() != HttpURLConnection.HTTP_OK) {
        LOGGER.warn("fetch(%s): unexpected response: %d", ruleKey, response.code());
        return CacheResult.MISS;
    }

    // The hash code shipped with the artifact to/from the cache.
    HashCode expectedHashCode, actualHashCode;

    // Set up a temporary file, which sits next to the destination, to write to, and
    // make sure all parent dirs exist.
    Path path = file.toPath();
    projectFilesystem.createParentDirs(path);
    Path temp = projectFilesystem.createTempFile(path.getParent(), path.getFileName().toString(), ".tmp");

    // Open the stream to the server just long enough to read the hash code and artifact.
    try (DataInputStream input = new DataInputStream(response.body().byteStream())) {

        // First, extract the size of the file data portion, which we put in the beginning of
        // the artifact.
        long length = input.readLong();

        // Now, write the remaining response data to the temp file, while grabbing the hash.
        try (BoundedInputStream boundedInput = new BoundedInputStream(input, length);
                HashingInputStream hashingInput = new HashingInputStream(hashFunction, boundedInput);
                OutputStream output = projectFilesystem.newFileOutputStream(temp)) {
            ByteStreams.copy(hashingInput, output);
            actualHashCode = hashingInput.hash();
        }

        // Lastly, extract the hash code from the end of the response data.
        byte[] hashCodeBytes = new byte[hashFunction.bits() / Byte.SIZE];
        ByteStreams.readFully(input, hashCodeBytes);
        expectedHashCode = HashCode.fromBytes(hashCodeBytes);

        // We should be at the end of output -- verify this.  Also, we could just try to read a
        // single byte here, instead of all remaining input, but some network stack implementations
        // require that we exhaust the input stream before the connection can be reusable.
        try (OutputStream output = ByteStreams.nullOutputStream()) {
            if (ByteStreams.copy(input, output) != 0) {
                LOGGER.warn("fetch(%s): unexpected end of input", ruleKey);
                return CacheResult.MISS;
            }
        }
    }

    // Now form the checksum of the file we got and compare it to the checksum shipped
    // with the response.  If they don't match, log this and return a miss.
    if (!expectedHashCode.equals(actualHashCode)) {
        LOGGER.warn("fetch(%s): artifact had invalid checksum", ruleKey);
        projectFilesystem.deleteFileAtPath(temp);
        return CacheResult.MISS;
    }

    // Finally, move the temp file into its final place.
    projectFilesystem.move(temp, path, StandardCopyOption.REPLACE_EXISTING);

    LOGGER.info("fetch(%s): cache hit", ruleKey);
    return CacheResult.HTTP_HIT;
}

From source file:bobs.is.compress.sevenzip.SevenZFile.java
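
This 7z reader wraps the shared folder stream in a BoundedInputStream sized to the requested entry, optionally layering CRC verification on top.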

public InputStream getStreamByIndex(int entryIndex) throws IOException {
    final int folderIndex = getArchive().streamMap.fileFolderIndex[entryIndex];
    if (folderIndex < 0) {
        getDeferredBlockStreams().clear();
        // TODO: previously it'd return an empty stream?
        // new BoundedInputStream(new ByteArrayInputStream(new byte[0]), 0);
        return null;
    }
    final SevenZArchiveEntry file = getArchive().files[entryIndex];
    if (getCurrentFolderIndex() == folderIndex) {
        // (COMPRESS-320).
        // The current entry is within the same (potentially opened) folder. The
        // previous stream has to be fully decoded before we can start reading
        // but don't do it eagerly -- if the user skips over the entire folder nothing
        // is effectively decompressed.

        file.setContentMethods(getArchive().files[entryIndex - 1].getContentMethods());
    } else {
        // We're opening a new folder. Discard any queued streams and the folder stream.
        currentFolderIndex = folderIndex;
        getDeferredBlockStreams().clear();
        if (currentFolderInputStream != null) {
            currentFolderInputStream.close();
            currentFolderInputStream = null;
        }

        final Folder folder = getArchive().folders[folderIndex];
        final int firstPackStreamIndex = getArchive().streamMap.folderFirstPackStreamIndex[folderIndex];
        final long folderOffset = SIGNATURE_HEADER_SIZE + getArchive().packPos
                + getArchive().streamMap.packStreamOffsets[firstPackStreamIndex];
        currentFolderInputStream = buildDecoderStack(folder, folderOffset, firstPackStreamIndex, file);
    }

    InputStream fileStream = new BoundedInputStream(currentFolderInputStream, file.getSize());
    if (file.getHasCrc()) {
        fileStream = new CRC32VerifyingInputStream(fileStream, file.getSize(), file.getCrcValue());
    }

    getDeferredBlockStreams().add(fileStream);
    return fileStream;
}

From source file:bobs.is.compress.sevenzip.SevenZFile.java
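
The same pattern in the same class, used when building the decoding stream for the current entry rather than an entry selected by index.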

private void buildDecodingStream() throws IOException {
    final int folderIndex = getArchive().streamMap.fileFolderIndex[getCurrentEntryIndex()];
    if (folderIndex < 0) {
        getDeferredBlockStreams().clear();
        // TODO: previously it'd return an empty stream?
        // new BoundedInputStream(new ByteArrayInputStream(new byte[0]), 0);
        return;
    }
    final SevenZArchiveEntry file = getArchive().files[getCurrentEntryIndex()];
    if (getCurrentFolderIndex() == folderIndex) {
        // (COMPRESS-320).
        // The current entry is within the same (potentially opened) folder. The
        // previous stream has to be fully decoded before we can start reading
        // but don't do it eagerly -- if the user skips over the entire folder nothing
        // is effectively decompressed.

        file.setContentMethods(getArchive().files[getCurrentEntryIndex() - 1].getContentMethods());
    } else {
        // We're opening a new folder. Discard any queued streams and the folder stream.
        currentFolderIndex = folderIndex;
        getDeferredBlockStreams().clear();
        if (currentFolderInputStream != null) {
            currentFolderInputStream.close();
            currentFolderInputStream = null;
        }

        final Folder folder = getArchive().folders[folderIndex];
        final int firstPackStreamIndex = getArchive().streamMap.folderFirstPackStreamIndex[folderIndex];
        final long folderOffset = SIGNATURE_HEADER_SIZE + getArchive().packPos
                + getArchive().streamMap.packStreamOffsets[firstPackStreamIndex];
        currentFolderInputStream = buildDecoderStack(folder, folderOffset, firstPackStreamIndex, file);
    }

    InputStream fileStream = new BoundedInputStream(currentFolderInputStream, file.getSize());
    if (file.getHasCrc()) {
        fileStream = new CRC32VerifyingInputStream(fileStream, file.getSize(), file.getCrcValue());
    }

    getDeferredBlockStreams().add(fileStream);
}

From source file:org.apache.beam.sdk.io.solr.JavaBinCodecCoder.java
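
This Beam coder decodes a var-int length prefix and hands the codec a stream bounded to exactly that many bytes, so the codec cannot read past the encoded document.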

@Override
public T decode(InputStream inStream) throws IOException {
    DataInputStream in = new DataInputStream(inStream);

    int len = VarInt.decodeInt(in);
    if (len < 0) {
        throw new CoderException("Invalid encoded SolrDocument length: " + len);
    }

    JavaBinCodec codec = new JavaBinCodec();
    return (T) codec.unmarshal(new BoundedInputStream(in, len));
}

From source file:org.codehaus.plexus.archiver.zip.ZipArchiverTest.java
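
This Plexus Archiver test installs a stream transformer that truncates every archived entry to three bytes by wrapping it in a BoundedInputStream.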

public void testAddArchivedFileSet() throws Exception {
    File toBeAdded = new File("src/test/resources/test.zip");
    DefaultArchivedFileSet sfd = DefaultArchivedFileSet.archivedFileSet(toBeAdded);
    File zipFile = getTestFile("target/output/withZip.zip");
    final ZipArchiver zipArchiver = getZipArchiver(zipFile);
    InputStreamTransformer is = new InputStreamTransformer() {
        @Nonnull
        public InputStream transform(@Nonnull PlexusIoResource resource, @Nonnull InputStream inputStream)
                throws IOException {
            return new BoundedInputStream(inputStream, 3);
        }
    };
    sfd.setStreamTransformer(is);
    zipArchiver.addArchivedFileSet(sfd);
    zipArchiver.createArchive();

    final ZipUnArchiver zipUnArchiver = getZipUnArchiver(zipFile);
    File destFile = new File("target/output/withZip");
    destFile.mkdirs();
    zipUnArchiver.setDestFile(destFile);
    zipUnArchiver.extract();
    File a3byteFile = new File(destFile,
            "Users/kristian/lsrc/plexus/plexus-archiver/src/main/java/org/codehaus/plexus/archiver/zip/ZipArchiver.java");
    assertTrue(a3byteFile.exists());
    assertTrue(a3byteFile.length() == 3);
}

From source file:org.codehaus.plexus.archiver.zip.ZipArchiverTest.java
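
The same three-byte transformer, applied to an archived file set, a plain file set, and a resource collection within a single archive.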

public void testCreateArchiveWithStreamTransformer() throws IOException {
    InputStreamTransformer is = new InputStreamTransformer() {
        @Nonnull
        public InputStream transform(@Nonnull PlexusIoResource resource, @Nonnull InputStream inputStream)
                throws IOException {
            return new BoundedInputStream(inputStream, 3);
        }
    };

    final ZipArchiver zipArchiver = getZipArchiver(getTestFile("target/output/all3bytes.zip"));
    File zipFile = new File("src/test/resources/test.zip");
    DefaultArchivedFileSet afs = new DefaultArchivedFileSet(zipFile);
    afs.setStreamTransformer(is);
    afs.setPrefix("azip/");
    zipArchiver.addArchivedFileSet(afs);

    DefaultFileSet dfs = new DefaultFileSet(new File("src/test/resources/mjar179"));
    dfs.setStreamTransformer(is);
    dfs.setPrefix("mj179/");
    zipArchiver.addFileSet(dfs);

    PlexusIoFileResourceCollection files = new PlexusIoFileResourceCollection();
    files.setBaseDir(new File("src/test/resources"));
    files.setStreamTransformer(is);
    files.setPrefix("plexus/");
    zipArchiver.addResources(files);

    zipArchiver.createArchive();

}

From source file:org.eclipse.che.plugin.urlfactory.URLFetcher.java
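
Here the bound serves as a safety limit: the fetched content is cut off at getLimit() bytes no matter how much the remote side sends.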

/**
 * Fetches the stream of the given URL connection and returns its content.
 * To prevent a DoS attack, the amount of collected data is limited.
 *
 * @param urlConnection
 *         the URL connection to fetch
 * @return the content of the file
 */
public String fetch(@NotNull URLConnection urlConnection) {
    requireNonNull(urlConnection, "urlConnection parameter can't be null");
    final String value;
    try (InputStream inputStream = urlConnection.getInputStream();
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(new BoundedInputStream(inputStream, getLimit()), UTF_8))) {
        value = reader.lines().collect(Collectors.joining("\n"));
    } catch (IOException e) {
        // this should not happen if the URL was checked before fetching
        LOG.debug("Invalid URL", e);
        return null;
    }
    return value;
}

From source file:org.haiku.haikudepotserver.pkg.PkgIconServiceImpl.java
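
The icon upload is read through a BoundedInputStream so that at most ICON_SIZE_LIMIT bytes are ever buffered into memory.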

@Override
public PkgIcon storePkgIconImage(InputStream input, MediaType mediaType, Integer expectedSize,
        ObjectContext context, Pkg pkg) throws IOException, BadPkgIconException {

    Preconditions.checkArgument(null != context, "the context is not supplied");
    Preconditions.checkArgument(null != input, "the input must be provided");
    Preconditions.checkArgument(null != mediaType, "the mediaType must be provided");
    Preconditions.checkArgument(null != pkg, "the pkg must be provided");

    byte[] imageData = ByteStreams.toByteArray(new BoundedInputStream(input, ICON_SIZE_LIMIT));

    Optional<PkgIcon> pkgIconOptional;
    Integer size = null;

    switch (mediaType.getCode()) {

    case MediaType.MEDIATYPE_PNG:
        ImageHelper.Size pngSize = imageHelper.derivePngSize(imageData);

        if (null == pngSize) {
            LOGGER.warn(
                    "attempt to set the bitmap (png) package icon for package {}, but the size was invalid; it is not a valid png image",
                    pkg.getName());
            throw new BadPkgIconException("invalid png");
        }

        if (!pngSize.areSides(16) && !pngSize.areSides(32) && !pngSize.areSides(64)) {
            LOGGER.warn(
                    "attempt to set the bitmap (png) package icon for package {}, but the size was invalid; it must be either 32x32 or 16x16 px, but was {}",
                    pkg.getName(), pngSize.toString());
            throw new BadPkgIconException("non-square sizing or unexpected sizing");
        }

        if (null != expectedSize && !pngSize.areSides(expectedSize)) {
            LOGGER.warn(
                    "attempt to set the bitmap (png) package icon for package {}, but the size did not match the expected size",
                    pkg.getName());
            throw new BadPkgIconException("size of image was not as expected");
        }

        try {
            imageData = pngOptimizationService.optimize(imageData);
        } catch (IOException ioe) {
            throw new RuntimeException("the png optimization process has failed; ", ioe);
        }

        size = pngSize.width;
        pkgIconOptional = pkg.getPkgIcon(mediaType, pngSize.width);
        break;

    case MediaType.MEDIATYPE_HAIKUVECTORICONFILE:
        if (!imageHelper.looksLikeHaikuVectorIconFormat(imageData)) {
            LOGGER.warn(
                    "attempt to set the vector (hvif) package icon for package {}, but the data does not look like hvif",
                    pkg.getName());
            throw new BadPkgIconException();
        }
        pkgIconOptional = pkg.getPkgIcon(mediaType, null);
        break;

    default:
        throw new IllegalStateException("unhandled media type; " + mediaType.getCode());

    }

    PkgIconImage pkgIconImage;

    if (pkgIconOptional.isPresent()) {
        pkgIconImage = pkgIconOptional.get().getPkgIconImage();
    } else {
        PkgIcon pkgIcon = context.newObject(PkgIcon.class);
        pkg.addToManyTarget(Pkg.PKG_ICONS.getName(), pkgIcon, true);
        pkgIcon.setMediaType(mediaType);
        pkgIcon.setSize(size);
        pkgIconImage = context.newObject(PkgIconImage.class);
        pkgIcon.addToManyTarget(PkgIcon.PKG_ICON_IMAGES.getName(), pkgIconImage, true);
        pkgIconOptional = Optional.of(pkgIcon);
    }

    pkgIconImage.setData(imageData);
    pkg.setModifyTimestamp();
    pkg.setIconModifyTimestamp(new java.sql.Timestamp(Clock.systemUTC().millis()));
    renderedPkgIconRepository.evict(context, pkg);

    if (null != size) {
        LOGGER.info("the icon {}px for package {} has been updated", size, pkg.getName());
    } else {
        LOGGER.info("the icon for package {} has been updated", pkg.getName());
    }

    PkgIcon pkgIcon = pkgIconOptional.orElseThrow(IllegalStateException::new);

    for (Pkg subordinatePkg : pkgService.findSubordinatePkgsForMainPkg(context, pkg.getName())) {
        replicatePkgIcon(context, pkgIcon, subordinatePkg);
    }

    return pkgIcon;
}

From source file:org.haiku.haikudepotserver.pkg.PkgScreenshotServiceImpl.java
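
Screenshot uploads are bounded the same way, capped at SCREENSHOT_SIZE_LIMIT bytes before the PNG is validated and persisted.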

/**
 * <p>Note that if the screenshot is already stored then this method will simply return that screenshot.</p>
 * @param ordering can be NULL; in which case the screenshot will come at the end.
 */

@Override
public PkgScreenshot storePkgScreenshotImage(InputStream input, ObjectContext context, Pkg pkg,
        Integer ordering) throws IOException, BadPkgScreenshotException {

    Preconditions.checkArgument(null != input, "the input must be provided");
    Preconditions.checkArgument(null != context, "the context must be provided");
    Preconditions.checkArgument(null != pkg, "the package must be provided");

    byte[] pngData = ByteStreams.toByteArray(new BoundedInputStream(input, SCREENSHOT_SIZE_LIMIT));
    ImageHelper.Size size = imageHelper.derivePngSize(pngData);
    String hashSha256 = HASH_FUNCTION.hashBytes(pngData).toString();

    if (null == size) {
        LOGGER.warn("attempt to store a screenshot image that is not a png");
        throw new BadPkgScreenshotException();
    }

    // check that the file roughly looks like PNG and the size is something
    // reasonable.

    if (size.height > SCREENSHOT_SIDE_LIMIT || size.width > SCREENSHOT_SIDE_LIMIT) {
        LOGGER.warn("attempt to store a screenshot image that is too large; " + size.toString());
        throw new BadPkgScreenshotException();
    }

    // check that we do not already have this screenshot persisted for this package.

    for (PkgScreenshot pkgScreenshot : pkg.getPkgScreenshots()) {
        if (pkgScreenshot.getHashSha256().equals(hashSha256)) {
            LOGGER.warn("attempt to store a screenshot image that is already stored for this package");
            return pkgScreenshot;
        }
    }

    MediaType png = MediaType.tryGetByCode(context, com.google.common.net.MediaType.PNG.toString()).get();

    // find the highest existing ordering so that this screenshot can be appended
    // at the end of the list.

    int actualOrdering = null == ordering ? pkg.getHighestPkgScreenshotOrdering().orElse(0) + 1 : ordering;

    PkgScreenshot screenshot = context.newObject(PkgScreenshot.class);
    screenshot.setCode(UUID.randomUUID().toString());
    screenshot.setOrdering(actualOrdering);
    screenshot.setHeight(size.height);
    screenshot.setWidth(size.width);
    screenshot.setLength(pngData.length);
    screenshot.setHashSha256(hashSha256);
    pkg.addToManyTarget(Pkg.PKG_SCREENSHOTS.getName(), screenshot, true);

    PkgScreenshotImage screenshotImage = context.newObject(PkgScreenshotImage.class);
    screenshotImage.setMediaType(png);
    screenshotImage.setData(pngData);
    screenshot.addToManyTarget(PkgScreenshot.PKG_SCREENSHOT_IMAGES.getName(), screenshotImage, true);

    pkg.setModifyTimestamp();

    LOGGER.info("a screenshot #{} has been added to package [{}] ({})", actualOrdering, pkg.getName(),
            screenshot.getCode());

    relayScreenshotStorageToSubordinatePkgs(context, screenshot, actualOrdering);

    return screenshot;
}

From source file:org.queeg.hadoop.tar.TarExtractor.java
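
Each tar entry's content is exposed as a BoundedInputStream of the entry's size, since the underlying archive stream spans all entries.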

public void extract(ByteSource source) throws IOException {
    TarArchiveInputStream archiveInputStream = new TarArchiveInputStream(source.openStream());

    TarArchiveEntry entry;
    while ((entry = archiveInputStream.getNextTarEntry()) != null) {
        if (entry.isFile()) {
            BoundedInputStream entryInputStream = new BoundedInputStream(archiveInputStream, entry.getSize());
            ByteSink sink = new PathByteSink(conf, new Path(destination, entry.getName()));
            sink.writeFrom(entryInputStream);
        } else if (entry.isDirectory()) {
            ByteStreams.skipFully(archiveInputStream, entry.getSize());
            fs.mkdirs(new Path(destination, entry.getName()));
        }
    }

    archiveInputStream.close();
}