Usage examples for the com.google.common.io.LimitInputStream(InputStream, long) constructor
public LimitInputStream(InputStream paramInputStream, long paramLong)
From source file:de.cosmocode.palava.bridge.content.StreamContent.java
/**
 * Streams at most {@code length} bytes of the wrapped {@code stream} into the
 * given destination.
 *
 * @param out destination stream; not closed by this method
 * @throws IOException if reading the source or writing the destination fails
 */
@Override
public void write(OutputStream out) throws IOException {
    LimitInputStream bounded = new LimitInputStream(stream, length);
    ByteStreams.copy(bounded, out);
}
From source file:de.cosmocode.palava.bridge.content.StreamContent.java
/**
 * Reads up to {@code length} bytes of the wrapped {@code stream} into a new
 * byte array.
 *
 * @return the buffered content bytes
 * @throws IOException if reading the source stream fails
 */
@Override
public byte[] getBytes() throws IOException {
    LimitInputStream bounded = new LimitInputStream(stream, length);
    return ByteStreams.toByteArray(bounded);
}
From source file:org.jclouds.examples.blobstore.hdfs.io.HdfsPayloadSlicer.java
/**
 * Builds a lazy payload covering {@code length} bytes of the HDFS stream
 * starting at {@code offset}. The seek and the limiting wrapper are applied
 * only when the supplier's getInput() is invoked, not at slice time.
 */
protected Payload doSlice(final FSDataInputStream inputStream, final long offset, final long length) {
    return new InputStreamSupplierPayload(new InputSupplier<InputStream>() {
        public InputStream getInput() throws IOException {
            if (offset > 0) {
                try {
                    inputStream.seek(offset);
                } catch (IOException e) {
                    // Close the handle before propagating so a failed seek
                    // does not leak the underlying HDFS stream.
                    Closeables.closeQuietly(inputStream);
                    throw e;
                }
            }
            // Cap reads at 'length' bytes from the (now positioned) stream.
            return new LimitInputStream(inputStream, length);
        }
    });
}
From source file:org.apache.mahout.common.DevURandomSeedGenerator.java
/** * @return The requested number of random bytes, read directly from {@literal /dev/urandom}. * @throws SeedException If {@literal /dev/urandom} does not exist or is not accessible *//*from w ww. j a v a 2s . c o m*/ @Override public byte[] generateSeed(int length) throws SeedException { FileInputStream in = null; try { in = new FileInputStream(DEV_URANDOM); return ByteStreams.toByteArray(new LimitInputStream(in, length)); } catch (IOException ex) { throw new SeedException("Failed reading from " + DEV_URANDOM.getName(), ex); } catch (SecurityException ex) { // Might be thrown if resource access is restricted (such as in // an applet sandbox). throw new SeedException("SecurityManager prevented access to " + DEV_URANDOM.getName(), ex); } finally { Closeables.closeQuietly(in); } }
From source file:org.apache.hadoop.mapreduce.CryptoUtils.java
/**
 * Wraps a given InputStream with a CryptoInputStream. The size of the data
 * buffer required for the stream is specified by the
 * "mapreduce.job.encrypted-intermediate-data.buffer.kb" Job configuration
 * variable.
 *
 * If the value of 'length' is &gt; -1, the InputStream is additionally wrapped
 * in a LimitInputStream. CryptoStreams are late buffering in nature: they
 * will always try to read ahead if they can. The LimitInputStream ensures
 * that the CryptoStream does not read past the provided length from the
 * given input stream.
 *
 * @param conf job configuration (buffer size, codec, encryption key)
 * @param in the stream to wrap
 * @param length number of readable bytes, or -1 when unknown/unbounded
 * @return the (possibly wrapped) input stream
 * @throws IOException if the offset/IV header cannot be read
 */
public static InputStream wrapIfNecessary(Configuration conf, InputStream in, long length) throws IOException {
    if (isShuffleEncrypted(conf)) {
        int bufferSize = getBufferSize(conf);
        if (length > -1) {
            // Bound the stream first so the crypto layer's read-ahead
            // cannot consume bytes past 'length'.
            in = new LimitInputStream(in, length);
        }
        // The wrapped stream begins with an 8-byte big-endian crypto offset...
        byte[] offsetArray = new byte[8];
        IOUtils.readFully(in, offsetArray, 0, 8);
        long offset = ByteBuffer.wrap(offsetArray).getLong();
        CryptoCodec cryptoCodec = CryptoCodec.getInstance(conf);
        // ...followed by one cipher block of IV. Read order matters here.
        byte[] iv = new byte[cryptoCodec.getCipherSuite().getAlgorithmBlockSize()];
        IOUtils.readFully(in, iv, 0, cryptoCodec.getCipherSuite().getAlgorithmBlockSize());
        if (LOG.isDebugEnabled()) {
            LOG.debug("IV read from [" + Base64.encodeBase64URLSafeString(iv) + "]");
        }
        return new CryptoInputStream(in, cryptoCodec, bufferSize, getEncryptionKey(), iv,
            offset + cryptoPadding(conf));
    } else {
        return in;
    }
}
From source file:com.googlecode.htmlcompressor.compressor.ClosureJavaScriptCompressor.java
private List<JSSourceFile> getDefaultExterns() throws IOException { InputStream input = ClosureJavaScriptCompressor.class.getResourceAsStream("/externs.zip"); ZipInputStream zip = new ZipInputStream(input); List<JSSourceFile> externs = Lists.newLinkedList(); for (ZipEntry entry = null; (entry = zip.getNextEntry()) != null;) { LimitInputStream entryStream = new LimitInputStream(zip, entry.getSize()); externs.add(JSSourceFile.fromInputStream(entry.getName(), entryStream)); }// w w w .j a v a 2 s .c o m return externs; }
From source file:org.apache.hadoop.hdfs.tools.offlineImageViewer.LsrPBImage.java
public void visit(RandomAccessFile file) throws IOException { if (!FSImageUtil.checkFileFormat(file)) { throw new IOException("Unrecognized FSImage"); }//ww w. ja va 2 s . co m FileSummary summary = FSImageUtil.loadSummary(file); FileInputStream fin = null; try { fin = new FileInputStream(file.getFD()); ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary.getSectionsList()); Collections.sort(sections, new Comparator<FileSummary.Section>() { @Override public int compare(FileSummary.Section s1, FileSummary.Section s2) { SectionName n1 = SectionName.fromString(s1.getName()); SectionName n2 = SectionName.fromString(s2.getName()); if (n1 == null) { return n2 == null ? 0 : -1; } else if (n2 == null) { return -1; } else { return n1.ordinal() - n2.ordinal(); } } }); for (FileSummary.Section s : sections) { fin.getChannel().position(s.getOffset()); InputStream is = FSImageUtil.wrapInputStreamForCompression(conf, summary.getCodec(), new BufferedInputStream(new LimitInputStream(fin, s.getLength()))); switch (SectionName.fromString(s.getName())) { case STRING_TABLE: loadStringTable(is); break; case INODE: loadINodeSection(is); break; case INODE_REFERENCE: loadINodeReferenceSection(is); break; case INODE_DIR: loadINodeDirectorySection(is); break; default: break; } } list("", INodeId.ROOT_INODE_ID); } finally { IOUtils.cleanup(null, fin); } }
From source file:com.googlecode.osde.internal.jscompiler.ClosureCompiler.java
/** * Creates a list of JavaScript files which contains browser-predefined * JavaScript object names that should be preserved under optimization. *///from www. j ava 2 s .com private JSSourceFile[] createExterns() throws IOException { // The externs.zip file is bundled inside the compiler's jar. InputStream input = Compiler.class.getResourceAsStream("/externs.zip"); ZipInputStream zip = new ZipInputStream(input); List<JSSourceFile> externs = new ArrayList<JSSourceFile>(); for (ZipEntry entry; (entry = zip.getNextEntry()) != null;) { LimitInputStream entryStream = new LimitInputStream(zip, entry.getSize()); externs.add(JSSourceFile.fromInputStream(entry.getName(), entryStream)); } return externs.toArray(new JSSourceFile[externs.size()]); }
From source file:de.fhg.iais.cortex.rest.resources.BinaryResource.java
/**
 * Builds an HTTP response for a binary, honoring Range / If-Range headers:
 * 416 on an unparsable range, 200 with the full body when no range was
 * requested, 412 on an If-Range ETag mismatch, otherwise a 206 partial
 * response.
 *
 * NOTE(review): the parsed byteRange is only used as a null/non-null flag —
 * its offset and length are never applied. The code always skips 0 bytes,
 * limits the entity to the FULL binary length, and reports Content-Range
 * "bytes 0-(len-1)/len", so every 206 actually carries the whole resource.
 * Presumably byteRange's bounds were meant to drive skipBytes, the
 * LimitInputStream limit, and the Content-Range header — confirm against
 * the ByteRange API before changing.
 */
private ResponseBuilder createResponseBuilder(final Binary binary, List<String> range, String file, List<String> ifRange) throws IOException {
    ByteRange byteRange;
    try {
        byteRange = ByteRange.parse(range, binary.getLength());
    } catch (ParseException e) {
        // Unsatisfiable / malformed range.
        return Response.status(416);
    }
    InputStream content = binary.getContent();
    // Strong ETag derived from the file identifier, quoted per RFC 7232.
    String actualETag = "\"" + this.hf.newHasher().putString(file).hash().toString() + "\"";
    if (byteRange == null) {
        // No Range header: serve the complete binary.
        return Response.ok(content).header("Accept-Ranges", "bytes").header("ETag", actualETag)
            .header("Expires", new Date(System.currentTimeMillis() + this.ONE_YEAR_IN_MS))
            .header("Date", this.dateFormat.format(new Date()))
            .header("Content-Length", binary.getLength());
    }
    String expectedETag = null;
    if ((ifRange != null) && !ifRange.isEmpty()) {
        expectedETag = ifRange.get(0);
    }
    if (expectedETag != null) {
        if (!actualETag.equals(expectedETag)) {
            // If-Range ETag no longer matches the current representation.
            return Response.status(Status.PRECONDITION_FAILED);
        }
    }
    // NOTE(review): skips a constant 0 bytes — likely should be the range start.
    if (!skipBytes(content, 0)) {
        return Response.status(416);
    }
    // NOTE(review): limit is the full length — likely should be the range length.
    LimitInputStream entity = new LimitInputStream(content, binary.getLength());
    return Response.status(206).entity(entity).header("Accept-Ranges", "bytes").header("ETag", actualETag)
        .header("Expires", new Date(System.currentTimeMillis() + this.ONE_YEAR_IN_MS))
        .header("Date", this.dateFormat.format(new Date())).header("Content-Length", binary.getLength())
        .header("Connection", "keep-alive").header("Keep-Alive", "timeout=100 max=1000")
        .header("Content-Range", "bytes " + 0 + "-" + (binary.getLength() - 1) + "/" + binary.getLength());
}
From source file:com.comphenix.protocol.compat.netty.shaded.ShadedByteBufAdapter.java
/**
 * Copies {@code length} bytes from the wrapped input into {@code dst}.
 * The {@code index} argument is not used by this adapter; bytes are
 * consumed from the wrapped stream's current position.
 *
 * @return this buffer, for call chaining
 * @throws IOException if reading the input or writing {@code dst} fails
 */
@Override
public ByteBuf getBytes(int index, OutputStream dst, int length) throws IOException {
    LimitInputStream capped = new LimitInputStream(input, length);
    ByteStreams.copy(capped, dst);
    return this;
}