List of usage examples for the com.google.common.hash.HashingInputStream constructor
public HashingInputStream(HashFunction hashFunction, InputStream in)
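Before the examples harvested from open-source projects below, here is a minimal, self-contained sketch of the constructor in use. The file name data.bin and the choice of SHA-256 are arbitrary placeholders; the Guava calls used are the constructor itself, Hashing.sha256(), ByteStreams.copy(), and HashingInputStream.hash().

import com.google.common.hash.Hashing;
import com.google.common.hash.HashingInputStream;
import com.google.common.io.ByteStreams;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class HashingInputStreamExample {
    public static void main(String[] args) throws IOException {
        // Wrap any InputStream; every byte read through the wrapper is fed to the hasher.
        try (InputStream in = new FileInputStream("data.bin");
                HashingInputStream his = new HashingInputStream(Hashing.sha256(), in)) {
            // Drain the stream so all bytes pass through the hasher.
            ByteStreams.copy(his, ByteStreams.nullOutputStream());
            // Call hash() once, after the stream has been fully consumed.
            System.out.println("sha256: " + his.hash());
        }
    }
}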
From source file:org.apache.james.blob.objectstorage.ObjectStorageBlobsDAO.java
private CompletableFuture<BlobId> save(InputStream data, BlobId id) {
    String containerName = this.containerName.value();
    // Hash the payload as it streams to the object store, then derive the
    // final blob id from the computed SHA-256.
    HashingInputStream hashingInputStream = new HashingInputStream(Hashing.sha256(), data);
    Payload payload = payloadCodec.write(hashingInputStream);
    Blob blob = blobStore.blobBuilder(id.asString()).payload(payload).build();
    return CompletableFuture.supplyAsync(() -> blobStore.putBlob(containerName, blob), executor)
            .thenApply(any -> blobIdFactory.from(hashingInputStream.hash().toString()));
}
From source file:com.ibm.common.activitystreams.legacy.Binary.java
/**
 * Return an InputStream for reading the data
 * @param compression Compression
 * @return InputStream
 * @throws IOException
 */
public InputStream read(Compression<?, ?> compression) throws IOException {
    StringReader reader = new StringReader(data());
    InputStream in = BaseEncoding.base64Url().decodingStream(reader);
    if (compression != null)
        in = compression.decompressor(in);
    // If an "md5" property is present, wrap the stream so the caller can
    // obtain the digest once the data has been fully read.
    if (has("md5"))
        in = new HashingInputStream(Hashing.md5(), in);
    return in;
}
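This method only wraps the stream; nothing above consumes it or checks the digest. A minimal, hedged sketch of a caller that does, assuming the expected MD5 is supplied as a hex string (the helper name md5Matches and that assumption are illustrative, not from the source):

import com.google.common.hash.HashingInputStream;
import com.google.common.io.ByteStreams;
import com.ibm.common.activitystreams.legacy.Binary;

import java.io.IOException;
import java.io.InputStream;

public class BinaryMd5Check {
    // Drains the stream returned by Binary.read() and compares the computed
    // MD5 (lowercase hex via HashCode.toString()) to the expected value. How
    // the expected value is read off the Binary object is not shown here.
    static boolean md5Matches(Binary binary, String expectedHexMd5) throws IOException {
        InputStream in = binary.read(null); // null: no decompression
        ByteStreams.copy(in, ByteStreams.nullOutputStream());
        // read() only returns a HashingInputStream when the "md5" property is set.
        return in instanceof HashingInputStream
                && ((HashingInputStream) in).hash().toString().equalsIgnoreCase(expectedHexMd5);
    }
}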
From source file:org.jclouds.s3.filters.Aws4SignerBase.java
/**
 * Hash the input with SHA-256.
 *
 * @param input
 * @return hash result
 * @throws HttpException
 */
public static byte[] hash(InputStream input) throws HttpException {
    HashingInputStream his = new HashingInputStream(Hashing.sha256(), input);
    try {
        // Drain the stream through the hasher; the bytes themselves are discarded.
        ByteStreams.copy(his, ByteStreams.nullOutputStream());
        return his.hash().asBytes();
    } catch (IOException e) {
        throw new HttpException("Unable to compute hash while signing request: " + e.getMessage(), e);
    }
}
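For context, AWS Signature Version 4 uses the lowercase hex encoding of this SHA-256 digest as the payload hash (the x-amz-content-sha256 header). A small sketch of calling the helper above; the sample input string is arbitrary:

import com.google.common.io.BaseEncoding;
import org.jclouds.s3.filters.Aws4SignerBase;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

public class PayloadHashExample {
    public static void main(String[] args) {
        byte[] digest = Aws4SignerBase.hash(
                new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8)));
        // Lowercase hex, as SigV4 expects for the payload hash.
        System.out.println(BaseEncoding.base16().lowerCase().encode(digest));
    }
}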
From source file:org.haiku.haikudepotserver.pkg.job.PkgScreenshotImportArchiveJobRunner.java
/**
 * <p>Goes through the archive and captures information about each screenshot.</p>
 */
private void collectScreenshotMetadataFromArchive(Map<String, ScreenshotImportMetadatas> data,
        ArchiveInputStream archiveInputStream, ArchiveEntry archiveEntry, String pkgName, int order) {
    ScreenshotImportMetadatas metadatas = data.get(pkgName);

    if (null == metadatas) {
        metadatas = new ScreenshotImportMetadatas();
        ObjectContext context = serverRuntime.newContext();
        Optional<Pkg> pkgOptional = Pkg.tryGetByName(context, pkgName);

        if (!pkgOptional.isPresent()) {
            metadatas.setNotFound();
        }

        data.put(pkgName, metadatas);
    }

    if (!metadatas.isNotFound()) {
        // Hash the entry's bytes; the content itself is not needed at this stage.
        HashingInputStream hashingInputStream = new HashingInputStream(HASH_FUNCTION, archiveInputStream);

        try {
            ByteStreams.copy(hashingInputStream, ByteStreams.nullOutputStream());
        } catch (IOException ioe) {
            throw new UncheckedIOException(ioe);
        }

        metadatas.add(new FromArchiveScreenshotMetadata(order, archiveEntry.getSize(),
                hashingInputStream.hash(), archiveEntry.getName()));
    }
}
From source file:org.jclouds.kinetic.strategy.internal.KineticStorageStrategyImpl.java
@Override
public String putBlob(final String containerName, final Blob blob) throws IOException {
    String blobKey = blob.getMetadata().getName();
    Payload payload = blob.getPayload();
    InputStream payloadStream = payload.openStream();
    // Wrap the payload stream so the MD5 digest accumulates as chunks are read.
    HashingInputStream his = new HashingInputStream(Hashing.md5(), payloadStream);

    kineticContainerNameValidator.validate(containerName);
    kineticBlobKeyValidator.validate(blobKey);
    if (getDirectoryBlobSuffix(blobKey) != null) {
        return putDirectoryBlob(containerName, blob);
    }

    long fileLength = payload.getContentMetadata().getContentLength();
    long chunksRequired = numberOfChunksForSize(fileLength);
    int chunkDataLength = KineticConstants.PROPERTY_CHUNK_SIZE_BYTES
            - KineticConstants.PROPERTY_CHUNK_FULL_HEADER_SIZE_BYTES;
    int currentChunk = 0;
    long fileId = -1;
    try {
        fileId = KineticDatabaseUtils.getInstance().getFileIdFromDatabase(containerName + "/" + blobKey);
    } catch (SQLException sqle) {
        sqle.printStackTrace();
    }

    while (currentChunk < chunksRequired) {
        Chunk chunk = new Chunk(this, fileId, currentChunk);
        byte[] chunkData = new byte[KineticConstants.PROPERTY_CHUNK_SIZE_BYTES];

        // Get header type values
        Map<String, String> headers = getChunkHeaders(containerName, blobKey, currentChunk);
        String chunkKey = getChunkKey(containerName, blobKey, currentChunk);

        // Write the chunk key header into the start of the chunk's data.
        byte[] headerBytes = chunkKey.getBytes("UTF-8");
        System.arraycopy(headerBytes, 0, chunkData, 0, headerBytes.length);

        // Read the next slice of the blob through the hashing stream so the
        // digest covers every stored byte; ByteStreams.read blocks until the
        // requested length is read or the stream is exhausted.
        ByteStreams.read(his, chunkData, headerBytes.length, chunkDataLength);
        chunk.setData(chunkData);

        // Send data to KDCC
        try {
            KineticDatabaseUtils.getInstance().addChunkToDatabase(chunkKey, chunkData);
        } catch (SQLException sqle) {
            return null;
        }
        currentChunk++;
    }

    try {
        KineticDatabaseUtils.getInstance().addFileToDatabase(containerName + "/" + blobKey, fileLength);
    } catch (SQLException e) {
        e.printStackTrace();
    }

    payload.release();
    return base16().lowerCase().encode(his.hash().asBytes());
}