Usage examples for the Guava `com.google.common.hash.HashingOutputStream` constructor:
public HashingOutputStream(HashFunction hashFunction, OutputStream out)
From source file:com.google.api.services.samples.storage.cmdline.StorageSample.java
public static void main(String[] args) { try {/*from w w w . jav a 2 s . c o m*/ // initialize network, sample settings, credentials, and the storage client. HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport(); JsonFactory jsonFactory = JacksonFactory.getDefaultInstance(); SampleSettings settings = SampleSettings.load(jsonFactory); Credential credential = CredentialsProvider.authorize(httpTransport, jsonFactory); Storage storage = new Storage.Builder(httpTransport, jsonFactory, credential) .setApplicationName(APPLICATION_NAME).build(); // // run commands // View.header1("Trying to create a new bucket " + settings.getBucket()); BucketsInsertExample.createInProject(storage, settings.getProject(), new Bucket().setName(settings.getBucket()).setLocation("US")); View.header1("Getting bucket " + settings.getBucket() + " metadata"); Bucket bucket = BucketsGetExample.get(storage, settings.getBucket()); View.show(bucket); View.header1("Listing objects in bucket " + settings.getBucket()); for (StorageObject object : ObjectsListExample.list(storage, settings.getBucket())) { View.show(object); } View.header1("Getting object metadata from gs://pub/SomeOfTheTeam.jpg"); StorageObject object = ObjectsGetMetadataExample.get(storage, "pub", "SomeOfTheTeam.jpg"); View.show(object); View.header1("Uploading object."); final long objectSize = 100 * 1024 * 1024 /* 100 MB */; InputStream data = new Helpers.RandomDataBlockInputStream(objectSize, 1024); object = new StorageObject().setBucket(settings.getBucket()).setName(settings.getPrefix() + "myobject") .setMetadata(ImmutableMap.of("key1", "value1", "key2", "value2")) .setCacheControl("max-age=3600, must-revalidate").setContentDisposition("attachment"); object = ObjectsUploadExample.uploadWithMetadata(storage, object, data); View.show(object); System.out.println("md5Hash: " + object.getMd5Hash()); System.out.println("crc32c: " + object.getCrc32c() + ", decoded to " + 
ByteBuffer.wrap(BaseEncoding.base64().decode(object.getCrc32c())).getInt()); View.header1("Getting object data of uploaded object, calculate hashes/crcs."); OutputStream nullOutputStream = new OutputStream() { // Throws away the bytes. @Override public void write(int b) throws IOException { } @Override public void write(byte b[], int off, int len) { } }; DigestOutputStream md5DigestOutputStream = new DigestOutputStream(nullOutputStream, MessageDigest.getInstance("MD5")); HashingOutputStream crc32cHashingOutputStream = new HashingOutputStream(Hashing.crc32c(), md5DigestOutputStream); ObjectsDownloadExample.downloadToOutputStream(storage, settings.getBucket(), settings.getPrefix() + "myobject", crc32cHashingOutputStream); String calculatedMD5 = BaseEncoding.base64().encode(md5DigestOutputStream.getMessageDigest().digest()); System.out.println( "md5Hash: " + calculatedMD5 + " " + (object.getMd5Hash().equals(calculatedMD5) ? "(MATCHES)" : "(MISMATCHES; data altered in transit)")); int calculatedCrc32c = crc32cHashingOutputStream.hash().asInt(); String calculatedEncodedCrc32c = BaseEncoding.base64().encode(Ints.toByteArray(calculatedCrc32c)); // NOTE: Don't compare HashCode.asBytes() directly, as that encodes the crc32c in // little-endien. One would have to reverse the bytes first. System.out.println("crc32c: " + calculatedEncodedCrc32c + ", decoded to " + crc32cHashingOutputStream.hash().asInt() + " " + (object.getCrc32c().equals(calculatedEncodedCrc32c) ? "(MATCHES)" : "(MISMATCHES; data altered in transit)")); // success! return; } catch (GoogleJsonResponseException e) { // An error came back from the API. GoogleJsonError error = e.getDetails(); System.err.println(error.getMessage()); // More error information can be retrieved with error.getErrors(). } catch (HttpResponseException e) { // No JSON body was returned by the API. 
System.err.println(e.getHeaders()); System.err.println(e.getMessage()); } catch (IOException e) { // Error formulating a HTTP request or reaching the HTTP service. System.err.println(e.getMessage()); } catch (Throwable t) { t.printStackTrace(); } System.exit(1); }
From source file:com.facebook.buck.artifact_cache.ThriftArtifactCacheProtocol.java
private static String computeHash(ByteSource source, HashFunction hashFunction) throws IOException { try (InputStream inputStream = source.openStream(); HashingOutputStream outputStream = new HashingOutputStream(hashFunction, new OutputStream() { @Override//ww w .ja va 2 s .c o m public void write(int b) throws IOException { // Do nothing. } })) { ByteStreams.copy(inputStream, outputStream); return outputStream.hash().toString(); } }
From source file:uk.ac.horizon.artcodes.server.christmas.ImageServlet.java
/**
 * Handles an image upload via HTTP PUT into content-addressed GCS storage: the
 * image ID taken from the request must equal the SHA-256 hash of the uploaded
 * bytes, otherwise the stored object is deleted and the request is rejected.
 *
 * <p>Rejects uploads that are too large, target an existing object, or whose
 * content type cannot be guessed from the stream. Application-level failures are
 * reported through {@code HTTPException#writeTo(response)}.
 */
@Override
public void doPut(HttpServletRequest request, HttpServletResponse response) throws IOException {
    try {
        // Reject oversized uploads before reading the body.
        if (request.getContentLength() > image_size) {
            throw new HTTPException(HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE, "Image too large");
        }
        verifyApp(request);
        final String id = getImageID(request);
        final GcsService gcsService = GcsServiceFactory.createGcsService(RetryParams.getDefaultInstance());
        final GcsFilename filename = new GcsFilename(request.getServerName(), id);
        final GcsFileMetadata metadata = gcsService.getMetadata(filename);
        // Non-null metadata means the object already exists; stored objects are immutable.
        if (metadata != null) {
            throw new HTTPException(HttpServletResponse.SC_FORBIDDEN, "Cannot modify");
        }
        // BufferedInputStream supports mark/reset, which guessContentTypeFromStream
        // needs to peek at the leading bytes without consuming them.
        final BufferedInputStream inputStream = new BufferedInputStream(request.getInputStream());
        final String mimetype = URLConnection.guessContentTypeFromStream(inputStream);
        if (mimetype == null) {
            throw new HTTPException(HttpServletResponse.SC_BAD_REQUEST, "Unrecognised image type");
        }
        final GcsFileOptions.Builder fileOptionsBuilder = new GcsFileOptions.Builder();
        fileOptionsBuilder.mimeType(mimetype);
        final GcsFileOptions fileOptions = fileOptionsBuilder.build();
        final GcsOutputChannel outputChannel = gcsService.createOrReplace(filename, fileOptions);
        // Hash the bytes while streaming them into GCS so the content address can be
        // verified without a second pass.
        final HashingOutputStream outputStream = new HashingOutputStream(Hashing.sha256(),
                Channels.newOutputStream(outputChannel));
        ByteStreams.copy(inputStream, outputStream);
        String hash = outputStream.hash().toString();
        if (!hash.equals(id)) {
            // Content does not match its claimed address: remove it and fail the request.
            // NOTE(review): the output channel is not closed on this path — confirm
            // whether leaving it unclosed is intentional before changing.
            gcsService.delete(filename);
            throw new HTTPException(HttpServletResponse.SC_BAD_REQUEST, "Invalid hash");
        }
        outputStream.close();
        outputChannel.close();
    } catch (HTTPException e) {
        // Translate application-level errors into HTTP status responses.
        e.writeTo(response);
    }
}
From source file:com.google.devtools.build.lib.collect.nestedset.NestedSetCodec.java
private void serializeOneNestedSet(Object children, CodedOutputStream codedOut, Map<Object, byte[]> childToDigest) throws IOException, SerializationException { // Serialize nested set into an inner byte array so we can take its digest ByteArrayOutputStream childOutputStream = new ByteArrayOutputStream(); HashingOutputStream hashingOutputStream = new HashingOutputStream(Hashing.md5(), childOutputStream); CodedOutputStream childCodedOut = CodedOutputStream.newInstance(hashingOutputStream); if (children instanceof Object[]) { serializeMultiItemChildArray((Object[]) children, childToDigest, childCodedOut); } else if (children != NestedSet.EMPTY_CHILDREN) { serializeSingleItemChildArray(children, childCodedOut); } else {//from w ww . j ava 2 s. c o m // Empty set childCodedOut.writeInt32NoTag(0); } childCodedOut.flush(); byte[] digest = hashingOutputStream.hash().asBytes(); codedOut.writeByteArrayNoTag(digest); byte[] childBytes = childOutputStream.toByteArray(); codedOut.writeByteArrayNoTag(childBytes); childToDigest.put(children, digest); }
From source file:org.sonatype.nexus.repository.maven.internal.MavenFacetUtils.java
/** * Creates a temporary {@link Content} equipped will all the whistles and bells, like hashes and so. *///from w w w . ja v a2 s . c o m public static Content createTempContent(final Path path, final String contentType, final Writer writer) throws IOException { Map<HashAlgorithm, HashingOutputStream> hashingStreams = new HashMap<>(); try (OutputStream outputStream = new BufferedOutputStream(Files.newOutputStream(path))) { OutputStream os = outputStream; for (HashType hashType : HashType.values()) { os = new HashingOutputStream(hashType.getHashAlgorithm().function(), os); hashingStreams.put(hashType.getHashAlgorithm(), (HashingOutputStream) os); } writer.write(os); os.flush(); } Map<HashAlgorithm, HashCode> hashCodes = new HashMap<>(); for (Map.Entry<HashAlgorithm, HashingOutputStream> entry : hashingStreams.entrySet()) { hashCodes.put(entry.getKey(), entry.getValue().hash()); } Content content = new Content(new StreamPayload(new InputStreamSupplier() { @Nonnull @Override public InputStream get() throws IOException { return new BufferedInputStream(Files.newInputStream(path)); } }, Files.size(path), contentType)); content.getAttributes().set(Content.CONTENT_LAST_MODIFIED, DateTime.now()); content.getAttributes().set(Content.CONTENT_HASH_CODES_MAP, hashCodes); mayAddETag(content); return content; }
From source file:uk.ac.horizon.artcodes.server.ImageServlet.java
/**
 * Handles an image upload via HTTP PUT into content-addressed GCS storage: the
 * image ID taken from the request must equal the SHA-256 hash of the uploaded
 * bytes, otherwise the stored object is deleted and the request is rejected.
 *
 * <p>Rejects uploads that are too large, come from an unverified user, target an
 * existing object, or whose content type cannot be guessed from the stream.
 * Application-level failures are reported through {@code HTTPException#writeTo}.
 */
@Override
public void doPut(HttpServletRequest request, HttpServletResponse response) throws IOException {
    try {
        // Reject oversized uploads before reading the body.
        if (request.getContentLength() > image_size) {
            throw new HTTPException(HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE, "Image too large");
        }
        verifyUser(getUser(request));
        final String id = getImageID(request);
        final GcsService gcsService = GcsServiceFactory.createGcsService(RetryParams.getDefaultInstance());
        final GcsFilename filename = new GcsFilename(request.getServerName(), id);
        final GcsFileMetadata metadata = gcsService.getMetadata(filename);
        // Non-null metadata means the object already exists; stored objects are immutable.
        if (metadata != null) {
            throw new HTTPException(HttpServletResponse.SC_FORBIDDEN, "Cannot modify");
        }
        // BufferedInputStream supports mark/reset, which guessContentTypeFromStream
        // needs to peek at the leading bytes without consuming them.
        final BufferedInputStream inputStream = new BufferedInputStream(request.getInputStream());
        final String mimetype = URLConnection.guessContentTypeFromStream(inputStream);
        if (mimetype == null) {
            throw new HTTPException(HttpServletResponse.SC_BAD_REQUEST, "Unrecognised image type");
        }
        final GcsFileOptions.Builder fileOptionsBuilder = new GcsFileOptions.Builder();
        fileOptionsBuilder.mimeType(mimetype);
        final GcsFileOptions fileOptions = fileOptionsBuilder.build();
        final GcsOutputChannel outputChannel = gcsService.createOrReplace(filename, fileOptions);
        // Hash the bytes while streaming them into GCS so the content address can be
        // verified without a second pass.
        final HashingOutputStream outputStream = new HashingOutputStream(Hashing.sha256(),
                Channels.newOutputStream(outputChannel));
        ByteStreams.copy(inputStream, outputStream);
        String hash = outputStream.hash().toString();
        if (!hash.equals(id)) {
            // Content does not match its claimed address: remove it and fail the request.
            // NOTE(review): the output channel is not closed on this path — confirm
            // whether leaving it unclosed is intentional before changing.
            gcsService.delete(filename);
            throw new HTTPException(HttpServletResponse.SC_BAD_REQUEST, "Invalid hash");
        }
        outputStream.close();
        outputChannel.close();
    } catch (HTTPException e) {
        // Translate application-level errors into HTTP status responses.
        e.writeTo(response);
    }
}
From source file:com.facebook.buck.autodeps.AutodepsWriter.java
/** * Writes the file only if the contents are different to avoid creating noise for Watchman/buckd. * @param deps Keys must be sorted so the output is generated consistently. * @param includeSignature Whether to insert a signature for the contents of the file. * @param generatedFile Where to write the generated output. * @param mapper To aid in JSON serialization. * @return whether the file was written// w w w .ja va 2s. co m */ private static boolean writeSignedFile(SortedMap<String, SortedMap<String, Iterable<String>>> deps, boolean includeSignature, Path generatedFile, ObjectMapper mapper) throws IOException { try (ByteArrayOutputStream bytes = new ByteArrayOutputStream(); HashingOutputStream hashingOutputStream = new HashingOutputStream(Hashing.sha1(), bytes)) { ObjectWriter jsonWriter = mapper.writer(PRETTY_PRINTER.get()); jsonWriter.writeValue(includeSignature ? hashingOutputStream : bytes, deps); // Flush a trailing newline through the HashingOutputStream so it is included both the // output and the signature calculation. hashingOutputStream.write('\n'); String serializedJson = bytes.toString(Charsets.UTF_8.name()); String contentsToWrite; if (includeSignature) { HashCode hash = hashingOutputStream.hash(); contentsToWrite = String.format(AUTODEPS_CONTENTS_FORMAT_STRING, hash, serializedJson); } else { contentsToWrite = serializedJson; } // Do not write file unless the contents have changed. Writing the file will cause the daemon // to indiscriminately invalidate any cached build rules for the associated build file. if (generatedFile.toFile().isFile()) { String existingContents = com.google.common.io.Files.toString(generatedFile.toFile(), Charsets.UTF_8); if (contentsToWrite.equals(existingContents)) { return false; } } try (Writer writer = Files.newBufferedWriter(generatedFile, Charsets.UTF_8)) { writer.write(contentsToWrite); } return true; } }
From source file:io.macgyver.plugin.cmdb.AppInstanceManager.java
public String computeSignature(ObjectNode n, Set<String> exclusions) { List<String> list = Lists.newArrayList(n.fieldNames()); Collections.sort(list);// ww w. j a v a 2 s .c om HashingOutputStream hos = new HashingOutputStream(Hashing.sha1(), ByteStreams.nullOutputStream()); list.forEach(it -> { if (exclusions != null && !exclusions.contains(it)) { JsonNode val = n.get(it); if (val.isObject() || val.isArray()) { // skipping } else { try { hos.write(it.getBytes()); hos.write(val.toString().getBytes()); } catch (IOException e) { } } } }); return hos.hash().toString(); }