List of usage examples for `com.google.common.hash.HashCode.asBytes()`
@CheckReturnValue public abstract byte[] asBytes();
From source file: com.facebook.buck.rules.keys.hasher.GuavaRuleKeyHasher.java
@Override
public GuavaRuleKeyHasher putPath(Path path, HashCode hash) {
    // Record a path entry: its rendered string plus the raw digest bytes,
    // each tagged with the PATH type marker so entries of different types
    // cannot collide in the rule key.
    String rendered = path.toString();
    putStringified(RuleKeyHasherTypes.PATH, rendered);
    putBytes(RuleKeyHasherTypes.PATH, hash.asBytes());
    return this;
}
From source file: org.jclouds.openstack.swift.v1.binders.SetPayload.java
@SuppressWarnings("unchecked") @Override//from w w w . j av a2 s . c om public <R extends HttpRequest> R bindToRequest(R request, Object input) { Builder<?> builder = request.toBuilder(); Payload payload = Payload.class.cast(input); if (payload.getContentMetadata().getContentType() == null) { // TODO: use `X-Detect-Content-Type` here. Should be configurable via a property. payload.getContentMetadata().setContentType(MediaType.APPLICATION_OCTET_STREAM); } Long contentLength = payload.getContentMetadata().getContentLength(); if (contentLength != null && contentLength >= 0) { checkArgument(contentLength <= 5L * 1024 * 1024 * 1024, "maximum size for put object is 5GB, %s", contentLength); } else { builder.replaceHeader(TRANSFER_ENCODING, "chunked").build(); } HashCode md5 = payload.getContentMetadata().getContentMD5AsHashCode(); if (md5 != null) { // Swift will validate the md5, if placed as an ETag header builder.replaceHeader(ETAG, base16().lowerCase().encode(md5.asBytes())); } Date expires = payload.getContentMetadata().getExpires(); if (expires != null) { builder.addHeader(OBJECT_DELETE_AT, String.valueOf(MILLISECONDS.toSeconds(expires.getTime()))).build(); } return (R) builder.payload(payload).build(); }
From source file: com.facebook.buck.rules.keys.PolynomialRuleKeyHasher.java
@Override
public PolynomialRuleKeyHasher putPath(Path path, HashCode hash) {
    // Feed order is significant for the polynomial hash:
    // rendered path, PATH tag, digest bytes, PATH tag again.
    feedString(path.toString());
    feed(RuleKeyHasherTypes.PATH);
    byte[] digest = hash.asBytes();
    feedBytes(digest);
    feed(RuleKeyHasherTypes.PATH);
    return this;
}
From source file: org.fabrician.enabler.util.DockerfileBuildLock.java
/**
 * Acquires an exclusive, cross-process build lock for the given image/Dockerfile pair.
 * The lock file name is an MD5 of the image name plus the Dockerfile bytes, so
 * concurrent builds of identical content contend on the same lock.
 *
 * @throws Exception if the exclusive lock cannot be acquired
 */
private DockerfileBuildLock(String dockerImageName, File dockerFilePath) throws Exception {
    this.dockerImageName = dockerImageName;
    this.dockerFilePath = dockerFilePath;
    byte[] dockerBytes = FileUtils.readFileToByteArray(dockerFilePath);
    // Hash image name + Dockerfile content into a filesystem-safe (base64url) name.
    HashCode hc = Hashing.md5().newHasher()
            .putString(dockerImageName, Charsets.UTF_8)
            .putBytes(dockerBytes)
            .hash();
    String dockerFileHash = BaseEncoding.base64Url().encode(hc.asBytes());
    File tmpDir = new File(System.getProperty("java.io.tmpdir"));
    lock_file = new File(tmpDir, dockerFileHash + ".dockerfile_lck");
    logger.info("Attempt to acquire Dockerfile build lock at path :" + lock_file);
    lock_channel = FileUtils.openOutputStream(lock_file).getChannel();
    try {
        lock = lock_channel.tryLock();
    } catch (Exception e) {
        // Fix: do not leak the open channel when tryLock itself fails
        // (e.g. OverlappingFileLockException within this JVM).
        lock_channel.close();
        throw e;
    }
    if (lock == null) {
        // Fix: close the channel before failing so the file handle is not leaked.
        lock_channel.close();
        throw new Exception("Can't create exclusive build lock for image [" + dockerImageName
                + "] for Dockerfile [" + dockerFilePath + "]");
    } else {
        logger.info("Acquired Dockerfile build lock at lock path : [" + lock_file + "]");
    }
}
From source file: com.facebook.buck.rules.keys.hasher.GuavaRuleKeyHasher.java
@Override
public GuavaRuleKeyHasher putArchiveMemberPath(ArchiveMemberPath path, HashCode hash) {
    // Record an archive-member entry: its rendered string plus the raw digest
    // bytes, each tagged with the ARCHIVE_MEMBER_PATH type marker.
    String rendered = path.toString();
    putStringified(RuleKeyHasherTypes.ARCHIVE_MEMBER_PATH, rendered);
    putBytes(RuleKeyHasherTypes.ARCHIVE_MEMBER_PATH, hash.asBytes());
    return this;
}
From source file: com.facebook.buck.rules.keys.PolynomialRuleKeyHasher.java
@Override public PolynomialRuleKeyHasher putArchiveMemberPath(ArchiveMemberPath path, HashCode hash) { feedString(path.toString());//w w w .ja va 2 s . c om feed(RuleKeyHasherTypes.ARCHIVE_MEMBER_PATH); feedBytes(hash.asBytes()); feed(RuleKeyHasherTypes.ARCHIVE_MEMBER_PATH); return this; }
From source file: com.matthewmitchell.peercoinj.core.CheckpointManager.java
private Sha256Hash readTextual(InputStream inputStream) throws IOException { Hasher hasher = Hashing.sha256().newHasher(); BufferedReader reader = null; try {//w w w .ja v a 2s.co m reader = new BufferedReader(new InputStreamReader(inputStream, Charsets.US_ASCII)); String magic = reader.readLine(); if (!TEXTUAL_MAGIC.equals(magic)) throw new IOException("unexpected magic: " + magic); int numSigs = Integer.parseInt(reader.readLine()); for (int i = 0; i < numSigs; i++) reader.readLine(); // Skip sigs for now. int numCheckpoints = Integer.parseInt(reader.readLine()); checkState(numCheckpoints > 0); // Hash numCheckpoints in a way compatible to the binary format. hasher.putBytes(ByteBuffer.allocate(4).order(ByteOrder.BIG_ENDIAN).putInt(numCheckpoints).array()); final int size = StoredBlock.COMPACT_SERIALIZED_SIZE; ByteBuffer buffer = ByteBuffer.allocate(size); for (int i = 0; i < numCheckpoints; i++) { byte[] bytes = BASE64.decode(reader.readLine()); hasher.putBytes(bytes); buffer.position(0); buffer.put(bytes); buffer.position(0); StoredBlock block = StoredBlock.deserializeCompact(params, buffer); checkpoints.put(block.getHeader().getTimeSeconds(), block); } HashCode hash = hasher.hash(); log.info("Read {} checkpoints, hash is {}", checkpoints.size(), hash); return new Sha256Hash(hash.asBytes()); } finally { if (reader != null) reader.close(); } }
From source file: ninja.NinjaController.java
/**
 * Handles manual object uploads
 *
 * @param ctx the context describing the current request
 * @param bucket the name of the target bucket
 */
@Routed(priority = PriorityCollector.DEFAULT_PRIORITY - 1, value = "/ui/:1/upload")
public void uploadFile(WebContext ctx, String bucket) {
    try {
        String name = ctx.get("filename").asString(ctx.get("qqfile").asString());
        Bucket storageBucket = storage.getBucket(bucket);
        StoredObject object = storageBucket.getObject(name);
        // Stream the request body straight into the object's backing file.
        try (InputStream inputStream = ctx.getContent()) {
            try (FileOutputStream out = new FileOutputStream(object.getFile())) {
                ByteStreams.copy(inputStream, out);
            }
        }
        Map<String, String> properties = Maps.newTreeMap();
        properties.put(HttpHeaderNames.CONTENT_TYPE.toString(),
                ctx.getHeaderValue(HttpHeaderNames.CONTENT_TYPE).asString(MimeHelper.guessMimeType(name)));
        // Fix: Files.hash(File, HashFunction) is deprecated in Guava;
        // asByteSource(...).hash(...) is the documented replacement.
        HashCode hash = Files.asByteSource(object.getFile()).hash(Hashing.md5());
        // Content-MD5 is the base64-encoded digest, per the S3 convention.
        String md5 = BaseEncoding.base64().encode(hash.asBytes());
        properties.put("Content-MD5", md5);
        object.storeProperties(properties);
        ctx.respondWith().direct(HttpResponseStatus.OK, "{ success: true }");
    } catch (IOException e) {
        UserContext.handle(e);
        ctx.respondWith().direct(HttpResponseStatus.OK, "{ success: false }");
    }
}
From source file: org.guldenj.core.CheckpointManager.java
private Sha256Hash readTextual(InputStream inputStream) throws IOException { Hasher hasher = Hashing.sha256().newHasher(); BufferedReader reader = null; try {/*from w w w . java 2 s . co m*/ reader = new BufferedReader(new InputStreamReader(inputStream, Charsets.US_ASCII)); String magic = reader.readLine(); if (!TEXTUAL_MAGIC.equals(magic)) throw new IOException("unexpected magic: " + magic); int numSigs = Integer.parseInt(reader.readLine()); for (int i = 0; i < numSigs; i++) reader.readLine(); // Skip sigs for now. int numCheckpoints = Integer.parseInt(reader.readLine()); checkState(numCheckpoints > 0); // Hash numCheckpoints in a way compatible to the binary format. hasher.putBytes(ByteBuffer.allocate(4).order(ByteOrder.BIG_ENDIAN).putInt(numCheckpoints).array()); final int size = StoredBlock.COMPACT_SERIALIZED_SIZE; ByteBuffer buffer = ByteBuffer.allocate(size); for (int i = 0; i < numCheckpoints; i++) { byte[] bytes = BASE64.decode(reader.readLine()); hasher.putBytes(bytes); buffer.position(0); buffer.put(bytes); buffer.position(0); StoredBlock block = StoredBlock.deserializeCompact(params, buffer); checkpoints.put(block.getHeader().getTimeSeconds(), block); } HashCode hash = hasher.hash(); log.info("Read {} checkpoints, hash is {}", checkpoints.size(), hash); return Sha256Hash.wrap(hash.asBytes()); } finally { if (reader != null) reader.close(); } }
From source file: com.facebook.buck.core.build.engine.buildinfo.DefaultOnDiskBuildInfo.java
/**
 * Hashes every recorded output path of the artifact and persists two pieces of metadata:
 * a JSON map of path -> file hash (RECORDED_PATH_HASHES) and a single combined SHA-1
 * over all (path, hash) pairs (OUTPUT_HASH).
 */
@Override
public void writeOutputHashes(FileHashCache fileHashCache) throws IOException {
    ImmutableSortedSet<Path> pathsForArtifact = getPathsForArtifact();
    // Grab and record the output hashes in the build metadata so that cache hits avoid re-hashing
    // file contents. Since we use output hashes for input-based rule keys and for detecting
    // non-determinism, we would spend a lot of time re-hashing output paths -- potentially in
    // serialized in a single step. So, do the hashing here to distribute the workload across
    // several threads and cache the results.
    ImmutableSortedMap.Builder<String, String> outputHashes = ImmutableSortedMap.naturalOrder();
    Hasher hasher = Hashing.sha1().newHasher();
    // NOTE: pathsForArtifact is sorted, so the combined hash is deterministic
    // regardless of how the paths were collected.
    for (Path path : pathsForArtifact) {
        String pathString = path.toString();
        HashCode fileHash = fileHashCache.get(projectFilesystem, path);
        // Each (path, content-hash) pair is folded into the combined digest.
        hasher.putBytes(pathString.getBytes(Charsets.UTF_8));
        hasher.putBytes(fileHash.asBytes());
        outputHashes.put(pathString, fileHash.toString());
    }
    // Persist the per-path map as JSON and the combined digest as its hex string.
    projectFilesystem.writeContentsToPath(ObjectMappers.WRITER.writeValueAsString(outputHashes.build()),
            metadataDirectory.resolve(BuildInfo.MetadataKey.RECORDED_PATH_HASHES));
    projectFilesystem.writeContentsToPath(hasher.hash().toString(),
            metadataDirectory.resolve(BuildInfo.MetadataKey.OUTPUT_HASH));
}