Usage examples for com.google.common.hash.Hashing.md5()
public static HashFunction md5()
From source file:com.scireum.SDSMojo.java
/**
 * Computes the hex-encoded MD5 hash of the given artifact file's contents.
 *
 * @param artifactFile the file whose contents are hashed
 * @return the MD5 digest as a lowercase hex string (32 chars)
 * @throws IOException if the file cannot be read
 */
private String computeContentHash(File artifactFile) throws IOException {
    // Files.asByteSource replaces the deprecated Files.newInputStreamSupplier /
    // ByteStreams.hash(InputSupplier, ...) pair, which was removed in Guava 15+.
    // NOTE: MD5 is fine for a content fingerprint but must not be used for security.
    return Files.asByteSource(artifactFile).hash(Hashing.md5()).toString();
}
From source file:com.ibm.common.activitystreams.legacy.Binary.java
/**
 * Returns an InputStream for reading the data.
 *
 * <p>The stored payload is base64url text; it is decoded, optionally routed
 * through the supplied decompressor, and — when an "md5" property is present —
 * wrapped in a {@code HashingInputStream} so the digest can be checked as the
 * caller reads.
 *
 * @param compression Compression to apply, or {@code null} for none
 * @return InputStream over the decoded (and possibly decompressed) data
 * @throws IOException if a decompressor cannot be constructed
 */
public InputStream read(Compression<?, ?> compression) throws IOException {
    InputStream stream = BaseEncoding.base64Url().decodingStream(new StringReader(data()));
    if (compression != null) {
        stream = compression.decompressor(stream);
    }
    return has("md5") ? new HashingInputStream(Hashing.md5(), stream) : stream;
}
From source file:org.eclipse.che.api.watcher.server.detectors.EditorFileTracker.java
/**
 * Computes the MD5 of the file backing the given workspace path.
 *
 * <p>Paths with no backing io.File hash the empty string so callers still
 * receive a stable digest. Read failures are logged and yield {@code null}.
 *
 * @param wsPath workspace path to hash
 * @return lowercase hex MD5 string, or {@code null} on I/O error
 */
private String hashFile(String wsPath) {
    try {
        File file = fsManager.toIoFile(wsPath);
        if (file == null) {
            return Hashing.md5().hashString("", defaultCharset()).toString();
        }
        return hash(file, Hashing.md5()).toString();
    } catch (IOException e) {
        LOG.error("Error trying to read {} file and broadcast it", wsPath, e);
        return null;
    }
}
From source file:com.facebook.buck.versions.VersionedTargetGraphBuilder.java
/**
 * @return a flavor which summarizes the given version selections.
 *     The flavor is "v" followed by the first 7 hex chars of an MD5 over the
 *     (sorted) target/version pairs, so identical selections map to the same
 *     flavor.
 */
static Flavor getVersionedFlavor(SortedMap<BuildTarget, Version> versions) {
    Preconditions.checkArgument(!versions.isEmpty());
    Hasher digest = Hashing.md5().newHasher();
    // SortedMap iteration order makes the digest deterministic.
    for (Map.Entry<BuildTarget, Version> selection : versions.entrySet()) {
        digest.putString(selection.getKey().toString(), Charsets.UTF_8);
        digest.putString(selection.getValue().getName(), Charsets.UTF_8);
    }
    return ImmutableFlavor.of("v" + digest.hash().toString().substring(0, 7));
}
From source file:com.qubole.rubix.core.CachingFileSystem.java
/**
 * Returns synthetic block locations that pin each split of the file to a node
 * chosen by consistent-hashing, so the scheduler places tasks where cached
 * data is expected to live.
 *
 * <p>On worker nodes (or when caching was skipped) this simply delegates to
 * the underlying filesystem. Otherwise the file is divided into
 * clusterManager.getSplitSize()-sized ranges and each range is assigned to
 * one node via {@code Hashing.consistentHash} over an MD5 of
 * path + start + end.
 */
@Override
public BlockLocation[] getFileBlockLocations(FileStatus file, long start, long len) throws IOException {
    if (!clusterManager.isMaster() || cacheSkipped) {
        // If in worker node, blockLocation does not matter
        return fs.getFileBlockLocations(file, start, len);
    }
    List<String> nodes = clusterManager.getNodes();
    if (file == null) {
        return null;
    } else if (start >= 0L && len >= 0L) {
        if (file.getLen() < start) {
            return new BlockLocation[0];
        } else {
            // Using similar logic of returning all Blocks as FileSystem.getFileBlockLocations does instead of only returning blocks from start till len
            BlockLocation[] blockLocations = new BlockLocation[(int) Math
                    .ceil((double) file.getLen() / clusterManager.getSplitSize())];
            int blockNumber = 0;
            for (long i = 0; i < file.getLen(); i = i + clusterManager.getSplitSize()) {
                long end = i + clusterManager.getSplitSize();
                if (end > file.getLen()) {
                    // Last split may be shorter than the configured split size.
                    end = file.getLen();
                }
                // Key identifies this split; hashed so the same split always maps
                // to the same node while nodes.size() is stable.
                String key = file.getPath().toString() + i + end;
                HashFunction hf = Hashing.md5();
                HashCode hc = hf.hashString(key, Charsets.UTF_8);
                int nodeIndex = Hashing.consistentHash(hc, nodes.size());
                String[] name = new String[] { nodes.get(nodeIndex) };
                String[] host = new String[] { nodes.get(nodeIndex) };
                blockLocations[blockNumber++] = new BlockLocation(name, host, i, end - i);
                log.info(String.format("BlockLocation %s %d %d %s totalHosts: %s", file.getPath().toString(), i,
                        end - i, host[0], nodes.size()));
            }
            return blockLocations;
        }
    } else {
        throw new IllegalArgumentException("Invalid start or len parameter");
    }
}
From source file:org.lenskit.data.store.PackedEntityCollection.java
/**
 * Describes this collection: entity count, attribute list, and a content
 * digest. The digest is computed lazily on first use and cached in
 * {@code contentHash}.
 */
@Override
public void describeTo(DescriptionWriter writer) {
    writer.putField("entity_count", size);
    writer.putList("attributes", attributes);
    if (contentHash == null) {
        // Fold every row's id and attribute hash codes (attribute 0 is the id
        // store, already covered by putLong) into one MD5 digest.
        Hasher hasher = Hashing.md5().newHasher();
        for (int row = 0; row < size; row++) {
            hasher.putLong(idStore.getLong(row));
            for (int attr = 1; attr < attributes.size(); attr++) {
                hasher.putInt(Objects.hashCode(attrStores[attr].get(row)));
            }
        }
        contentHash = hasher.hash();
    }
    writer.putField("content_hash", contentHash);
}
From source file:ca.ualberta.physics.cssdp.file.service.CacheService.java
/**
 * Computes the hex-encoded MD5 digest of the given file's contents.
 *
 * @param file the file to hash
 * @return the MD5 digest as a lowercase hex string (32 chars)
 * @throws RuntimeException wrapping any IOException raised while reading
 */
private String getMD5(File file) {
    try {
        return Files.hash(file, Hashing.md5()).toString();
    } catch (IOException e) {
        // Throwables.propagate(e) is deprecated; Guava's guidance for a
        // checked cause is to wrap it in a RuntimeException directly, which
        // is exactly what propagate did for an IOException.
        throw new RuntimeException(e);
    }
}
From source file:net.nharyes.drivecopy.biz.wfm.FileStorageWorkflowManagerImpl.java
private FileBO upsert(FileBO file, boolean upload) throws WorkflowManagerException { try {/*from w ww.j a v a 2s .co m*/ // get token TokenBO token = getToken(); // log action if (upload) logger.info(String.format("Upload '%s' to entry '%s'", file.getFile().getAbsolutePath(), file.getName())); else logger.info(String.format("Replace entry '%s' with '%s'", file.getName(), file.getFile().getAbsolutePath())); // check force option if (upload && file.isForce()) logger.warning("force option ignored"); EntryBO entry = null; String parentId = null; try { // process folders and get parent ID parentId = driveSdo.getLastFolderId(token, extractFolders(file.getName()), file.isCreateFolders()); // search entry entry = driveSdo.searchEntry(token, extractFileName(file.getName()), parentId); if (upload) { // entry already exists throw new SdoException(String.format("Entry with name '%s' already exists", file.getName())); } } catch (ItemNotFoundException ex) { // check force option if (!upload && !file.isForce()) { // re-throw exception throw ex; } else if (!upload && file.isForce()) { // switch to upload mode upload = true; logger.info("Switched to upload mode"); } if (upload) { // compose BO entry = new EntryBO(); entry.setName(extractFileName(file.getName())); } } // set file property entry.setFile(file.getFile()); // set MIME type property entry.setMimeType(file.getMimeType()); // check skip revision option if (file.isSkipRevision() && upload) logger.warning("skip revision option ignored"); else entry.setSkipRevision(file.isSkipRevision()); // check directory DirectoryBO dirBO = new DirectoryBO(); if (file.isDirectory()) { // compress directory logger.info(String.format("Compress directory with level '%d'", file.getCompressionLevel())); dirBO.setFile(file.getFile()); dirBO.setLevel(file.getCompressionLevel()); dirBO = directoryCompressorWorkflowManager.handleWorkflow(dirBO, DirectoryCompressorWorkflowManager.ACTION_COMPRESS); // replace file entry.setFile(dirBO.getFile()); // 
in case set ZIP MIME type if (entry.getMimeType() == null) entry.setMimeType("application/zip"); } else { // in case set generic MIME type if (entry.getMimeType() == null) entry.setMimeType("application/octet-stream"); } // upload/replace entry logger.info(String.format("MIME type of the entry: %s", entry.getMimeType())); if (upload) entry = driveSdo.uploadEntry(token, entry, parentId); else entry = driveSdo.updateEntry(token, entry); // in case check MD5 of the replaced entry if (file.isCheckMd5()) { // calculate MD5 of the local file/directory logger.info("calculate the MD5 summary of the file..."); byte[] digest = Files.hash(entry.getFile(), Hashing.md5()).asBytes(); String sDigest = new BigInteger(1, digest).toString(16); logger.fine(String.format("digest of the file: %s", sDigest)); logger.fine(String.format("digest of the entry: %s", entry.getMd5Sum())); // compare digests if (!sDigest.equalsIgnoreCase(entry.getMd5Sum())) throw new WorkflowManagerException("wrong digest!"); logger.info("digests comparison OK"); } // in case delete temporary file if (file.isDirectory()) { logger.fine("Delete temporary file"); entry.getFile().delete(); } // in case delete file or directory if (file.isDeleteAfter()) { logger.info("Process file(s) for deletion..."); processFileForDeletion(file.getFile(), dirBO.getNotCompressed()); } // return updated entry FileBO fBO = new FileBO(); fBO.setFile(entry.getFile()); fBO.setName(entry.getName()); return fBO; } catch (SdoException ex) { // re-throw exception throw new WorkflowManagerException(ex.getMessage(), ex); } catch (IOException ex) { // re-throw exception throw new WorkflowManagerException(ex.getMessage(), ex); } }
From source file:com.google.api.control.aggregator.ReportRequestAggregator.java
/**
 * Obtains the {@code HashCode} for the contents of {@code value}.
 *
 * <p>Consumer id and operation name are fed NUL-separated (so adjacent fields
 * cannot collide), then the operation's labels are folded in via
 * {@code Signing.putLabels}.
 *
 * @param value a {@code Operation} to be signed
 * @return the {@code HashCode} corresponding to {@code value}
 */
private static HashCode sign(Operation value) {
    Hasher hasher = Hashing.md5().newHasher();
    hasher.putString(value.getConsumerId(), StandardCharsets.UTF_8).putChar('\0');
    hasher.putString(value.getOperationName(), StandardCharsets.UTF_8).putChar('\0');
    return Signing.putLabels(hasher, value.getLabels()).hash();
}
From source file:com.google.idea.blaze.base.wizard2.ui.BlazeEditProjectViewControl.java
/**
 * Refreshes the edit-project-view control from the builder's current
 * selections, reinitializing only when any input parameter changed.
 *
 * <p>Change detection works by MD5-hashing every parameter that feeds
 * {@code init} and comparing against the previously stored {@code paramsHash};
 * the feeding order below is therefore part of the contract and must stay
 * stable across calls.
 */
public void update(BlazeNewProjectBuilder builder) {
    BlazeSelectWorkspaceOption workspaceOption = builder.getWorkspaceOption();
    BlazeSelectProjectViewOption projectViewOption = builder.getProjectViewOption();
    String workspaceName = workspaceOption.getWorkspaceName();
    WorkspaceRoot workspaceRoot = workspaceOption.getWorkspaceRoot();
    WorkspacePath workspacePath = projectViewOption.getSharedProjectView();
    String initialProjectViewText = projectViewOption.getInitialProjectViewText();
    boolean allowAddDefaultValues = projectViewOption.allowAddDefaultProjectViewValues()
            && allowAddprojectViewDefaultValues.getValue();
    WorkspacePathResolver workspacePathResolver = workspaceOption.getWorkspacePathResolver();
    // Nullable inputs are hashed as "" so a null and an absent value collapse
    // to the same digest.
    HashCode hashCode = Hashing.md5().newHasher().putUnencodedChars(workspaceName)
            .putUnencodedChars(workspaceRoot.toString())
            .putUnencodedChars(workspacePath != null ? workspacePath.toString() : "")
            .putUnencodedChars(initialProjectViewText != null ? initialProjectViewText : "")
            .putBoolean(allowAddDefaultValues).hash();
    // If any params have changed, reinit the control
    if (!hashCode.equals(paramsHash)) {
        this.paramsHash = hashCode;
        init(workspaceName, workspaceRoot, workspacePathResolver, workspacePath, initialProjectViewText,
                allowAddDefaultValues);
    }
}