Usage examples for `com.google.common.hash.Hashing.sha1()` (signature: `public static HashFunction sha1()`), collected from open-source projects.
From source file:com.facebook.buck.shell.WorkerProcessPoolFactory.java
/** * Returns an existing WorkerProcessPool for the given job params if one exists, otherwise * creates a new one.// w ww.j ava 2 s. c o m */ public WorkerProcessPool getWorkerProcessPool(final ExecutionContext context, WorkerJobParams paramsToUse) { ConcurrentMap<String, WorkerProcessPool> processPoolMap; final String key; final HashCode workerHash; if (paramsToUse.getPersistentWorkerKey().isPresent() && context.getPersistentWorkerPools().isPresent()) { processPoolMap = context.getPersistentWorkerPools().get(); key = paramsToUse.getPersistentWorkerKey().get(); workerHash = paramsToUse.getWorkerHash().get(); } else { processPoolMap = context.getWorkerProcessPools(); key = Joiner.on(' ').join(getCommand(context.getPlatform(), paramsToUse)); workerHash = Hashing.sha1().hashString(key, StandardCharsets.UTF_8); } // If the worker pool has a different hash, recreate the pool. WorkerProcessPool pool = processPoolMap.get(key); if (pool != null && !pool.getPoolHash().equals(workerHash)) { if (processPoolMap.remove(key, pool)) { pool.close(); } pool = processPoolMap.get(key); } if (pool == null) { pool = createWorkerProcessPool(context, paramsToUse, processPoolMap, key, workerHash); } int poolCapacity = pool.getCapacity(); if (poolCapacity != paramsToUse.getMaxWorkers()) { context.postEvent(ConsoleEvent.warning( "There are two 'worker_tool' targets declared with the same command (%s), but " + "different 'max_worker' settings (%d and %d). Only the first capacity is applied. " + "Consolidate these workers to avoid this warning.", key, poolCapacity, paramsToUse.getMaxWorkers())); } return pool; }
From source file:org.glowroot.agent.embedded.repo.FullQueryTextDao.java
String updateLastCaptureTime(String fullText, long captureTime) throws SQLException { String fullTextSha1 = Hashing.sha1().hashString(fullText, UTF_8).toString(); if (lastCaptureTimeUpdatedInThePastDay.getIfPresent(fullTextSha1) != null) { return fullTextSha1; }//from w ww. ja va 2 s.c om synchronized (lock) { int updateCount = dataSource.update( "update full_query_text set last_capture_time = ? where full_text_sha1 = ?", captureTime, fullTextSha1); if (updateCount == 0) { dataSource.update("insert into full_query_text (full_text_sha1, full_text," + " last_capture_time) values (?, ?, ?)", fullTextSha1, fullText, captureTime); } } lastCaptureTimeUpdatedInThePastDay.put(fullTextSha1, true); return fullTextSha1; }
From source file:com.eventsourcing.h2.index.HashIndex.java
/**
 * Creates a {@code HashIndex} over the given attribute, backed by the supplied MVStore and
 * using SHA-1 as the hash function.
 *
 * @param store     backing MVStore for the index
 * @param attribute entity attribute to index
 * @return a new SHA-1 based hash index over {@code attribute}
 */
public static <A, O extends Entity> HashIndex<A, O> onAttribute(MVStore store, Attribute<O, A> attribute) {
    // Delegates to the general factory, fixing the hash function to SHA-1.
    return onAttribute(store, attribute, Hashing.sha1());
}
From source file:com.torchmind.stockpile.server.controller.v1.BlacklistController.java
/** * <code>POST /v1/blacklist/</code> * * Checks any hostname supplied as a form parameter in the post body against the server blacklist. * * @param hostname a hostname to check against. * @return a blacklist result./* ww w.j a va2 s. c o m*/ */ @Nonnull @RequestMapping(params = "hostname", method = RequestMethod.POST) public BlacklistResult check(@Nonnull @RequestParam("hostname") String hostname) { // before checking for wildcards check for exact matches of the hostname hostname = hostname.toLowerCase(); String hash = Hashing.sha1().hashString(hostname, StandardCharsets.ISO_8859_1).toString(); if (this.hashes.contains(hash)) { return new BlacklistResult(hostname, true); } if (IP_ADDRESS_PATTERN.matcher(hostname).matches()) { return this.checkAddress(hostname); } return this.checkHostname(hostname); }
From source file:com.facebook.buck.worker.WorkerProcessPoolFactory.java
/** * Returns an existing WorkerProcessPool for the given job params if one exists, otherwise creates * a new one./*www.java2 s.com*/ */ public WorkerProcessPool getWorkerProcessPool(ExecutionContext context, WorkerProcessParams paramsToUse) { ConcurrentMap<String, WorkerProcessPool> processPoolMap; String key; HashCode workerHash; if (paramsToUse.getWorkerProcessIdentity().isPresent() && context.getPersistentWorkerPools().isPresent()) { processPoolMap = context.getPersistentWorkerPools().get(); key = paramsToUse.getWorkerProcessIdentity().get().getPersistentWorkerKey(); workerHash = paramsToUse.getWorkerProcessIdentity().get().getWorkerHash(); } else { processPoolMap = context.getWorkerProcessPools(); key = Joiner.on(' ').join(getCommand(context.getPlatform(), paramsToUse)); workerHash = Hashing.sha1().hashString(key, StandardCharsets.UTF_8); } // If the worker pool has a different hash, recreate the pool. WorkerProcessPool pool = processPoolMap.get(key); if (pool != null && !pool.getPoolHash().equals(workerHash)) { if (processPoolMap.remove(key, pool)) { pool.close(); } pool = processPoolMap.get(key); } if (pool == null) { pool = createWorkerProcessPool(context, paramsToUse, processPoolMap, key, workerHash); } int poolCapacity = pool.getCapacity(); if (poolCapacity != paramsToUse.getMaxWorkers()) { context.postEvent(ConsoleEvent.warning( "There are two 'worker_tool' targets declared with the same command (%s), but " + "different 'max_worker' settings (%d and %d). Only the first capacity is applied. " + "Consolidate these workers to avoid this warning.", key, poolCapacity, paramsToUse.getMaxWorkers())); } return pool; }
From source file:com.facebook.buck.hashing.FilePathHashLoader.java
@Override public HashCode get(Path root) throws IOException { // In case the root path is a directory, collect all files contained in it and sort them before // hashing to avoid non-deterministic directory traversal order from influencing the hash. final ImmutableSortedSet.Builder<Path> files = ImmutableSortedSet.naturalOrder(); Files.walkFileTree(defaultCellRoot.resolve(root), ImmutableSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() { @Override//w w w . j ava2 s.co m public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { files.add(file); return FileVisitResult.CONTINUE; } }); Hasher hasher = Hashing.sha1().newHasher(); for (Path file : files.build()) { file = defaultCellRoot.resolve(file).toRealPath(); boolean assumeModified = assumeModifiedFiles.contains(file); Path relativePath = MorePaths.relativize(defaultCellRoot, file); // For each file add its path to the hasher suffixed by whether we assume the file to be // modified or not. This way files with different paths always result in different hashes and // files that are assumed to be modified get different hashes than all unmodified files. StringHashing.hashStringAndLength(hasher, relativePath.toString()); hasher.putBoolean(assumeModified); } return hasher.hash(); }
From source file:com.test.config.service.zkdao.AuthDao.java
/**
 * Returns the raw SHA-1 digest bytes of the given text, encoded as UTF-8.
 */
private byte[] sha1Digest(String text) {
    // Single expression: hash the UTF-8 bytes of the text and expose the digest as a byte array.
    return Hashing.sha1().hashString(text, Charsets.UTF_8).asBytes();
}
From source file:com.facebook.buck.features.js.JsFlavors.java
/**
 * Derives a flavor for a source file: its sanitized file name plus a 10-character SHA-1 prefix
 * of the unix-style path, so files with identical names but different paths get distinct flavors.
 */
public static Flavor fileFlavorForSourcePath(Path path) {
    String unixPath = MorePaths.pathWithUnixSeparators(path);
    // Ten hex chars of SHA-1 are enough to disambiguate while keeping flavor names short.
    String hashPrefix = Hashing.sha1().hashString(unixPath, Charsets.UTF_8).toString().substring(0, 10);
    String safeFileName = Flavor.replaceInvalidCharacters(path.getFileName().toString());
    return InternalFlavor.of(fileFlavorPrefix + safeFileName + "-" + hashPrefix);
}
From source file:hu.skawa.migrator_maven_plugin.DependencyExport.java
/**
 * Maven plugin entry point: for every project artifact, records its SHA-1 hash and the server
 * it was resolved from (parsed from the sibling {@code _remote.repositories} file), then either
 * writes the collected dependencies to "-directives"/"-references" files (when
 * {@code outputFilePrefix} is set) or logs them as Bazel directives.
 *
 * @throws MojoExecutionException if an artifact file cannot be read for hashing
 */
public void execute() throws MojoExecutionException {
    Set<Artifact> artifacts = project.getArtifacts();
    for (Artifact arti : artifacts) {
        File file = arti.getFile();
        String hash = "";
        try {
            // SHA-1 of the artifact's bytes; a read failure aborts the whole mojo.
            byte[] contents = Files.toByteArray(file);
            hash = Hashing.sha1().hashBytes(contents).toString();
        } catch (IOException e) {
            throw new MojoExecutionException("Dependency could not be hashed!", e);
        }
        InternalDependency id = new InternalDependency(arti.getGroupId(), arti.getArtifactId(),
                arti.getVersion(), hash);
        // Maven writes a _remote.repositories descriptor next to each cached artifact; it names
        // the repository the jar came from.
        File remotes = new File(file.getParent() + File.separator + "_remote.repositories");
        try {
            String remoteDescriptorContent = Files.toString(remotes, StandardCharsets.UTF_8);
            getLog().debug(remoteDescriptorContent);
            Matcher jarServerMatcher = jarPattern.matcher(remoteDescriptorContent);
            // NOTE(review): if the descriptor has several matches, the last one wins here.
            while (jarServerMatcher.find()) {
                String server = jarServerMatcher.group(1);
                if (server != null) {
                    id.setJarServer(server);
                } else {
                    id.setJarServer("");
                }
            }
        } catch (IOException e) {
            // Missing/unreadable descriptor is non-fatal: warn and record an empty server.
            getLog().warn(
                    "Could not locate repository file for " + arti.getArtifactId() + ", setting to empty!");
            id.setJarServer("");
        }
        allDependencies.add(id);
    }
    if (outputFilePrefix != null) {
        // File output mode: write directives and references to prefixed files.
        File directives = new File(outputFilePrefix + "-" + project.getName() + "-directives");
        File references = new File(outputFilePrefix + "-" + project.getName() + "-references");
        try (FileWriter directiveWriter = new FileWriter(directives);
                FileWriter referenceWriter = new FileWriter(references);) {
            for (InternalDependency dep : allDependencies) {
                if (outputDirectives) {
                    directiveWriter.append(dep.toBazelDirective(addHashes, addServers));
                    directiveWriter.append("\n");
                }
                if (outputReferences) {
                    referenceWriter.append(dep.getArtifactId() + ": @" + dep.getBazelName() + "//jar");
                    referenceWriter.append("\n");
                }
            }
        } catch (IOException e) {
            // Write failures are logged, not rethrown; the build continues.
            getLog().error(e);
        }
    } else {
        // No prefix configured: just log each dependency as a Bazel directive.
        for (InternalDependency dep : allDependencies) {
            getLog().info(dep.toBazelDirective(addHashes, addServers));
        }
    }
}
From source file:org.gradle.api.internal.artifacts.repositories.resolver.MavenMetadataLoader.java
private void parseMavenMetadataInfo(final ExternalResourceName metadataLocation, final MavenMetadata metadata) throws IOException { ExternalResource resource = cacheAwareExternalResourceAccessor.getResource(metadataLocation, new CacheAwareExternalResourceAccessor.ResourceFileStore() { @Override//from w w w. j a v a 2 s .c om public LocallyAvailableResource moveIntoCache(File downloadedResource) { String key = Hashing.sha1().hashString(metadataLocation.toString(), Charsets.UTF_8) .toString(); return resourcesFileStore.move(key, downloadedResource); } }, null); if (resource == null) { throw new MissingResourceException(metadataLocation.getUri(), String.format("Maven meta-data not available at %s", metadataLocation)); } parseMavenMetadataInto(resource, metadata); }