Usage examples for com.google.common.hash.Hashing.crc32()
public static HashFunction crc32()
From source file:com.yahoo.pulsar.broker.namespace.NamespaceService.java
/** * Default constructor./*from w w w . j a v a 2s . co m*/ * * @throws PulsarServerException */ public NamespaceService(PulsarService pulsar) { this.pulsar = pulsar; host = pulsar.getAdvertisedAddress(); this.config = pulsar.getConfiguration(); this.loadManager = pulsar.getLoadManager(); ServiceUnitZkUtils.initZK(pulsar.getLocalZkCache().getZooKeeper(), pulsar.getBrokerServiceUrl()); this.bundleFactory = new NamespaceBundleFactory(pulsar, Hashing.crc32()); this.ownershipCache = new OwnershipCache(pulsar, bundleFactory); }
From source file:com.android.builder.signing.SignedJarApkCreator.java
private static void configureStoredEntry(JarEntry entry, File inputFile) throws IOException { ByteSource byteSource = Files.asByteSource(inputFile); long size = inputFile.length(); entry.setMethod(ZipEntry.STORED); entry.setSize(size);//from w w w.j a va 2s . c om entry.setCompressedSize(size); entry.setCrc(byteSource.hash(Hashing.crc32()).padToLong()); }
From source file:io.bazel.rules.closure.webfiles.WebfilesWriter.java
/** * Adds {@code webfile} {@code data} to zip archive and returns proto index entry. * * <p>The returned value can be written to the manifest associated with a rule so that parent * rules can obtain the data written here. * * @param webfile original information about webfile * @return modified version of {@code webfile} that's suitable for writing to the final manifest *//*from w w w .j a v a 2s .co m*/ public WebfileInfo writeWebfile(WebfileInfo webfile, @WillNotClose InputStream input) throws IOException { checkNotNull(input, "input"); String name = WebfilesUtils.getZipEntryName(webfile); createEntriesForParentDirectories(name); ZipEntry entry = new ZipEntry(name); entry.setComment(webfile.getRunpath()); // Build outputs need to be deterministic. Bazel also doesn't care about modified times because // it uses the file digest to determine if a file is invalidated. So even if we did copy the // time information from the original file, it still might not be a good idea. entry.setCreationTime(EPOCH); entry.setLastModifiedTime(EPOCH); entry.setLastAccessTime(EPOCH); if (isAlreadyCompressed(webfile.getWebpath())) { // When opting out of compression, ZipOutputStream expects us to do ALL THIS entry.setMethod(ZipEntry.STORED); if (input instanceof ByteArrayInputStream) { entry.setSize(input.available()); Hasher hasher = Hashing.crc32().newHasher(); input.mark(-1); ByteStreams.copy(input, Funnels.asOutputStream(hasher)); input.reset(); entry.setCrc(hasher.hash().padToLong()); } else { byte[] data = ByteStreams.toByteArray(input); entry.setSize(data.length); entry.setCrc(Hashing.crc32().hashBytes(data).padToLong()); input = new ByteArrayInputStream(data); } } else { entry.setMethod(ZipEntry.DEFLATED); } HasherInputStream source = new HasherInputStream(input, Hashing.sha256().newHasher()); long offset = channel.position(); zip.putNextEntry(entry); ByteStreams.copy(source, zip); zip.closeEntry(); buffer.flush(); WebfileInfo result = webfile.toBuilder().clearPath() // Now 
that it's in the zip, we don't need the ctx.action execroot path. .setInZip(true).setOffset(offset).setDigest(ByteString.copyFrom(source.hasher.hash().asBytes())) .build(); webfiles.add(result); return result; }
From source file:com.google.devtools.build.buildjar.JarHelper.java
/** * Copies file or directory entries from the file system into the jar. * Directory entries will be detected and their names automatically '/' * suffixed.//from ww w . j a v a 2s. c om */ protected void copyEntry(String name, File file) throws IOException { if (!names.contains(name)) { if (!file.exists()) { throw new FileNotFoundException(file.getAbsolutePath() + " (No such file or directory)"); } boolean isDirectory = file.isDirectory(); if (isDirectory && !name.endsWith("/")) { name = name + '/'; // always normalize directory names before checking set } if (names.add(name)) { if (verbose) { System.err.println("adding " + file); } // Create a new entry long size = isDirectory ? 0 : file.length(); JarEntry outEntry = new JarEntry(name); long newtime = normalize ? normalizedTimestamp(name) : file.lastModified(); outEntry.setTime(newtime); outEntry.setSize(size); if (size == 0L) { outEntry.setMethod(JarEntry.STORED); outEntry.setCrc(0); out.putNextEntry(outEntry); } else { outEntry.setMethod(storageMethod); if (storageMethod == JarEntry.STORED) { outEntry.setCrc(Files.hash(file, Hashing.crc32()).padToLong()); } out.putNextEntry(outEntry); Files.copy(file, out); } out.closeEntry(); } } }
From source file:com.mbeddr.pluginmanager.com.intellij.ide.plugins.RepositoryHelper.java
@NotNull public static List<IdeaPluginDescriptor> loadPlugins(@Nullable String repositoryUrl, @Nullable BuildNumber buildnumber, @Nullable String channel, boolean forceHttps, @Nullable final ProgressIndicator indicator) throws IOException { String url;//from ww w.j a v a 2 s . c o m final File pluginListFile; final String host; try { URIBuilder uriBuilder; if (repositoryUrl == null) { uriBuilder = new URIBuilder(ApplicationInfoImpl.getShadowInstance().getPluginsListUrl()); pluginListFile = new File(PathManager.getPluginsPath(), channel == null ? PLUGIN_LIST_FILE : channel + "_" + PLUGIN_LIST_FILE); if (pluginListFile.length() > 0) { uriBuilder.addParameter("crc32", Files.hash(pluginListFile, Hashing.crc32()).toString()); } } else { uriBuilder = new URIBuilder(repositoryUrl); pluginListFile = null; } if (!URLUtil.FILE_PROTOCOL.equals(uriBuilder.getScheme())) { uriBuilder.addParameter("build", (buildnumber != null ? buildnumber.asString() : ApplicationInfoImpl.getShadowInstance().getApiVersion())); if (channel != null) uriBuilder.addParameter("channel", channel); } host = uriBuilder.getHost(); url = uriBuilder.build().toString(); } catch (URISyntaxException e) { throw new IOException(e); } if (indicator != null) { indicator.setText2(IdeBundle.message("progress.connecting.to.plugin.manager", host)); } RequestBuilder request = HttpRequests.request(url).forceHttps(forceHttps); return process(repositoryUrl, request.connect(new HttpRequests.RequestProcessor<List<IdeaPluginDescriptor>>() { @Override public List<IdeaPluginDescriptor> process(@NotNull HttpRequests.Request request) throws IOException { if (indicator != null) { indicator.checkCanceled(); } URLConnection connection = request.getConnection(); if (pluginListFile != null && pluginListFile.length() > 0 && connection instanceof HttpURLConnection && ((HttpURLConnection) connection) .getResponseCode() == HttpURLConnection.HTTP_NOT_MODIFIED) { return loadPluginList(pluginListFile); } if (indicator != null) { 
indicator.checkCanceled(); indicator.setText2(IdeBundle.message("progress.downloading.list.of.plugins", host)); } if (pluginListFile != null) { synchronized (RepositoryHelper.class) { FileUtil.ensureExists(pluginListFile.getParentFile()); request.saveToFile(pluginListFile, indicator); return loadPluginList(pluginListFile); } } else { return parsePluginList(request.getReader()); } } })); }
From source file:org.eclipse.che.core.db.schema.impl.flyway.CustomSqlMigrationResolver.java
private List<ResolvedMigration> resolveSqlMigrations() throws IOException, SQLException { LOG.info("Searching for sql scripts in locations {}", Arrays.toString(flywayConfiguration.getLocations())); final Map<Location, List<Resource>> allResources = finder.findResources(flywayConfiguration); LOG.debug("Found scripts: {}", allResources); final Map<String, Map<String, SqlScript>> scriptsInDir = new HashMap<>(); for (Location location : allResources.keySet()) { final List<Resource> resources = allResources.get(location); for (Resource resource : resources) { final SqlScript newScript = scriptsCreator.createScript(location, resource); if (!scriptsInDir.containsKey(newScript.dir)) { scriptsInDir.put(newScript.dir, new HashMap<>(4)); }/*from w w w . j av a2 s .co m*/ final Map<String, SqlScript> existingScripts = scriptsInDir.get(newScript.dir); final SqlScript existingScript = existingScripts.get(newScript.name); if (existingScript == null) { existingScripts.put(newScript.name, newScript); } else if (Objects.equals(existingScript.vendor, newScript.vendor)) { throw new FlywayException(format( "More than one script with name '%s' is registered for " + "database vendor '%s', script '%s' conflicts with '%s'", newScript.name, existingScript.vendor, newScript, existingScript)); } else if (vendorName.equals(newScript.vendor)) { existingScripts.put(newScript.name, newScript); } } } final Map<MigrationVersion, ResolvedMigration> migrations = new HashMap<>(); for (SqlScript script : scriptsInDir.values().stream().flatMap(scripts -> scripts.values().stream()) .collect(toList())) { final ResolvedMigrationImpl migration = new ResolvedMigrationImpl(); migration.setVersion(versionResolver.resolve(script, flywayConfiguration)); migration.setScript(script.resource.getLocation()); migration.setPhysicalLocation(script.resource.getLocationOnDisk()); migration.setType(MigrationType.SQL); migration.setDescription(script.name); 
migration.setChecksum(ByteSource.wrap(script.resource.loadAsBytes()).hash(Hashing.crc32()).asInt()); migration.setExecutor(new SqlMigrationExecutor(dbSupport, script.resource, placeholderReplacer, flywayConfiguration.getEncoding())); if (migrations.put(migration.getVersion(), migration) != null) { throw new FlywayException("Two migrations with the same version detected"); } } return new ArrayList<>(migrations.values()); }
From source file:com.android.builder.internal.packaging.zip.ZFile.java
/** * Creates a stored entry. This does not add the entry to the zip file, it just creates the * {@link StoredEntry} object.//from w ww . jav a 2s .c o m * * @param name the name of the entry * @param stream the input stream with the entry's data * @param mayCompress can the entry be compressed? * @return the created entry * @throws IOException failed to create the entry */ @NonNull private StoredEntry makeStoredEntry(@NonNull String name, @NonNull InputStream stream, boolean mayCompress) throws IOException { CloseableByteSource source = mTracker.fromStream(stream); long crc32 = source.hash(Hashing.crc32()).padToLong(); boolean encodeWithUtf8 = !EncodeUtils.canAsciiEncode(name); SettableFuture<CentralDirectoryHeaderCompressInfo> compressInfo = SettableFuture.create(); CentralDirectoryHeader newFileData = new CentralDirectoryHeader(name, source.size(), compressInfo, GPFlags.make(encodeWithUtf8)); newFileData.setCrc32(crc32); /* * Create the new entry and sets its data source. Offset should be set to -1 automatically * because this is a new file. With offset set to -1, StoredEntry does not try to verify the * local header. Since this is a new file, there is no local header and not checking it is * what we want to happen. */ Verify.verify(newFileData.getOffset() == -1); return new StoredEntry(newFileData, this, createSources(mayCompress, source, compressInfo, newFileData)); }