List of usage examples for com.google.common.hash Hasher hash
@CheckReturnValue HashCode hash();
From source file:org.apache.kylin.engine.mr.steps.fdc2.FactDistinctHiveColumnsMapper2.java
private void putRowKeyToHLL(String[] row) { //generate hash for each row key column for (int i = 0; i < nRowKey; i++) { Hasher hc = hf.newHasher(); String colValue = row[intermediateTableDesc.getRowKeyColumnIndexes()[i]]; if (colValue != null) { row_hashcodes[i].set(hc.putString(colValue).hash().asBytes()); } else {/*from w w w . j a v a2 s .c o m*/ row_hashcodes[i].set(hc.putInt(0).hash().asBytes()); } } // user the row key column hash to get a consolidated hash for each cuboid for (int i = 0, n = allCuboidsBitSet.length; i < n; i++) { Hasher hc = hf.newHasher(); for (int position = 0; position < allCuboidsBitSet[i].length; position++) { hc.putBytes(row_hashcodes[allCuboidsBitSet[i][position]].array()); } allCuboidsHLL[i].add(hc.hash().asBytes()); } }
From source file:com.google.gerrit.httpd.raw.HostPageServlet.java
/**
 * Loads and validates the HostPage.html template, fingerprints the GWT module
 * script for cache busting (outside dev mode), and initializes the permutation
 * selector and page state.
 *
 * @throws IOException if the module script cannot be read
 * @throws ServletException if the template is missing required elements
 */
@Inject
HostPageServlet(final Provider<CurrentUser> cu, final Provider<WebSession> w, final SitePaths sp,
        final ThemeFactory themeFactory, final GerritConfig gc, final ServletContext servletContext,
        final DynamicSet<WebUiPlugin> webUiPlugins, @GerritServerConfig final Config cfg)
        throws IOException, ServletException {
    currentUser = cu;
    session = w;
    config = gc;
    plugins = webUiPlugins;
    signedOutTheme = themeFactory.getSignedOutTheme();
    signedInTheme = themeFactory.getSignedInTheme();
    site = sp;
    refreshHeaderFooter = cfg.getBoolean("site", "refreshHeaderFooter", true);
    boolean checkUserAgent = cfg.getBoolean("site", "checkUserAgent", true);

    // The template must exist and contain the anchor elements the servlet
    // later injects content into; fail fast at startup otherwise.
    final String pageName = "HostPage.html";
    template = HtmlDomUtil.parseFile(getClass(), pageName);
    if (template == null) {
        throw new FileNotFoundException("No " + pageName + " in webapp");
    }
    if (HtmlDomUtil.find(template, "gerrit_module") == null) {
        throw new ServletException("No gerrit_module in " + pageName);
    }
    if (HtmlDomUtil.find(template, HPD_ID) == null) {
        throw new ServletException("No " + HPD_ID + " in " + pageName);
    }

    String src = "gerrit_ui/gerrit_ui.nocache.js";
    if (!IS_DEV) {
        // Strip the GWT dev-mode hook from production pages.
        Element devmode = HtmlDomUtil.find(template, "gwtdevmode");
        if (devmode != null) {
            devmode.getParentNode().removeChild(devmode);
        }
        // Fingerprint the module script with its MD5 so upgraded deployments
        // bust browser caches via the query string.
        InputStream in = servletContext.getResourceAsStream("/" + src);
        if (in != null) {
            Hasher md = Hashing.md5().newHasher();
            try {
                try {
                    final byte[] buf = new byte[1024];
                    int n;
                    while ((n = in.read(buf)) > 0) {
                        md.putBytes(buf, 0, n);
                    }
                } finally {
                    in.close();
                }
            } catch (IOException e) {
                throw new IOException("Failed reading " + src, e);
            }
            src += "?content=" + md.hash().toString();
        } else {
            log.debug("No " + src + " in webapp root; keeping noncache.js URL");
        }
    }
    noCacheName = src;
    selector = new PermutationSelector("gerrit_ui");
    if (checkUserAgent && !IS_DEV) {
        selector.init(servletContext);
    }
    page = new Page();
}
From source file:com.facebook.buck.parser.ParallelDaemonicParserState.java
/**
 * Creates a {@link TargetNode} from a raw parsed rule: resolves the rule's
 * description, validates flavors, marshals the constructor arg, and hashes the
 * raw rule together with the Buck version to fingerprint the node.
 *
 * @throws HumanReadableException on unknown flavors, marshalling failures, or
 *     I/O errors (wrapped for user-facing display)
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
private TargetNode<?> createTargetNode(BuckEventBus eventBus, Cell cell, Path buildFile, BuildTarget target,
        Map<String, Object> rawNode, TargetNodeListener nodeListener) {
    BuildRuleType buildRuleType = parseBuildRuleTypeFromRawRule(cell, rawNode);

    // Because of the way that the parser works, we know this can never return null.
    Description<?> description = cell.getDescription(buildRuleType);

    if (target.isFlavored()) {
        if (description instanceof Flavored) {
            if (!((Flavored) description).hasFlavors(ImmutableSet.copyOf(target.getFlavors()))) {
                throw new HumanReadableException("Unrecognized flavor in target %s while parsing %s%s.", target,
                        UnflavoredBuildTarget.BUILD_TARGET_PREFIX, MorePaths
                                .pathWithUnixSeparators(target.getBasePath().resolve(cell.getBuildFileName())));
            }
        } else {
            // Rule type cannot be checked for flavor support; warn with detail,
            // then fail with a user-facing message.
            LOG.warn(
                    "Target %s (type %s) must implement the Flavored interface "
                            + "before we can check if it supports flavors: %s",
                    target.getUnflavoredBuildTarget(), buildRuleType, target.getFlavors());
            throw new HumanReadableException(
                    "Target %s (type %s) does not currently support flavors (tried %s)",
                    target.getUnflavoredBuildTarget(), buildRuleType, target.getFlavors());
        }
    }

    Cell targetCell = cell.getCell(target);
    BuildRuleFactoryParams factoryParams = new BuildRuleFactoryParams(targetCell.getFilesystem(),
            target.withoutCell(),
            new FilesystemBackedBuildFileTree(cell.getFilesystem(), cell.getBuildFileName()),
            targetCell.isEnforcingBuckPackageBoundaries());
    Object constructorArg = description.createUnpopulatedConstructorArg();
    try {
        ImmutableSet.Builder<BuildTarget> declaredDeps = ImmutableSet.builder();
        ImmutableSet.Builder<BuildTargetPattern> visibilityPatterns = ImmutableSet.builder();
        // Populate the constructor arg (and collect deps/visibility) from the
        // raw rule attributes, timed as a perf event.
        try (SimplePerfEvent.Scope scope = SimplePerfEvent.scope(eventBus,
                PerfEventId.of("MarshalledConstructorArg"), "target", target)) {
            marshaller.populate(targetCell.getCellRoots(), targetCell.getFilesystem(), factoryParams,
                    constructorArg, declaredDeps, visibilityPatterns, rawNode);
        }
        try (SimplePerfEvent.Scope scope = SimplePerfEvent.scope(eventBus, PerfEventId.of("CreatedTargetNode"),
                "target", target)) {
            // Hash the Buck version together with the raw rule so the node's
            // fingerprint changes when either the rule or Buck itself changes.
            Hasher hasher = Hashing.sha1().newHasher();
            hasher.putString(BuckVersion.getVersion(), UTF_8);
            JsonObjectHashing.hashJsonObject(hasher, rawNode);
            // targetsCornucopia is shared mutable state; guard the insert.
            synchronized (this) {
                targetsCornucopia.put(target.getUnflavoredBuildTarget(), target);
            }
            TargetNode<?> node = new TargetNode(hasher.hash(), description, constructorArg, typeCoercerFactory,
                    factoryParams, declaredDeps.build(), visibilityPatterns.build(), targetCell.getCellRoots());
            nodeListener.onCreate(buildFile, node);
            return node;
        }
    } catch (NoSuchBuildTargetException | TargetNode.InvalidSourcePathInputException e) {
        throw new HumanReadableException(e);
    } catch (ConstructorArgMarshalException e) {
        throw new HumanReadableException("%s: %s", target, e.getMessage());
    } catch (IOException e) {
        throw new HumanReadableException(e.getMessage(), e);
    }
}
From source file:org.protelis.vm.util.CodePath.java
/**
 * Builds a compact representation of an execution trace.
 *
 * For traces shorter than 4 bytes the bytes are packed little-endian into a
 * single int used directly as the hash ({@code path} stays null). Longer
 * traces are packed 8-bytes-per-long into {@code path} and hashed with
 * {@code HASH_FUNCTION}.
 *
 * @param stack
 *            The numerical markers forming an execution trace to be
 *            represented
 */
public CodePath(final TByteList stack) {
    size = stack.size();
    safe = size < 4;
    if (safe) {
        /*
         * Very short stack: pack the (at most 3) bytes into one int,
         * little-endian. E.g. bytes [1, 2, 3] become 0x00030201: each byte is
         * widened to int (masked to avoid sign extension), shifted left by
         * 8 * its position, and OR-ed into the accumulator.
         */
        path = null;
        int tempHash = 0;
        for (int i = 0; i < stack.size(); i++) {
            tempHash |= (stack.get(i) & INT_MASK) << (BITS_PER_BYTE * i);
        }
        hash = tempHash;
    } else {
        final Hasher hashGen = HASH_FUNCTION.newHasher(size);
        // Number of longs needed to hold size bytes, rounded up.
        path = new long[(stack.size() - 1) / (Long.SIZE / Byte.SIZE) + 1];
        /*
         * Single pass over the bytes doing two things at once:
         * 1) feed the hasher;
         * 2) pack each byte little-endian into its slot of path[], using the
         *    same shift-and-OR strategy as the short-stack int case above.
         */
        for (int i = 0; i < stack.size(); i++) {
            final byte b = stack.get(i);
            hashGen.putByte(b);
            path[i / (Long.SIZE / Byte.SIZE)] |= (b & LONG_MASK) << (BITS_PER_BYTE * (i % (Long.SIZE / Byte.SIZE)));
        }
        hash = hashGen.hash().asInt();
    }
}
From source file:org.apache.phoenix.filter.SkipScanFilter.java
@Override public int hashCode() { HashFunction hf = Hashing.goodFastHash(32); Hasher h = hf.newHasher(); h.putInt(slots.size());// ww w .j a va 2 s . co m for (int i = 0; i < slots.size(); i++) { h.putInt(slots.get(i).size()); for (int j = 0; j < slots.size(); j++) { h.putBytes(slots.get(i).get(j).getLowerRange()); h.putBytes(slots.get(i).get(j).getUpperRange()); } } return h.hash().asInt(); }
From source file:com.facebook.buck.util.config.Config.java
/**
 * Computes a SHA-256 digest of the config that is independent of the order in
 * which sections and keys appear, by sorting both levels before hashing.
 */
private HashCode computeOrderIndependentHashCode() {
    ImmutableMap<String, ImmutableMap<String, String>> rawValues = rawConfig.getValues();
    // Sort sections, and the key/value pairs within each section, so two
    // configs with identical content but different ordering hash the same.
    ImmutableSortedMap.Builder<String, ImmutableSortedMap<String, String>> sortedBuilder = ImmutableSortedMap
            .naturalOrder();
    for (String section : rawValues.keySet()) {
        sortedBuilder.put(section, ImmutableSortedMap.copyOf(get(section)));
    }

    Hasher hasher = Hashing.sha256().newHasher();
    for (Entry<String, ImmutableSortedMap<String, String>> section : sortedBuilder.build().entrySet()) {
        hasher.putString(section.getKey(), StandardCharsets.UTF_8);
        for (Entry<String, String> pair : section.getValue().entrySet()) {
            hasher.putString(pair.getKey(), StandardCharsets.UTF_8);
            hasher.putString(pair.getValue(), StandardCharsets.UTF_8);
        }
    }
    return hasher.hash();
}
From source file:org.apache.kylin.engine.mr.steps.FactDistinctColumnsMapper.java
private void putRowKeyToHLLOld(String[] row) { //generate hash for each row key column for (int i = 0; i < nRowKey; i++) { Hasher hc = hf.newHasher(); String colValue = row[intermediateTableDesc.getRowKeyColumnIndexes()[i]]; if (colValue != null) { row_hashcodes[i].set(hc.putString(colValue).hash().asBytes()); } else {//from w w w . java 2 s .c o m row_hashcodes[i].set(hc.putInt(0).hash().asBytes()); } } // user the row key column hash to get a consolidated hash for each cuboid for (int i = 0, n = allCuboidsBitSet.length; i < n; i++) { Hasher hc = hf.newHasher(); for (int position = 0; position < allCuboidsBitSet[i].length; position++) { hc.putBytes(row_hashcodes[allCuboidsBitSet[i][position]].array()); } allCuboidsHLL[i].add(hc.hash().asBytes()); } }
From source file:fr.inria.eventcloud.api.Quadruple.java
/**
 * Returns a 128 bits hash value for the current quadruple.
 *
 * Combines every term node (UTF-8 encoded), the optional publication source,
 * and the publication time into a single Murmur3-128 digest.
 *
 * @return a 128 bits hash value for the current quadruple.
 */
public HashCode hashValue() {
    Hasher hasher = Hashing.murmur3_128().newHasher();
    for (Object node : this.nodes) {
        hasher.putString(node.toString(), Charsets.UTF_8);
    }
    if (this.publicationSource != null) {
        hasher.putUnencodedChars(this.publicationSource);
    }
    hasher.putLong(this.publicationTime);
    return hasher.hash();
}
From source file:org.apache.aurora.scheduler.storage.log.StreamManagerImpl.java
/**
 * Reassembles a framed log entry: reads the chunk frames announced by the
 * given header frame, verifies their combined checksum, and decodes the
 * concatenated payload.
 *
 * @return the decoded entry; {@code null} when the frame stream is truncated
 *     or mispositioned; a non-frame/non-chunk entry is returned as-is so the
 *     caller can resynchronize on it
 * @throws CodingException if the reassembled payload fails its checksum
 */
@Nullable
private LogEntry tryDecodeFrame(Frame frame, Iterator<Log.Entry> entries) throws CodingException {
    if (!isHeader(frame)) {
        LOG.warn("Found a frame with no preceding header, skipping.");
        return null;
    }
    FrameHeader header = frame.getHeader();
    byte[][] chunks = new byte[header.getChunkCount()][];
    Hasher hasher = hashFunction.newHasher();
    for (int i = 0; i < header.getChunkCount(); i++) {
        // Stream ended before all announced chunks arrived.
        if (!entries.hasNext()) {
            logBadFrame(header, i);
            return null;
        }
        LogEntry logEntry = decodeLogEntry(entries.next());
        // A non-frame entry interrupts the sequence; hand it back unconsumed.
        if (!isFrame(logEntry)) {
            logBadFrame(header, i);
            return logEntry;
        }
        Frame chunkFrame = logEntry.getFrame();
        // A frame that is not a chunk (e.g. a new header) also interrupts.
        if (!isChunk(chunkFrame)) {
            logBadFrame(header, i);
            return logEntry;
        }
        byte[] chunkData = chunkFrame.getChunk().getData();
        hasher.putBytes(chunkData);
        chunks[i] = chunkData;
    }
    // Checksum covers the concatenation of all chunk payloads.
    if (!Arrays.equals(header.getChecksum(), hasher.hash().asBytes())) {
        throw new CodingException("Read back a framed log entry that failed its checksum");
    }
    return Entries.thriftBinaryDecode(Bytes.concat(chunks));
}
From source file:org.sfs.vo.XVersion.java
/**
 * Computes the object's MD5 from its segments: the well-known empty digest for
 * a zero-length object, the single segment's digest when there is exactly one,
 * or the digest of the concatenated per-segment digests otherwise.
 */
public Optional<byte[]> calculateMd5() {
    int count = segments.size();
    if (segments.isEmpty() && contentLength != null && contentLength <= 0) {
        // Zero-length object: MD5 of the empty input.
        return of(EMPTY_MD5);
    }
    if (count == 1) {
        // Single segment: its digest is the object's digest.
        return segments.first().getReadMd5();
    }
    if (count >= 2) {
        // Multiple segments: hash the per-segment digests in order.
        Hasher hasher = md5().newHasher();
        for (TransientSegment segment : segments) {
            hasher.putBytes(segment.getReadMd5().get());
        }
        return of(hasher.hash().asBytes());
    }
    return absent();
}