List of usage examples for com.google.common.hash.Hashing#murmur3_128
public static HashFunction murmur3_128(int seed)
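A minimal sketch of the basic call pattern, before the examples below (the seed value 42 is arbitrary): murmur3_128(seed) returns a reusable, stateless HashFunction, and the same seed always produces the same 128-bit hash for the same input.

    import com.google.common.hash.HashCode;
    import com.google.common.hash.HashFunction;
    import com.google.common.hash.Hashing;
    import java.nio.charset.StandardCharsets;

    public class Murmur3Demo {
        public static void main(String[] args) {
            HashFunction hf = Hashing.murmur3_128(42); // arbitrary seed
            HashCode hc = hf.hashString("hello", StandardCharsets.UTF_8);
            System.out.println(hc);          // 32 hex chars: the full 128-bit hash
            System.out.println(hc.asLong()); // first 8 bytes as a (possibly negative) long
            // Deterministic: a fresh instance with the same seed yields the same hash.
            assert hc.equals(Hashing.murmur3_128(42).hashString("hello", StandardCharsets.UTF_8));
        }
    }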
From source file: com.github.mgunlogson.cuckoofilter4j.SerializableSaltedHasher.java

    private static HashFunction configureHash(Algorithm alg, long seedNSalt, long addlSipSeed) {
        switch (alg) {
        case xxHash64:
            return new xxHashFunction(seedNSalt);
        case Murmur3_128:
            return Hashing.murmur3_128((int) seedNSalt);
        case Murmur3_32:
            return Hashing.murmur3_32((int) seedNSalt);
        case sha256:
            // was Hashing.sha1(), which did not match the algorithm label
            return Hashing.sha256();
        case sipHash24:
            return Hashing.sipHash24(seedNSalt, addlSipSeed);
        default:
            throw new IllegalArgumentException("Invalid hashing algorithm: " + alg);
        }
    }
From source file: org.apache.flink.migration.streaming.api.graph.StreamGraphHasherV1.java

    @Override
    public Map<Integer, byte[]> traverseStreamGraphAndGenerateHashes(StreamGraph streamGraph) {
        // The hash function used to generate the hash.
        final HashFunction hashFunction = Hashing.murmur3_128(0);
        final Map<Integer, byte[]> hashes = new HashMap<>();

        Set<Integer> visited = new HashSet<>();
        Queue<StreamNode> remaining = new ArrayDeque<>();

        // We need to make the source order deterministic. The source IDs are
        // not returned in the same order, which means that submitting the same
        // program twice might result in different traversal, which breaks the
        // deterministic hash assignment.
        List<Integer> sources = new ArrayList<>();
        for (Integer sourceNodeId : streamGraph.getSourceIDs()) {
            sources.add(sourceNodeId);
        }
        Collections.sort(sources);

        // Traverse the graph in a breadth-first manner. Keep in mind that
        // the graph is not a tree and multiple paths to nodes can exist.

        // Start with the source nodes.
        for (Integer sourceNodeId : sources) {
            remaining.add(streamGraph.getStreamNode(sourceNodeId));
            visited.add(sourceNodeId);
        }

        StreamNode currentNode;
        while ((currentNode = remaining.poll()) != null) {
            // Generate the hash code. Because multiple paths exist to each
            // node, we might not have all required inputs available to
            // generate the hash code.
            if (generateNodeHash(currentNode, hashFunction, hashes, streamGraph.isChainingEnabled())) {
                // Add the child nodes.
                for (StreamEdge outEdge : currentNode.getOutEdges()) {
                    StreamNode child = outEdge.getTargetVertex();
                    if (!visited.contains(child.getId())) {
                        remaining.add(child);
                        visited.add(child.getId());
                    }
                }
            } else {
                // We will revisit this node later.
                visited.remove(currentNode.getId());
            }
        }

        return hashes;
    }
From source file: com.scurrilous.circe.guava.GuavaHashProvider.java

    @Override
    protected Hash get(HashParameters params, EnumSet<HashSupport> required) {
        if (params instanceof SipHash24Parameters) {
            final SipHash24Parameters sipParams = (SipHash24Parameters) params;
            return new HasherLongHash(Hashing.sipHash24(sipParams.seedLow(), sipParams.seedHigh()),
                    params.algorithm());
        }
        if (params instanceof MurmurHash3Parameters) {
            final MurmurHash3Parameters murmurParams = (MurmurHash3Parameters) params;
            final int seed = murmurParams.seed();
            switch (murmurParams.variant()) {
            case X86_32:
                return new HasherIntHash(Hashing.murmur3_32(seed), params.algorithm());
            case X64_128:
                return new HasherHash(Hashing.murmur3_128(seed), params.algorithm());
            default:
                throw new UnsupportedOperationException();
            }
        }
        throw new UnsupportedOperationException();
    }
From source file: org.knime.ext.textprocessing.nodes.transformation.documentvectorhashing.Murmur3_128bitHashingFunction.java

    /**
     * {@inheritDoc}
     */
    @Override
    public int hash(final String term, final int seed) {
        HashFunction hash = Hashing.murmur3_128(seed);
        HashCode hashCode = hash.hashString(term, Charsets.UTF_8);
        return hashCode.asInt();
    }
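A note on the example above: HashCode.asInt() keeps only the first four bytes of the 128-bit code (little-endian), so the term hash effectively becomes a 32-bit value. A small illustrative sketch of that truncation (the "term" string and seed 0 are arbitrary):

    HashCode full = Hashing.murmur3_128(0).hashString("term", Charsets.UTF_8);
    int truncated = full.asInt(); // first 4 bytes of the 128-bit code, little-endian
    long wider = full.asLong();   // first 8 bytes; (int) wider == truncated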
From source file: org.apache.flink.streaming.runtime.partitioner.PartialPartitioner.java

    @Override
    public int[] selectChannels(SerializationDelegate<StreamRecord<T>> record, int numChannels) {
        if (!initializedStats) {
            this.targetChannelStats = new long[numChannels];
            this.initializedStats = true;
            h = new HashFunction[this.workersPerKey];
            // Seed each instance with a distinct prime so the workersPerKey
            // hash functions behave as effectively independent functions.
            for (int i = 0; i < this.workersPerKey; i++) {
                currentPrime = getNextPrime(currentPrime);
                h[i] = Hashing.murmur3_128(currentPrime);
            }
        }

        int[] choices;
        Object key;
        try {
            key = keySelector.getKey(record.getInstance().getValue());
            int counter = 0;
            choices = new int[this.workersPerKey];
            if (this.workersPerKey == numChannels) {
                // As many candidates as channels: every channel is a candidate.
                while (counter < this.workersPerKey) {
                    choices[counter] = counter;
                    counter++;
                }
            } else {
                // Map the key to workersPerKey candidate channels, one per hash function.
                while (counter < this.workersPerKey) {
                    choices[counter] = (int) (Math.abs(h[counter].hashBytes(serialize(key)).asLong()) % numChannels);
                    counter++;
                }
            }
        } catch (Exception e) {
            throw new RuntimeException("Could not extract key from " + record.getInstance().getValue(), e);
        }

        // Pick the least-loaded candidate and record the assignment.
        int selected = selectMinWorker(targetChannelStats, choices);
        targetChannelStats[selected]++;
        returnArray[0] = selected;
        return returnArray;
    }
From source file: org.apache.flink.streaming.api.graph.StreamGraphHasherV2.java

    /**
     * Returns a map with a hash for each {@link StreamNode} of the {@link StreamGraph}.
     * The hash is used as the {@link JobVertexID} in order to identify nodes across job
     * submissions if they didn't change.
     *
     * <p>The complete {@link StreamGraph} is traversed. The hash is either computed from
     * the transformation's user-specified id (see {@link StreamTransformation#getUid()})
     * or generated in a deterministic way.
     *
     * <p>The generated hash is deterministic with respect to:
     * <ul>
     * <li>node-local properties (like parallelism, UDF, node ID),
     * <li>chained output nodes, and
     * <li>input node hashes
     * </ul>
     *
     * @return A map from {@link StreamNode#id} to hash as a 16-byte array.
     */
    @Override
    public Map<Integer, byte[]> traverseStreamGraphAndGenerateHashes(StreamGraph streamGraph) {
        // The hash function used to generate the hash.
        final HashFunction hashFunction = Hashing.murmur3_128(0);
        final Map<Integer, byte[]> hashes = new HashMap<>();

        Set<Integer> visited = new HashSet<>();
        Queue<StreamNode> remaining = new ArrayDeque<>();

        // We need to make the source order deterministic. The source IDs are
        // not returned in the same order, which means that submitting the same
        // program twice might result in different traversal, which breaks the
        // deterministic hash assignment.
        List<Integer> sources = new ArrayList<>();
        for (Integer sourceNodeId : streamGraph.getSourceIDs()) {
            sources.add(sourceNodeId);
        }
        Collections.sort(sources);

        // Traverse the graph in a breadth-first manner. Keep in mind that
        // the graph is not a tree and multiple paths to nodes can exist.

        // Start with the source nodes.
        for (Integer sourceNodeId : sources) {
            remaining.add(streamGraph.getStreamNode(sourceNodeId));
            visited.add(sourceNodeId);
        }

        StreamNode currentNode;
        while ((currentNode = remaining.poll()) != null) {
            // Generate the hash code. Because multiple paths exist to each
            // node, we might not have all required inputs available to
            // generate the hash code.
            if (generateNodeHash(currentNode, hashFunction, hashes, streamGraph.isChainingEnabled())) {
                // Add the child nodes.
                for (StreamEdge outEdge : currentNode.getOutEdges()) {
                    StreamNode child = outEdge.getTargetVertex();
                    if (!visited.contains(child.getId())) {
                        remaining.add(child);
                        visited.add(child.getId());
                    }
                }
            } else {
                // We will revisit this node later.
                visited.remove(currentNode.getId());
            }
        }

        return hashes;
    }
From source file: edu.umd.marbl.mhap.sketch.HashUtils.java

    public static final long[][] computeNGramHashesExact(final String seq, final int nGramSize,
            final int numWords, final int seed) {
        HashFunction hf = Hashing.murmur3_128(seed);

        long[][] hashes = new long[seq.length() - nGramSize + 1][numWords];
        for (int iter = 0; iter < hashes.length; iter++) {
            String subStr = seq.substring(iter, iter + nGramSize);

            for (int word = 0; word < numWords; word++) {
                HashCode hc = hf.newHasher().putUnencodedChars(subStr).putInt(word).hash();
                hashes[iter][word] = hc.asLong();
            }
        }
        return hashes;
    }
From source file: edu.umd.marbl.mhap.sketch.HashUtils.java

    public static final long[] computeSequenceHashesLong(final String seq, final int nGramSize, final int seed) {
        HashFunction hf = Hashing.murmur3_128(seed);

        long[] hashes = new long[seq.length() - nGramSize + 1];
        for (int iter = 0; iter < hashes.length; iter++) {
            HashCode hc = hf.newHasher().putUnencodedChars(seq.substring(iter, iter + nGramSize)).hash();
            hashes[iter] = hc.asLong();
        }
        return hashes;
    }
From source file: me.j360.dubbo.modules.util.text.HashUtil.java

    /**
     * Murmur3 128-bit hash of the given bytes, truncated to a long (the result may be negative).
     */
    public static long murmur128AsLong(@NotNull byte[] input) {
        return Hashing.murmur3_128(MURMUR_SEED).hashBytes(input).asLong();
    }
From source file: me.j360.dubbo.modules.util.text.HashUtil.java

    /**
     * Murmur3 128-bit hash of the given string's UTF-8 bytes, truncated to a long (the result may be negative).
     */
    public static long murmur128AsLong(@NotNull String input) {
        return Hashing.murmur3_128(MURMUR_SEED).hashString(input, Charsets.UTF_8).asLong();
    }
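A hypothetical call site for the two overloads above (MURMUR_SEED is a constant defined elsewhere in HashUtil). Since hashString(input, UTF_8) hashes the string's UTF-8 encoding, the two calls agree:

    byte[] raw = "hello".getBytes(StandardCharsets.UTF_8);
    long fromBytes = HashUtil.murmur128AsLong(raw);
    long fromString = HashUtil.murmur128AsLong("hello");
    // hashString(input, UTF_8) is equivalent to hashBytes(input.getBytes(UTF_8)),
    // so fromBytes == fromString.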