Example usage for com.google.common.hash HashFunction hashInt

List of usage examples for com.google.common.hash HashFunction hashInt

Introduction

In this page you can find the example usage for com.google.common.hash HashFunction hashInt.

Prototype

HashCode hashInt(int input);

Source Link

Document

Shortcut for newHasher().putInt(input).hash(); returns the hash code for the given int value, interpreted in little-endian byte order.

Usage

From source file:mx.itam.metodos.lshclustering.MinhashEmitMapper.java

@Override
public void map(Text id, IntArrayWritable values, Context context) throws IOException, InterruptedException {
    // Compute the min-hash signature: for each hash function, keep the
    // minimum hash value observed over all items of the input set.
    for (int i = 0; i < functionsCount; i++) {
        hashValues[i] = Integer.MAX_VALUE;
    }
    for (int i = 0; i < functionsCount; i++) {
        HashFunction hf = functions[i];
        for (Writable wr : values.get()) {
            IntWritable value = (IntWritable) wr;
            int hash = hf.hashInt(value.get()).asInt();
            if (hash < hashValues[i]) {
                hashValues[i] = hash;
            }
        }
    }
    // LSH banding: fold every `rows` consecutive signature components into a
    // single band hash and emit one "<band>-<hash>" key per band.
    //
    // Fix: the previous boundary test `i > 0 && (i % rows) == 0` placed
    // rows + 1 values in the first band, and the unconditional emit after
    // the loop could duplicate a band (hashing an empty Hasher) whenever
    // the final iteration had just flushed. Counting values per band emits
    // exactly ceil(functionsCount / rows) bands of at most `rows` values.
    Text sketch = new Text();
    Hasher hasher = lsh.newHasher();
    int band = 0;
    int inBand = 0; // signature components folded into the current band
    for (int i = 0; i < functionsCount; i++) {
        hasher.putInt(hashValues[i]);
        inBand++;
        if (inBand == rows) {
            sketch.set(band + "-" + hasher.hash().toString());
            context.write(new SecondarySortKey(sketch, id), id);
            hasher = lsh.newHasher();
            inBand = 0;
            band++;
        }
    }
    // Flush a trailing partial band only if it actually holds values.
    if (inBand > 0) {
        sketch.set(band + "-" + hasher.hash().toString());
        context.write(new SecondarySortKey(sketch, id), id);
    }
}

From source file:mx.itam.metodos.minhashing.MinhashMapper.java

@Override
public void map(Text id, IntArrayWritable values, Context ctx) throws IOException, InterruptedException {
    // Compute the min-hash signature: for each hash function, keep the
    // minimum hash value observed over all items of the input set.
    for (int i = 0; i < functionsCount; i++) {
        hashValues[i] = Integer.MAX_VALUE;
    }
    for (int i = 0; i < functionsCount; i++) {
        HashFunction hf = functions[i];
        for (Writable wr : values.get()) {
            IntWritable value = (IntWritable) wr;
            int hash = hf.hashInt(value.get()).asInt();
            if (hash < hashValues[i]) {
                hashValues[i] = hash;
            }
        }
    }
    // LSH banding: fold every `rows` consecutive signature components into a
    // single band hash and emit one "<band>-<hash>" key per band.
    //
    // Fix: the previous boundary test `i > 0 && (i % rows) == 0` placed
    // rows + 1 values in the first band, and the unconditional emit after
    // the loop could duplicate a band (hashing an empty Hasher) whenever
    // the final iteration had just flushed. Counting values per band emits
    // exactly ceil(functionsCount / rows) bands of at most `rows` values.
    Text sketch = new Text();
    Hasher hasher = lsh.newHasher();
    int band = 0;
    int inBand = 0; // signature components folded into the current band
    for (int i = 0; i < functionsCount; i++) {
        hasher.putInt(hashValues[i]);
        inBand++;
        if (inBand == rows) {
            sketch.set(band + "-" + hasher.hash().toString());
            write(id, sketch, ctx);
            hasher = lsh.newHasher();
            inBand = 0;
            band++;
        }
    }
    // Flush a trailing partial band only if it actually holds values.
    if (inBand > 0) {
        sketch.set(band + "-" + hasher.hash().toString());
        write(id, sketch, ctx);
    }
}

From source file:org.hawkular.alerts.engine.impl.PartitionManagerImpl.java

/**
 * Distribute a new entry across buckets using a consistent hashing strategy.
 *
 * @param newEntry the new entry to distribute
 * @param buckets a table of nodes
 * @return a code of the node which the new entry is placed
 * @throws IllegalArgumentException if {@code newEntry} is null or {@code buckets} is empty
 */
public Integer calculateNewEntry(PartitionEntry newEntry, Map<Integer, Integer> buckets) {
    if (newEntry == null) {
        throw new IllegalArgumentException("newEntry must be not null");
    }
    if (isEmpty(buckets)) {
        throw new IllegalArgumentException("buckets must be not null");
    }
    // Map the entry's hash onto one of size(buckets) consistent-hash slots,
    // then translate that slot index into the node code stored in the table.
    HashFunction md5 = Hashing.md5();
    int slot = Hashing.consistentHash(md5.hashInt(newEntry.hashCode()), buckets.size());
    return buckets.get(slot);
}

From source file:org.hawkular.alerts.engine.impl.PartitionManagerImpl.java

/**
 * Distribute triggers on nodes using a consistent hashing strategy.
 * This strategy allows to scale and minimize changes and re-distribution when cluster changes.
 *
 * @param entries a list of entries to distribute
 * @param buckets a table of nodes/*from w  w w .  j  a va 2s  .co m*/
 * @return a map of entries distributed across nodes
 */
public Map<PartitionEntry, Integer> calculatePartition(List<PartitionEntry> entries,
        Map<Integer, Integer> buckets) {
    if (entries == null) {
        throw new IllegalArgumentException("entries must be not null");
    }
    if (isEmpty(buckets)) {
        throw new IllegalArgumentException("entries must be not null");
    }
    HashFunction md5 = Hashing.md5();
    int numBuckets = buckets.size();
    Map<PartitionEntry, Integer> newPartition = new HashMap<>();
    for (PartitionEntry entry : entries) {
        newPartition.put(entry, buckets.get(Hashing.consistentHash(md5.hashInt(entry.hashCode()), numBuckets)));
    }
    return newPartition;
}