Example usage for org.apache.hadoop.io WritableComparator hashBytes

List of usage examples for org.apache.hadoop.io WritableComparator hashBytes

Introduction

On this page you can find an example usage for org.apache.hadoop.io WritableComparator hashBytes.

Prototype

public static int hashBytes(byte[] bytes, int length) 

Source Link

Document

Compute hash for binary data.

Usage

From source file:cn.iie.haiep.hbase.value.Bytes.java

License:Apache License

/**
 * @param b value/*from   ww  w  .  j  av  a2  s .c  o  m*/
 * @param length length of the value
 * @return Runs {@link WritableComparator#hashBytes(byte[], int)} on the
 * passed in array.  This method is what {@link org.apache.hadoop.io.Text} and
 * {@link ImmutableBytesWritable} use calculating hash code.
 */
public static int hashCode(final byte[] b, final int length) {
    return WritableComparator.hashBytes(b, length);
}

From source file:com.taobao.adfs.util.HashedBytes.java

License:Apache License

/** Wraps {@code bytes} (without copying) and precomputes its hash code. */
public HashedBytes(byte[] bytes) {
    hashCode = WritableComparator.hashBytes(bytes, bytes.length);
    this.bytes = bytes;
}

From source file:org.apache.accumulo.core.data.Column.java

License:Apache License

/** Hashes the full contents of {@code b}; a {@code null} array hashes to 0. */
private static int hash(byte[] b) {
    return (b == null) ? 0 : WritableComparator.hashBytes(b, b.length);
}

From source file:org.apache.accumulo.core.data.Key.java

License:Apache License

/**
 * Combines the byte hashes of row, column family, qualifier, and visibility
 * with the folded timestamp bits. Integer overflow wraps, so the running-sum
 * form below is equivalent to a single chained addition.
 */
@Override
public int hashCode() {
    int result = WritableComparator.hashBytes(row, row.length);
    result += WritableComparator.hashBytes(colFamily, colFamily.length);
    result += WritableComparator.hashBytes(colQualifier, colQualifier.length);
    result += WritableComparator.hashBytes(colVisibility, colVisibility.length);
    result += (int) (timestamp ^ (timestamp >>> 32));
    return result;
}

From source file:org.apache.accumulo.core.data.Value.java

License:Apache License

/** Hashes the entire backing byte array of this value. */
@Override
public int hashCode() {
    final byte[] data = this.value;
    return WritableComparator.hashBytes(data, data.length);
}

From source file:org.apache.mahout.classifier.bayes.mapreduce.common.FeaturePartitioner.java

License:Apache License

/**
 * {@inheritDoc}
 *
 * Routes a record to a reducer based solely on its feature string, so all
 * records for the same feature land on the same partition.
 *
 * @throws IllegalArgumentException if the tuple does not have 2 or 3 entries
 */
@Override
public int getPartition(StringTuple key, DoubleWritable value, int numPartitions) {

    if (key.length() < 2 || key.length() > 3) {
        throw new IllegalArgumentException("StringTuple length out of bounds");
    }

    // The feature is the last element: index 1 for 2-tuples, index 2 for 3-tuples.
    String feature = key.length() == 2 ? key.stringAt(1) : key.stringAt(2);

    // Number of leading bytes to hash. For length > 3 this evaluates to 3;
    // for shorter strings it wraps via the modulus (e.g. length 3 gives 0).
    // NOTE(review): the intent of hashing only a short prefix is unclear —
    // confirm against the upstream BinaryPartitioner-style convention.
    int length = feature.length();
    int right = 0;
    if (length > 0) {
        right = (3 + length) % length;
    }
    // NOTE(review): feature.getBytes() uses the platform default charset, so
    // JVMs with different defaults could partition differently — verify.
    int hash = WritableComparator.hashBytes(feature.getBytes(), right);
    // Clear the sign bit so the modulus yields a non-negative partition index.
    return (hash & Integer.MAX_VALUE) % numPartitions;
}

From source file:org.apache.mahout.utils.nlp.collocations.llr.GramKeyPartitioner.java

License:Apache License

/**
 * Assigns a partition by hashing a prefix of the key's raw bytes, following
 * the offset convention of Hadoop's BinaryPartitioner.
 */
@Override
public int getPartition(GramKey key, Gram value, int numPartitions) {
    // see: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java?revision=816664&view=markup
    // NOTE(review): if key.getLength() == 1, length is 0 and the modulus below
    // throws ArithmeticException — confirm keys always exceed one byte.
    int length = key.getLength() - 1;
    int right = (offset + length) % length;
    int hash = WritableComparator.hashBytes(key.getBytes(), right);
    // Clear the sign bit so the modulus yields a non-negative partition index.
    return (hash & Integer.MAX_VALUE) % numPartitions;
}

From source file:org.apache.pig.data.DataAtom.java

License:Apache License

/** String atoms hash by their string value; binary atoms hash over their raw bytes. */
@Override
public int hashCode() {
    if (type == Type.STRING) {
        return stringVal.hashCode();
    }
    return WritableComparator.hashBytes(binaryVal, binaryVal.length);
}

From source file:org.elasticsearch.hadoop.hive.HiveBytesArrayWritable.java

License:Apache License

/** Hashes the wrapped byte-array holder's current contents. */
@Override
public int hashCode() {
    final byte[] data = ba.bytes();
    final int len = ba.length();
    return WritableComparator.hashBytes(data, len);
}

From source file:org.godhuli.rhipe.RHBytesWritable.java

License:Apache License

/**
 * Hashes the first {@code size} bytes of the backing array via
 * {@link WritableComparator#hashBytes(byte[], int)}, matching the hashing
 * scheme used by other Hadoop writables.
 */
@Override
public int hashCode() {
    return WritableComparator.hashBytes(bytes, size);
}