Example usage for org.apache.hadoop.io LongWritable get

Introduction

This page shows example usages of the get() method of org.apache.hadoop.io.LongWritable.

Prototype

public long get() 

Document

Return the value of this LongWritable.
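
As a quick, self-contained sketch (the class name LongWritableGetDemo is illustrative, not part of Hadoop), get() simply unwraps the primitive long stored in the writable:

import org.apache.hadoop.io.LongWritable;

public class LongWritableGetDemo {
    public static void main(String[] args) {
        LongWritable writable = new LongWritable(42L);

        // get() returns the wrapped primitive long
        long value = writable.get();
        System.out.println(value); // 42

        // set() replaces the wrapped value; a later get() reflects the change
        writable.set(7L);
        System.out.println(writable.get()); // 7
    }
}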

Usage

From source file:kogiri.common.hadoop.io.reader.map.IndexCloseableMapFileReader.java

License:Apache License

private void readIndex() throws IOException {
    if (this.indexCacheClosed) {
        throw new IOException("readIndex failed because index cache in memory is already closed");
    }

    // read the index entirely into memory
    if (this.keys != null) {
        return;
    }
    this.count = 0;
    this.keys = new WritableComparable[1024];
    this.positions = new long[1024];
    try {
        int skip = INDEX_SKIP;
        LongWritable position = new LongWritable();
        WritableComparable lastKey = null;
        while (true) {
            WritableComparable k = comparator.newKey();

            if (!index.next(k, position)) {
                break;
            }

            // check order to make sure comparator is compatible
            if (lastKey != null && comparator.compare(lastKey, k) > 0) {
                throw new IOException("key out of order: " + k + " after " + lastKey);
            }
            lastKey = k;

            if (skip > 0) {
                skip--;
                continue; // skip this entry
            } else {
                skip = INDEX_SKIP; // reset skip
            }

            if (count == keys.length) { // time to grow arrays
                int newLength = (keys.length * 3) / 2;
                WritableComparable[] newKeys = new WritableComparable[newLength];
                long[] newPositions = new long[newLength];
                System.arraycopy(keys, 0, newKeys, 0, count);
                System.arraycopy(positions, 0, newPositions, 0, count);
                keys = newKeys;
                positions = newPositions;
            }

            keys[count] = k;
            positions[count] = position.get();
            count++;
        }
    } catch (EOFException e) {
        LOG.warn("Unexpected EOF reading " + index + " at entry #" + count + ".  Ignoring.");
    } finally {
        indexClosed = true;
        indexCacheClosed = false;
        index.close();
    }
}

From source file:ldbc.socialnet.dbgen.util.UpdateEventPartitioner.java

License:Open Source License

@Override
public int getPartition(LongWritable key, Text value, int numReduceTasks) {
    // unwrap the long key and assign the record to a reducer by simple modulo
    return (int) (key.get() % numReduceTasks);
}
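
Note that key.get() % numReduceTasks goes negative for negative keys, and Hadoop rejects negative partition numbers. If negative keys are possible, a defensive variant can mask the sign bit first, the same trick Hadoop's HashPartitioner applies to hashCode(). The class below is a hypothetical sketch (the name SafeLongPartitioner and the org.apache.hadoop.mapreduce.Partitioner base class are assumptions, not taken from the source above):

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

public class SafeLongPartitioner extends Partitioner<LongWritable, Text> {
    @Override
    public int getPartition(LongWritable key, Text value, int numReduceTasks) {
        // mask the sign bit so the modulo result is never negative
        // (mirrors HashPartitioner's use of hashCode() & Integer.MAX_VALUE)
        return (int) ((key.get() & Long.MAX_VALUE) % numReduceTasks);
    }
}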

From source file:main.okapi.spinner.OpenHashMapEdges.java

License:Apache License

@Override
public void remove(LongWritable targetVertexId) {
    // unwrap the vertex id so it can serve as the primitive key of the backing map
    map.remove(targetVertexId.get());
}

From source file:main.okapi.spinner.OpenHashMapEdges.java

License:Apache License

@Override
public EdgeValue getEdgeValue(LongWritable targetVertexId) {
    short v = map.get(targetVertexId.get());
    repValue.setPartition(v); // reuse one representative EdgeValue instance instead of allocating per call
    return repValue;
}

From source file:main.okapi.spinner.OpenHashMapEdges.java

License:Apache License

@Override
public void setEdgeValue(LongWritable targetVertexId, EdgeValue edgeValue) {
    // store only the partition id, keyed by the unwrapped long vertex id
    map.put(targetVertexId.get(), edgeValue.getPartition());
}

From source file:mapreducesentiment.SentimentReducer.java

@Override
public void reduce(SentimentKeyWritableComparable key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {

    System.out.println("Entered the reducer.");
    double sum = 0.0;
    long count = 0;

    for (LongWritable val : values) {
        sum += getScore(key.getScore(), val.get());
        count++;
    }

    result.set(sum / count);
    context.write(key, result);
}

From source file:me.tingri.graphs.gimv.JoinReducer.java

License:Apache License

public void reduce(LongWritable key, Iterator<Text> values, OutputCollector<LongWritable, Text> output,
        Reporter reporter) throws IOException {
    String componentId = "";
    Set<Long> fromNodes = new HashSet<Long>();

    while (values.hasNext()) {
        String line = values.next().toString();

        if (line.startsWith(vectorIndicator)) // component info
            componentId = line;
        else // edge line
            fromNodes.add(Long.parseLong(line));
    }

    // add a self-loop; adding a duplicate is harmless since fromNodes is a set.
    fromNodes.add(key.get());

    for (Long fromNode : fromNodes)
        output.collect(new LongWritable(fromNode), new Text(componentId));
}

From source file:me.tingri.graphs.gimv.VectorGeneratorReducer.java

License:Apache License

public void reduce(LongWritable key, Iterator<Text> values, OutputCollector<LongWritable, Text> output,
        Reporter reporter) throws IOException {
    long curMinNodeId = key.get(); //self-loop assumed

    while (values.hasNext()) {
        long nodeId = Long.parseLong(values.next().toString());

        curMinNodeId = nodeId < curMinNodeId ? nodeId : curMinNodeId;
    }

    output.collect(key, new Text(vectorIndicator + Long.toString(curMinNodeId)));
}

From source file:ml.grafos.okapi.clustering.ap.AffinityPropagation.java

License:Apache License

private void computeClusters(Vertex<APVertexID, APVertexValue, DoubleWritable> vertex) throws IOException {
    APVertexID id = vertex.getId();
    if (id.type != APVertexType.I) {
        vertex.voteToHalt();
        return;
    }

    final LongArrayListWritable exemplars = getAggregatedValue("exemplars");
    if (exemplars.contains(new LongWritable(id.index))) {
        logger.debug("Point {} is an exemplar.", id.index);
        vertex.getValue().exemplar = new LongWritable(id.index);
        vertex.voteToHalt();
        return;
    }

    long bestExemplar = -1;
    double maxValue = Double.NEGATIVE_INFINITY;
    MapWritable values = vertex.getValue().weights;
    for (LongWritable exemplarWritable : exemplars) {
        final long exemplar = exemplarWritable.get();
        final APVertexID neighbor = new APVertexID(APVertexType.E, exemplar);
        if (!values.containsKey(neighbor)) {
            continue;
        }

        final double value = ((DoubleWritable) values.get(neighbor)).get();
        if (value > maxValue) {
            maxValue = value;
            bestExemplar = exemplar;
        }
    }

    logger.debug("Point {} decides to follow {}.", id.index, bestExemplar);
    vertex.getValue().exemplar = new LongWritable(bestExemplar);
    vertex.voteToHalt();
}

From source file:ml.shifu.shifu.core.varselect.VarSelectReducer.java

License:Apache License

@Override
protected void reduce(LongWritable key, Iterable<ColumnInfo> values, Context context)
        throws IOException, InterruptedException {
    ColumnStatistics column = new ColumnStatistics();
    double sum = 0d;
    double sumSquare = 0d;
    long count = 0L;
    for (ColumnInfo info : values) {
        sum += info.getSumScoreDiff();
        sumSquare += info.getSumSquareScoreDiff();
        count += info.getCount();
    }
    column.setMean(sum / count);
    column.setRms(sumSquare / count);
    column.setVariance((sumSquare / count) - power2(sum / count));
    this.results.add(new Pair(key.get(), column));
}