Example usage for org.apache.hadoop.io LongWritable get

Introduction

On this page you can find example usage of org.apache.hadoop.io.LongWritable.get().

Prototype

public long get() 

Document

Return the value of this LongWritable.
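
For orientation, a minimal, self-contained sketch of how get() pairs with set(long) on a LongWritable:

import org.apache.hadoop.io.LongWritable;

public class LongWritableGetExample {
    public static void main(String[] args) {
        LongWritable writable = new LongWritable();
        writable.set(42L);           // wrap a primitive long
        long value = writable.get(); // unwrap it again
        System.out.println(value);   // prints 42
    }
}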

Usage

From source file:de.unileipzig.dbs.giraph.algorithms.adaptiverepartitioning.ARPVertexValue.java

License:Open Source License

/**
 * Sets the stable counter.
 *
 * @param stableCounter number of supersteps for which the vertex has been stable
 */
public void setStableCounter(LongWritable stableCounter) {
    this.stableCounter = stableCounter.get();
}
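
A note on this pattern: the setter copies the primitive via get() instead of holding on to the LongWritable itself. That is the safe choice, because Hadoop and Giraph frequently reuse Writable instances. A minimal sketch of the aliasing hazard it avoids (variable names are illustrative):

LongWritable reused = new LongWritable(1L);
long copied = reused.get();    // safe: a primitive copy
LongWritable aliased = reused; // risky: aliases an object that may be recycled
reused.set(2L);
// copied is still 1, but aliased.get() now returns 2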

From source file:de.unileipzig.dbs.giraph.algorithms.labelpropagation.LPComputation.java

License:Open Source License

/**
 * Returns the current new value. This value is based on all incoming
 * messages. Depending on the number of messages sent to the vertex, the
 * method returns:
 * <p/>
 * 0 messages:   the current value
 * <p/>
 * 1 message:    the minimum of the message and the current vertex value
 * <p/>
 * >1 messages:  the most frequent of all message values
 *
 * @param vertex   the current vertex
 * @param messages all incoming messages
 * @return the new value the vertex will become
 */
private long getNewCommunity(Vertex<LongWritable, LPVertexValue, NullWritable> vertex,
        Iterable<LongWritable> messages) {
    long newCommunity;
    // TODO: build allMessages more efficiently
    //List<LongWritable> allMessages = Lists.newArrayList(messages);
    List<Long> allMessages = new ArrayList<>();
    for (LongWritable message : messages) {
        allMessages.add(message.get());
    }
    if (allMessages.isEmpty()) {
        // 1. if no messages are received
        newCommunity = vertex.getValue().getCurrentCommunity().get();
    } else if (allMessages.size() == 1) {
        // 2. if exactly one message is received
        newCommunity = Math.min(vertex.getValue().getCurrentCommunity().get(), allMessages.get(0));
    } else {
        // 3. if multiple messages are received
        newCommunity = getMostFrequent(vertex, allMessages);
    }
    return newCommunity;
}
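
The getMostFrequent helper is referenced above but not shown. A plausible sketch of such a helper (an assumption, not the project's actual implementation; java.util.Map and java.util.HashMap imports assumed), returning the most frequent message value and breaking ties in favor of the smaller community id:

// Hypothetical sketch: count each message value and return the most
// frequent one; smaller ids win ties.
private long getMostFrequent(Vertex<LongWritable, LPVertexValue, NullWritable> vertex,
        List<Long> allMessages) {
    Map<Long, Integer> counts = new HashMap<>();
    for (Long message : allMessages) {
        counts.merge(message, 1, Integer::sum);
    }
    long mostFrequent = vertex.getValue().getCurrentCommunity().get();
    int maxCount = 0;
    for (Map.Entry<Long, Integer> entry : counts.entrySet()) {
        if (entry.getValue() > maxCount
                || (entry.getValue() == maxCount && entry.getKey() < mostFrequent)) {
            maxCount = entry.getValue();
            mostFrequent = entry.getKey();
        }
    }
    return mostFrequent;
}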

From source file:de.unileipzig.dbs.giraph.algorithms.labelpropagation.LPVertexValue.java

License:Open Source License

/**
 * Sets the last community of the vertex.
 *
 * @param lastCommunity the last community (partition)
 */
public void setLastCommunity(LongWritable lastCommunity) {
    this.lastCommunity = lastCommunity.get();
}

From source file:de.unileipzig.dbs.giraph.algorithms.labelpropagation.LPVertexValue.java

License:Open Source License

/**
 * Sets the current community of the vertex.
 *
 * @param currentCommunity the desired community (partition)
 */
public void setCurrentCommunity(LongWritable currentCommunity) {
    this.currentCommunity = currentCommunity.get();
}

From source file:drdoobs.LongSumReducer.java

@Override
public void reduce(KEY key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
    long sum = 0;
    for (LongWritable val : values) {
        sum += val.get();
    }
    result.set(sum);
    context.write(key, result);
}
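
The snippet references a result field declared outside the method. A self-contained version would look roughly like this (class shape assumed, mirroring Hadoop's own LongSumReducer):

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Reducer;

// Assumed surrounding class for the reduce method above.
public class LongSumReducer<KEY> extends Reducer<KEY, LongWritable, KEY, LongWritable> {

    private final LongWritable result = new LongWritable();

    @Override
    public void reduce(KEY key, Iterable<LongWritable> values, Context context)
            throws IOException, InterruptedException {
        long sum = 0;
        for (LongWritable val : values) {
            sum += val.get(); // unwrap and accumulate each counter value
        }
        result.set(sum);
        context.write(key, result);
    }
}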

From source file:edu.cmu.cs.in.hadoop.HoopInvertedListMapper.java

License:Open Source License

/**
 * Tokenizes the incoming document and emits one posting per token.
 */
public void map(LongWritable key, Text value, OutputCollector<Text, Text> output, Reporter reporter)
        throws IOException {
    debug("map ()");

    if (HoopLink.metrics != null) {
        mapperMarker = new HoopPerformanceMeasure();
        mapperMarker.setMarker("Mapper");
        HoopLink.metrics.getDataSet().add(mapperMarker);
    }

    if (value == null) {
        debug("Internal error: value is null");
        return;
    }

    String line = value.toString(); // We assume here we're getting one file at a time

    HoopDocumentParser parser = new HoopDocumentParser();
    //parser.setDocID(key.toString());
    parser.setKey(key.get());
    parser.setIncludePositions(true);
    parser.loadDocumentFromData(line); // Tokenization happens here

    List<String> tokens = parser.getTokens();

    for (int i = 0; i < tokens.size(); i++) {
        HoopToken token = new HoopToken(tokens.get(i));

        StringBuffer formatted = new StringBuffer();
        formatted.append(key.get());
        formatted.append(":");
        formatted.append(token.getPosition().toString());

        //word.set(token.getValue()+":"+key.toString()); // We need this for the partitioner and reducers
        word.set(token.getValue() + ":" + partitioner.getPartition(new Text("key:" + key.toString()),
                new Text("undef"), partitioner.getNrPartitions())); // We need this for the partitioner and reducers

        output.collect(word, new Text(formatted.toString()));
    }

    debug("map (" + tokens.size() + " tokens) done for key: " + key.toString());

    if (mapperMarker != null) {
        //mapperMarker.getMarkerRaw ();
        mapperMarker.closeMarker();
    }
}

From source file:edu.cse.analyser.components.TempReducer.java

License:Open Source License

@Override
protected void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
    long sum = 0;
    int length = 0;
    for (LongWritable val : values) {
        sum += val.get();
        ++length;
    }
    // Temperatures are stored at a scale of 10, so divide the sum by
    // SCALE * length to recover the actual average value.
    result.set(sum / (SCALE * length));
    context.write(key, result);
}
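
This reducer likewise relies on declarations made elsewhere in the class; presumably something like the following (names assumed from the code above). Note that sum / (SCALE * length) is integer division, so any fractional part of the average temperature is truncated:

// Assumed field declarations for the reducer above.
private static final int SCALE = 10; // temperatures are stored as tenths of a degree
private final LongWritable result = new LongWritable();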

From source file:edu.hku.sdb.udf.hive.SdbKeyUpdatePlainUDF.java

License:Apache License

public Text evaluate(LongWritable a, Text s, Text p, Text q, Text n) {
    if (a == null || s == null || p == null || q == null || n == null) {
        return null;
    }

    BigInteger result = UDFHandler.keyUpdate(BigInteger.valueOf(a.get()), TypeCast.textToBigInt(s),
            TypeCast.textToBigInt(p), TypeCast.textToBigInt(q), TypeCast.textToBigInt(n));

    return TypeCast.bigIntToText(result);
}

From source file:edu.indiana.soic.ts.mapreduce.pwd.SWGReduce.java

License:Open Source License

public void reduce(LongWritable key, Iterable<SWGWritable> values, Context context) throws IOException {
    long startTime = System.nanoTime();
    Configuration conf = context.getConfiguration();

    long blockSize = conf.getLong(Constants.BLOCK_SIZE, 1000);
    long noOfSequences = conf.getLong(Constants.NO_OF_SEQUENCES, blockSize * 10);
    long noOfDivisions = conf.getLong(Constants.NO_OF_DIVISIONS, noOfSequences / blockSize);

    // handle edge blocks that contain fewer sequences
    int row = (int) (key.get() * blockSize);
    int currentRowBlockSize = (int) blockSize;
    if ((row + blockSize) > (noOfSequences)) {
        currentRowBlockSize = (int) (noOfSequences - row);
    }

    short[][] alignments = new short[(int) currentRowBlockSize][(int) noOfSequences];
    for (SWGWritable alignmentWritable : values) {
        LOG.info("key " + key.get() + " col " + alignmentWritable.getColumnBlock() + " row "
                + alignmentWritable.getRowBlock() + " blocksize " + blockSize);
        DataInput in = alignmentWritable.getDataInput();
        int column = (int) (alignmentWritable.getColumnBlock() * blockSize);

        // handle edge blocks that contain fewer sequences
        int currentColumnBlockSize = (int) blockSize;
        if ((column + blockSize) > (noOfSequences)) {
            currentColumnBlockSize = (int) (noOfSequences - column);
        }

        for (int i = 0; i < currentRowBlockSize; i++) {
            // byte[] b = new byte[currentBlockSize /* * 2*/];
            //            System.out.println("row block "+i+"  currentBlockSize"+currentRowBlockSize);
            for (int j = 0; j < currentColumnBlockSize; j++) {
                short readShort = in.readShort();
                //               System.out.print(readShort+" ");
                alignments[i][column + j] = readShort;
            }
        }
    }

    // retrieve the output dir
    String outDir = context.getConfiguration().get("mapred.output.dir");

    FileSystem fs = FileSystem.get(conf);
    // out dir is created in the main driver.
    String childName = "row_" + key.get() + "_" + blockSize;
    Path outFilePart = new Path(outDir, childName);
    writeOutFile(alignments, fs, outFilePart);
    LOG.info("Reduce Processing Time: " + ((System.nanoTime() - startTime) / 1000000));
}
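
To make the edge-block trimming concrete, a small worked example with assumed numbers:

// Assume blockSize = 1000 and noOfSequences = 2500; the reducer for key = 2
// covers rows 2000..2499, so its block is trimmed from 1000 to 500 rows.
long blockSize = 1000;
long noOfSequences = 2500;
long keyValue = 2; // would come from key.get()
int row = (int) (keyValue * blockSize);        // 2000
int currentRowBlockSize = (int) blockSize;     // 1000
if ((row + blockSize) > noOfSequences) {
    currentRowBlockSize = (int) (noOfSequences - row); // 500
}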

From source file:edu.uci.ics.pregelix.api.util.GlobalEdgeCountAggregator.java

License:Apache License

@Override
public void step(LongWritable partialResult) {
    state.set(state.get() + partialResult.get());
}
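
step() accumulates each partial count into the aggregator's running state with a read-modify-write on the underlying long. A minimal standalone sketch of that pattern (the state field and its initialization are assumed):

// Minimal sketch of the accumulate pattern used in step() above.
LongWritable state = new LongWritable(0L);
LongWritable partialResult = new LongWritable(7L);
state.set(state.get() + partialResult.get()); // state now holds 7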