Example usage for org.apache.hadoop.io LongWritable get

Introduction

On this page you can find example usage for org.apache.hadoop.io.LongWritable.get().

Prototype

public long get() 

Document

Return the value of this LongWritable.
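
Before the project examples below, here is a minimal, self-contained sketch of the set()/get() round trip. The class name LongWritableGetExample is ours for illustration; everything else is the standard Hadoop API.

import org.apache.hadoop.io.LongWritable;

public class LongWritableGetExample {
    public static void main(String[] args) {
        // Wrap a primitive long in a LongWritable, then unwrap it with get().
        LongWritable writable = new LongWritable(42L);
        long value = writable.get(); // 42

        // LongWritable is mutable: set() overwrites the held value in place,
        // which is why Hadoop reuses a single instance across records.
        writable.set(value + 1);
        System.out.println(writable.get()); // prints 43
    }
}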

Usage

From source file:org.goldenorb.io.checkpoint.CheckPointDataTest.java

License:Apache License

/**
 * Tests the CheckPointDataInput class by reading several different types of Writables from the checkpoint.
 * Asserts that the Writables that were written retain the same value and type when read back from HDFS.
 *
 * @throws Exception
 */
@Test
public void testCheckpointInput() throws Exception {

    int superStep = 0;
    int partition = 0;
    OrbConfiguration orbConf = new OrbConfiguration();
    orbConf.set("fs.default.name", "hdfs://localhost:" + cluster.getNameNodePort());
    orbConf.setJobNumber("0");
    orbConf.setFileOutputPath("test");

    CheckPointDataInput checkpointInput = new CheckPointDataInput(orbConf, superStep, partition);

    // Data is read on a FIFO basis

    IntWritable intInput = new IntWritable();
    intInput.readFields(checkpointInput);

    LongWritable longInput = new LongWritable();
    longInput.readFields(checkpointInput);

    Text textInput = new Text();
    textInput.readFields(checkpointInput);

    FloatWritable floatInput = new FloatWritable();
    floatInput.readFields(checkpointInput);

    checkpointInput.close();

    assertThat(checkpointInput, notNullValue());
    assertEquals(4, intInput.get());
    assertEquals(9223372036854775807L, longInput.get());
    assertEquals("test", textInput.toString());
    assertEquals(3.14159F, floatInput.get(), 0.0F);
}

From source file:org.huahinframework.core.Filter.java

License:Apache License

/** {@inheritDoc} */
public void map(Writable key, Writable value, Context context) throws IOException, InterruptedException {
    writer.setContext(context);
    init();

    Record record = new Record();
    if (first) {
        LongWritable k = (LongWritable) key;
        Text v = (Text) value;

        record.addGrouping("KEY", k.get());

        String[] strings = StringUtil.split(v.toString(), separator, false);
        if (labels.length != strings.length) {
            if (formatIgnored) {
                throw new DataFormatException("input format error: " + "label.length = " + labels.length
                        + ", input.length = " + strings.length);
            }

            return;
        }

        for (int i = 0; i < strings.length; i++) {
            record.addValue(labels[i], strings[i]);
        }
    } else {
        record.setKey((Key) key);
        record.setValue((Value) value);
    }

    writer.setDefaultRecord(record);
    filter(record, writer);
}

From source file:org.jhk.pulsing.giraph.computation.FriendTextComputation.java

License:Apache License

@Override
public void compute(Vertex<LongWritable, Text, LongWritable> vertex, Iterable<LongWritable> messages)
        throws IOException {
    long superstep = getSuperstep();

    Map<Long, Long> friendships = new HashMap<>();
    Arrays.asList(vertex.getValue().toString().split(FRIENDSHIP_TEXT_FRIEND_DELIM)).stream().forEach(entry -> {
        String[] split = entry.split(FRIENDSHIP_TEXT_FRIEND_LEVEL_DELIM);
        friendships.put(Long.valueOf(split[0]), Long.valueOf(split[1]));
    });

    long count = 0L;
    for (LongWritable friend : messages) {
        long friendId = friend.get();
        if (!friendships.containsKey(friendId)) {
            friendships.put(friendId, superstep);
            count++;
        }
    }

    for (Edge<LongWritable, LongWritable> edge : vertex.getEdges()) {
        sendMessage(edge.getTargetVertexId(), vertex.getId());
    }

    if (count > 0) {
        StringJoiner joiner = new StringJoiner(FRIENDSHIP_TEXT_FRIEND_DELIM);
        friendships.keySet().forEach(friendId -> {
            joiner.add(friendId + FRIENDSHIP_TEXT_FRIEND_LEVEL_DELIM + friendships.get(friendId));
        });
        vertexText.set(joiner.toString());
        vertex.setValue(vertexText);
    }

    longIncrement.set(count);
    aggregate(FRIENDSHIP_AGGREGATE, longIncrement);

    vertex.voteToHalt();
}

From source file:org.kiji.examples.music.reduce.SequentialPlayCountReducer.java

License:Apache License

/** {@inheritDoc} */
@Override
protected void reduce(AvroKey<SongBiGram> key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
    // Initialize sum to zero.
    long sum = 0L;
    // Add up all the values.  When this reducer is used after SongPlayCounter, every value
    // should be the number 1, so we are just counting the number of times the second song
    // was played after the first (the key).
    for (LongWritable value : values) {
        sum += value.get();
    }

    // Set values for this count.
    final SongBiGram songPair = key.datum();
    final SongCount nextSongCount = SongCount.newBuilder().setCount(sum)
            .setSongId(songPair.getSecondSongPlayed()).build();
    // Write out result for this song.
    context.write(new AvroKey<CharSequence>(songPair.getFirstSongPlayed().toString()),
            new AvroValue<SongCount>(nextSongCount));
}

From source file:org.kiji.examples.music.TestSongPlayCounter.java

License:Apache License

/**
 * Reads a sequence file of (song ID, # of song plays) into a map.
 *
 * @param path Path of the sequence file to read.
 * @param map Map to fill in with (song ID, # of song plays) entries.
 * @throws Exception on I/O error.
 */
private void readSequenceFile(File path, Map<String, Long> map) throws Exception {
    final SequenceFile.Reader reader = new SequenceFile.Reader(getConf(),
            SequenceFile.Reader.file(new Path("file://" + path.toString())));
    final Text songId = new Text();
    final LongWritable nplays = new LongWritable();
    while (reader.next(songId, nplays)) {
        map.put(songId.toString(), nplays.get());
    }
    reader.close();
}

From source file:org.kiji.mapreduce.testlib.SimpleBulkImporter.java

License:Apache License

/** {@inheritDoc} */
@Override
public void produce(LongWritable filePos, Text value, KijiTableContext context) throws IOException {
    final String line = value.toString();
    final String[] split = line.split(":");
    Preconditions.checkState(split.length == 2,
            String.format("Unable to parse bulk-import test input line: '%s'.", line));
    final String rowKey = split[0];
    final int integerValue = Integer.parseInt(split[1]);

    final EntityId eid = context.getEntityId(rowKey);
    context.put(eid, "primitives", "int", integerValue);
    context.put(eid, "primitives", "long", filePos.get());
    context.put(eid, "primitives", "string", String.format("%s-%d", rowKey, integerValue));
}

From source file:org.kiji.mr.reduce.WordCountReducer.java

License:Apache License

/** {@inheritDoc} */
@Override
protected void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
    long sum = 0L;
    for (LongWritable val : values) {
        sum += val.get();
    }

    context.write(key, new LongWritable(sum));
}

From source file:org.kitesdk.data.spi.filesystem.TestInputFormatKeyReader.java

License:Apache License

@Override
public DatasetTestUtilities.RecordValidator<LongWritable> getValidator() {
    return new DatasetTestUtilities.RecordValidator<LongWritable>() {
        @Override
        public void validate(LongWritable record, int recordNum) {
            System.err.println(record.toString());
            Assert.assertEquals((long) positions.get(recordNum), record.get());
        }
    };
}

From source file:org.mrgeo.cmd.findholes.mapreduce.FindHolesReducer.java

License:Apache License

/**
 * output of this reducer is:
 * y: x x x x x x x x
 */
public void reduce(LongWritable key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {

    pw.print(key.toString() + ":");
    for (LongWritable v : values) {
        pw.print(" " + Long.toString(v.get()));
    }
    pw.println();

}

From source file:org.mrgeo.hdfs.vector.HdfsVectorReader.java

License:Apache License

@Override
public Geometry get(LongWritable featureId) throws IOException {
    HdfsFileReader fileReader = new HdfsFileReader();
    fileReader.initialize(conf, new Path(provider.getResolvedResourceName(true)));
    DelimitedParser delimitedParser = getDelimitedParser();
    FeatureIdRangeVisitor visitor = new FeatureIdRangeVisitor(featureId.get(), featureId.get());
    DelimitedReader reader = new DelimitedReader(fileReader, delimitedParser, visitor);
    if (reader.hasNext()) {
        Geometry geometry = reader.next();
        return geometry;
    }
    return null;
}