Example usage for org.apache.hadoop.io LongWritable set

List of usage examples for org.apache.hadoop.io LongWritable set

Introduction

On this page you can find example usage for org.apache.hadoop.io LongWritable set.

Prototype

public void set(long value) 

Document

Set the value of this LongWritable.
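
The call overwrites the wrapped primitive in place, so a single instance can be reused across records instead of allocating a new object each time (as in the MapFile example below). A minimal standalone sketch; LongWritableSetExample is a hypothetical class name used only for illustration:

import org.apache.hadoop.io.LongWritable;

public class LongWritableSetExample {
    public static void main(String[] args) {
        // One instance is reused; set() replaces the stored primitive in place.
        LongWritable key = new LongWritable();
        key.set(42L);
        System.out.println(key.get());  // prints 42
        key.set(Long.MAX_VALUE);        // the same object now holds a new value
        System.out.println(key.get());  // prints 9223372036854775807
    }
}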

Usage

From source file:org.broadinstitute.sting.gatk.walkers.indels.ConstrainedMateFixingManager.java

License:Open Source License

private void writeRead(GATKSAMRecord read) {
    try {
        if (!CommandLineGATK.runningOnHadoop) {
            writer.addAlignment(read);
        } else {
            if (read.getIncludeRead()) {
                LongWritable key = new LongWritable();
                SAMRecordWritable record = new SAMRecordWritable();

                if (read.getReferenceName().equals("*"))
                    key.set(Long.MAX_VALUE);
                else
                    key.set((long) read.getReferenceIndex() << 32 | read.getAlignmentStart() - 1);

                record.set(((SAMRecord) read));

                IndelMapReduce.IMContext.write(key, record);
            }
        }
    } catch (IllegalArgumentException e) {
        throw new UserException(
                "If the maximum allowable reads in memory is too small, it may cause reads to be written out of order when trying to write the BAM; please see the --maxReadsInMemory argument for details.  "
                        + e.getMessage(),
                e);
    } catch (Exception e) {
        // any other exception from the Hadoop write path is silently ignored here
    }
}
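
In the key computation above, set receives a single long that packs the 32-bit reference index into the high half and the zero-based alignment start into the low half, so records sort first by reference and then by position, with unmapped reads (reference name "*") pushed to the end via Long.MAX_VALUE. A minimal sketch of that packing scheme; GenomicKey and its parameters are hypothetical names, and unmapped reads are signalled here by a negative index rather than the "*" reference name:

import org.apache.hadoop.io.LongWritable;

public class GenomicKey {

    /** Builds a shuffle key that orders records by reference index, then by position. */
    public static LongWritable pack(int referenceIndex, int alignmentStart) {
        LongWritable key = new LongWritable();
        if (referenceIndex < 0) {
            key.set(Long.MAX_VALUE);  // unmapped reads sort after every mapped read
        } else {
            // high 32 bits: reference index; low 32 bits: zero-based alignment start
            key.set((long) referenceIndex << 32 | (alignmentStart - 1));
        }
        return key;
    }
}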

From source file:org.broadinstitute.sting.gatk.walkers.recalibration.TableRecalibrationWalker.java

License:Open Source License

/**
 * Output each read to disk
 *
 * @param read   The read to output
 * @param output The FileWriter to write the read to
 * @return The FileWriter
 */
public SAMFileWriter reduce(SAMRecord read, SAMFileWriter output) {
    if (output != null) {
        if (!CommandLineGATK.runningOnHadoop) {
            output.addAlignment(read);
        } else {
            if (((GATKSAMRecord) read).getIncludeRead()) {
                if (!RecalMapReduce.isVariant) {
                    try {
                        LongWritable key = new LongWritable();
                        SAMRecordWritable record = new SAMRecordWritable();

                        if (read.getReferenceName().equals("*"))
                            key.set(Long.MAX_VALUE);
                        else
                            key.set((long) read.getReferenceIndex() << 32 | read.getAlignmentStart() - 1);
                        record.set(read);
                        RecalMapReduce.RMContext.write(key, record);
                    } catch (Exception e) {
                        // exceptions from the Hadoop write path are silently ignored here
                    }
                } else {
                    output.addAlignment(read);
                }
            }
        }
    }
    return output;
}

From source file:org.data2semantics.giraph.SimpleInDegreeCountComputation.java

License:Apache License

@Override
public void compute(Vertex<Text, LongWritable, NullWritable> vertex, Iterable<DoubleWritable> messages)
        throws IOException {
    if (getSuperstep() == 0) {
        Iterable<Edge<Text, NullWritable>> edges = vertex.getEdges();
        for (Edge<Text, NullWritable> edge : edges) {
            sendMessage(edge.getTargetVertexId(), new DoubleWritable(1.0));
        }
    } else {
        long sum = 0;
        for (@SuppressWarnings("unused") DoubleWritable message : messages) {
            sum++;
        }
        LongWritable vertexValue = vertex.getValue();
        vertexValue.set(sum);
        vertex.setValue(vertexValue);
        vertex.voteToHalt();
    }
}

From source file:org.data2semantics.giraph.SimpleOutDegreeCountComputation.java

License:Apache License

@Override
public void compute(Vertex<Text, LongWritable, NullWritable> vertex, Iterable<DoubleWritable> messages)
        throws IOException {
    if (getSuperstep() == 0) {
        LongWritable vertexValue = vertex.getValue();
        vertexValue.set(vertex.getNumEdges());
        vertex.setValue(vertexValue);

        //we want to send a message to our target vertices as well. otherwise, our output only contains the vertices with an outdegree > 0
        Iterable<Edge<Text, NullWritable>> edges = vertex.getEdges();
        for (Edge<Text, NullWritable> edge : edges) {
            sendMessage(edge.getTargetVertexId(), new DoubleWritable(1.0));
        }
    } else {
        if (vertex.getNumEdges() == 0) {
            LongWritable vertexValue = vertex.getValue();
            vertexValue.set(0);
            vertex.setValue(vertexValue);
        } else {
            //if it has a number of outgoing edges, the proper value is already set in the first step
        }
        vertex.voteToHalt();
    }
}

From source file:org.gradoop.flink.io.impl.tlf.inputformats.TLFRecordReader.java

License:Apache License

/**
 * Reads the next key/value pair from the input for processing.
 *
 * @param key the new key
 * @param value the new value
 * @return true if a key/value pair was found
 * @throws IOException
 */
private boolean next(LongWritable key, Text value) throws IOException {
    if (fsin.getPos() < end && readUntilMatch(TLFConstants.START_TAG.getBytes(Charsets.UTF_8), false)) {
        try {
            buffer.write(TLFConstants.START_TAG.getBytes(Charsets.UTF_8));
            if (readUntilMatch(TLFConstants.END_TAG.getBytes(Charsets.UTF_8), true)) {
                key.set(fsin.getPos());
                if (fsin.getPos() != end) {
                    //- end tag because it is the new start tag and shall not be added
                    valueLength = buffer.getLength() - TLFConstants.END_TAG.getBytes(Charsets.UTF_8).length;
                } else {
                    // in this case there is no new start tag
                    valueLength = buffer.getLength();
                }
                //- end tag because it is the new start tag and shall not be added
                value.set(buffer.getData(), 0, valueLength);
                //set the buffer to position before end tag of old graph which is
                // start tag of the new one
                fsin.seek(fsin.getPos() - TLFConstants.END_TAG.getBytes(Charsets.UTF_8).length);
                return true;
            }
        } finally {
            buffer.reset();
        }
    }
    return false;
}

From source file:org.gradoop.io.impl.tlf.inputformats.TLFRecordReader.java

License:Open Source License

/**
 * Reads the next key/value pair from the input for processing.
 *
 * @param key the new key
 * @param value the new value
 * @return true if a key/value pair was found
 * @throws IOException
 */
private boolean next(LongWritable key, Text value) throws IOException {
    if (fsin.getPos() < end && readUntilMatch(START_TAG_BYTE, false)) {
        try {
            buffer.write(START_TAG_BYTE);
            if (readUntilMatch(END_TAG_BYTE, true)) {
                key.set(fsin.getPos());
                if (fsin.getPos() != end) {
                    //- end tag because it is the new start tag and shall not be added
                    valueLength = buffer.getLength() - END_TAG_BYTE.length;
                } else {
                    // in this case there is no new start tag
                    valueLength = buffer.getLength();
                }
                //- end tag because it is the new start tag and shall not be added
                value.set(buffer.getData(), 0, valueLength);
                //set the buffer to position before end tag of old graph which is
                // start tag of the new one
                fsin.seek(fsin.getPos() - END_TAG_BYTE.length);
                return true;
            }
        } finally {
            buffer.reset();
        }
    }
    return false;
}

From source file:org.hadoop.tdg.TestPseudoHadoop.java

License:Apache License

/**
 * sorted sequence file
 *
 * @throws IOException
 */
@Test
public void mapFileIO() throws IOException {
    LongWritable key = new LongWritable();
    Text value = new Text();
    MapFile.Writer writer = null;
    try {
        writer = new MapFile.Writer(fs.getConf(), fs, DST, key.getClass(), value.getClass());
        for (int i = 0; i < 100; i++) {
            key.set(i);
            value.set(DATA[i % DATA.length]);
            writer.append(key, value);
        }
    } finally {
        IOUtils.closeStream(writer);
    }

    MapFile.Reader reader = null;
    try {
        reader = new MapFile.Reader(fs, DST, fs.getConf());
        LongWritable readerKey = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), fs.getConf());
        Text readerValue = (Text) ReflectionUtils.newInstance(reader.getValueClass(), fs.getConf());
        while (reader.next(readerKey, readerValue)) {
            System.out.printf("%s\t%s\n", readerKey, readerValue);
        }
    } finally {
        IOUtils.closeStream(reader);
    }
}
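
MapFile.Writer.append requires keys to arrive in ascending order, which the loop above guarantees by calling key.set(i) with increasing values. The same reuse pattern works for random access; a short sketch assuming the fs and DST fields from the test above (mapFileLookup is a hypothetical method name):

public void mapFileLookup() throws IOException {
    LongWritable lookupKey = new LongWritable();
    Text lookupValue = new Text();
    MapFile.Reader reader = null;
    try {
        reader = new MapFile.Reader(fs, DST, fs.getConf());
        lookupKey.set(42);                                     // position the lookup at key 42
        Text found = (Text) reader.get(lookupKey, lookupValue);
        System.out.println(found);                             // null if the key is absent
    } finally {
        IOUtils.closeStream(reader);
    }
}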

From source file:org.mitre.bio.mapred.io.FastaRecordReader.java

License:Open Source License

/**
 * Reads the next key/value pair from the input for processing.
 *
 * @param key the key to read data into
 * @param value the value to read data into
 * @return true iff a key/value was read, false if at EOF
 */
@Override
public synchronized boolean next(LongWritable key, Text value) throws IOException {
    this.buffer.reset();
    if (this.pos < this.end) {
        try {
            // Find the beginning of a new record block
            if (readLinesUntilStartsWithMatch(startTag, false)) {
                // Read until we find the endTag or EOF
                readLinesBeforeStartsWithMatch(startTag, true);
                if (buffer.size() > 0) {
                    key.set(this.pos);
                    value.set(buffer.getData(), 0, buffer.getLength());
                    return true;
                }
            }
        } finally {
            LOG.debug("Uncaught exception!");
            this.buffer.reset();
        }
    }
    return false;
}

From source file:org.msgpack.hadoop.mapred.MessagePackRecordReader.java

License:Apache License

public boolean next(LongWritable key, MessagePackWritable val) throws IOException {
    if (unpacker_.hasNext()) {
        key.set(fileIn_.getPos());
        ImmutableValue obj = unpacker_.unpackValue();
        val.set(obj);
        return true;
    }
    return false;
}

From source file:org.pentaho.hadoop.mapreduce.converter.converters.KettleTypeToLongWritableConverter.java

License:Apache License

@Override
public LongWritable convert(ValueMetaInterface meta, Object obj) throws TypeConversionException {
    try {
        LongWritable result = new LongWritable();
        result.set(meta.getInteger(obj));
        return result;
    } catch (KettleValueException ex) {
        throw new TypeConversionException(BaseMessages.getString(TypeConverterFactory.class, "ErrorConverting",
                LongWritable.class.getSimpleName(), obj), ex);
    }
}