Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

On this page you can find example usages of the org.apache.hadoop.io.DoubleWritable no-argument constructor, DoubleWritable().

Prototype

public DoubleWritable() 
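
The no-argument constructor creates a DoubleWritable holding 0.0; a value is normally filled in later via set(double) or readFields(DataInput). A minimal sketch of typical usage (the stream plumbing is illustrative only, not taken from the examples below):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.DoubleWritable;

DoubleWritable w = new DoubleWritable(); // value defaults to 0.0
w.set(3.14);

// Writable round-trip through an in-memory stream
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
w.write(new DataOutputStream(bytes));

DoubleWritable copy = new DoubleWritable();
copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
System.out.println(copy.get()); // prints 3.14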

Usage

From source file:HistogramBucket.java

License:Apache License

@Override
public void readFields(DataInput di) throws IOException {
    attribute.readFields(di);
    LongWritable arraySize = new LongWritable();
    arraySize.readFields(di);
    splits = new ArrayList<DoubleWritable>();
    for (int i = 0; i < (int) arraySize.get(); i++) {
        DoubleWritable d = new DoubleWritable();
        d.readFields(di);
        splits.add(d);
    }
}
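
The matching write side is not shown on this page, but it would have to mirror the field order that readFields consumes: the attribute, then a length prefix, then each split. A hedged sketch:

@Override
public void write(DataOutput d) throws IOException {
    attribute.write(d); // same order as readFields above
    new LongWritable(splits.size()).write(d); // length prefix
    for (DoubleWritable split : splits) {
        split.write(d);
    }
}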

From source file:at.illecker.hama.hybrid.examples.summation.SummationBSP.java

License:Apache License

static void printOutput(BSPJob job, BigDecimal sum) throws IOException {
    FileSystem fs = FileSystem.get(job.getConfiguration());
    FileStatus[] listStatus = fs.listStatus(FileOutputFormat.getOutputPath(job));
    for (FileStatus status : listStatus) {
        if (!status.isDir()) {
            try {
                SequenceFile.Reader reader = new SequenceFile.Reader(fs, status.getPath(),
                        job.getConfiguration());

                Text key = new Text();
                DoubleWritable value = new DoubleWritable();

                if (reader.next(key, value)) {
                    LOG.info("Output File: " + status.getPath());
                    LOG.info("key: '" + key + "' value: '" + value + "' expected: '" + sum.doubleValue() + "'");
                    Assert.assertEquals("Expected value: '" + sum.doubleValue() + "' != '" + value + "'",
                            sum.doubleValue(), value.get(), Math.pow(10, (DOUBLE_PRECISION * -1)));
                }
                reader.close();

            } catch (IOException e) {
                if (status.getLen() > 0) {
                    System.out.println("Output File " + status.getPath());
                    FSDataInputStream in = fs.open(status.getPath());
                    IOUtils.copyBytes(in, System.out, job.getConfiguration(), false);
                    in.close();
                }
            }
        }
    }
    // fs.delete(FileOutputFormat.getOutputPath(job), true);
}

From source file:com.cloudera.recordservice.mr.RecordServiceRecord.java

License:Apache License

/**
 * Returns the corresponding Writable object for this column type.
 */
public Writable getWritableInstance(com.cloudera.recordservice.core.Schema.Type type) {
    switch (type) {
    case BOOLEAN:
        return new BooleanWritable();
    case TINYINT:
        return new ByteWritable();
    case SMALLINT:
        return new ShortWritable();
    case INT:
        return new IntWritable();
    case BIGINT:
        return new LongWritable();
    case FLOAT:
        return new FloatWritable();
    case DOUBLE:
        return new DoubleWritable();
    case VARCHAR:
    case CHAR:
    case STRING:
        return new Text();
    case TIMESTAMP_NANOS:
        return new TimestampNanosWritable();
    case DECIMAL:
        return new DecimalWritable();
    default:
        throw new UnsupportedOperationException("Unexpected type: " + type);
    }
}
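
A hypothetical caller can use this factory to pre-allocate one reusable Writable per column; columnTypes below is an illustrative List<Schema.Type>, not the exact RecordService schema API:

// Hypothetical: one reusable Writable per column of the schema.
Writable[] columns = new Writable[columnTypes.size()];
for (int i = 0; i < columns.length; i++) {
    columns[i] = getWritableInstance(columnTypes.get(i));
}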

From source file:com.dasasian.chok.util.WritableType.java

License:Apache License

public WritableComparable newWritableComparable() {
    switch (this) {
    case TEXT:
        return new Text();
    case BYTE:
        return new ByteWritable();
    case INT:
        return new IntWritable();
    case LONG:
        return new LongWritable();
    case FLOAT:
        return new FloatWritable();
    case DOUBLE:
        return new DoubleWritable();
    }
    throw getUnhandledTypeException();
}

From source file:com.datasalt.pangool.utils.HadoopUtils.java

License:Apache License

/**
 * Reads a map of integer -> double from a SequenceFile.
 */
public static HashMap<Integer, Double> readIntDoubleMap(Path path, FileSystem fs) throws IOException {
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, fs.getConf());

    IntWritable topic = new IntWritable();
    DoubleWritable value = new DoubleWritable();

    HashMap<Integer, Double> ret = new HashMap<Integer, Double>();

    while (reader.next(topic)) {
        reader.getCurrentValue(value);

        ret.put(topic.get(), value.get());
    }

    reader.close();
    return ret;
}
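
The inverse helper, writing such a map as IntWritable/DoubleWritable pairs, is a straightforward sketch using the same old-style SequenceFile API (writeIntDoubleMap is a hypothetical name, not part of HadoopUtils):

public static void writeIntDoubleMap(Map<Integer, Double> map, Path path, FileSystem fs) throws IOException {
    SequenceFile.Writer writer = SequenceFile.createWriter(fs, fs.getConf(), path, IntWritable.class,
            DoubleWritable.class);
    IntWritable key = new IntWritable();
    DoubleWritable value = new DoubleWritable();
    try {
        for (Map.Entry<Integer, Double> entry : map.entrySet()) {
            key.set(entry.getKey());
            value.set(entry.getValue());
            writer.append(key, value);
        }
    } finally {
        writer.close();
    }
}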

From source file:com.gotometrics.orderly.DoubleRowKey.java

License:Apache License

protected Object toDoubleWritable(Object o) {
    if (o == null || o instanceof DoubleWritable)
        return o;
    if (dw == null)
        dw = new DoubleWritable();
    dw.set((Double) o);
    return dw;
}

From source file:com.gotometrics.orderly.DoubleWritableRowKey.java

License:Apache License

@Override
public Object deserialize(ImmutableBytesWritable w) throws IOException {
    byte[] s = w.get();
    int offset = w.getOffset();
    if (w.getLength() <= 0)
        return null;

    try {
        long l = Bytes.toLong(s, offset) ^ order.mask();
        if (l == NULL)
            return null;

        if (dw == null)
            dw = new DoubleWritable();

        l--;                                         // undo the +1 that reserves a code point for NULL
        l ^= (~l >> Long.SIZE - 1) | Long.MIN_VALUE; // invert the order-preserving sign transform
        dw.set(Double.longBitsToDouble(l));
        return dw;
    } finally {
        RowKeyUtils.seek(w, Bytes.SIZEOF_LONG);
    }
}
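
Reading the transform backwards gives the matching serialization: remap the IEEE-754 bits so that unsigned byte order agrees with numeric order (flip all bits for negatives, only the sign bit for positives), then add one so the lowest code point stays free for NULL. A sketch derived from the deserialize above (b and offset stand for a destination buffer; this is not copied from the library):

long l = Double.doubleToLongBits(dw.get());
l ^= (l >> Long.SIZE - 1) | Long.MIN_VALUE; // negatives: flip all bits; positives: flip sign bit
l++;                                        // reserve the smallest encoded value for NULL
Bytes.putLong(b, offset, l ^ order.mask()); // apply the ascending/descending mask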

From source file:com.gotometrics.orderly.example.DoubleExample.java

License:Apache License

public void serializationExamples() throws Exception {
    DoubleWritableRowKey d = new DoubleWritableRowKey();
    DoubleWritable w = new DoubleWritable();
    ImmutableBytesWritable buffer = new ImmutableBytesWritable();
    byte[] b;

    /* Serialize and deserialize into an ImmutableBytesWritable */
    w.set(-93214.920352);
    b = new byte[d.getSerializedLength(w)];
    buffer.set(b);
    d.serialize(w, buffer);
    buffer.set(b, 0, b.length);
    System.out.println(
            "deserialize(serialize(-93214.920352)) = " + ((DoubleWritable) d.deserialize(buffer)).get());

    /* Serialize and deserialize into a byte array (descending sort) */
    d.setOrder(Order.DESCENDING);
    w.set(0);
    System.out.println("deserialize(serialize(0)) = " + ((DoubleWritable) d.deserialize(d.serialize(w))).get());

    /* Serialize and deserialize NULL into a byte array */
    System.out.println("deserialize(serialize(NULL)) = " + d.deserialize(d.serialize(null)));
}

From source file:com.ibm.bi.dml.runtime.matrix.sort.IndexSortComparable.java

License:Open Source License

public IndexSortComparable() {
    _dval = new DoubleWritable();
    _lval = new LongWritable();
}

From source file:com.ibm.bi.dml.runtime.util.MapReduceTool.java

License:Open Source License

public static double[] pickValueWeight(String dir, NumItemsByEachReducerMetaData metadata, double p,
        boolean average) throws IOException {
    long[] counts = metadata.getNumItemsArray();
    long[] ranges = new long[counts.length];
    ranges[0] = counts[0];
    for (int i = 1; i < counts.length; i++)
        ranges[i] = ranges[i - 1] + counts[i];

    long total = ranges[ranges.length - 1];

    // average only if it is requested and the total weight is even
    average = average && (total % 2 == 0);

    int currentPart = 0;
    double cum_weight = 0;
    long pos = (long) Math.ceil(total * p);
    while (ranges[currentPart] < pos) {
        currentPart++;
        cum_weight += ranges[currentPart];
    }
    int offset;
    if (currentPart > 0)
        offset = (int) (pos - ranges[currentPart - 1] - 1);
    else
        offset = (int) pos - 1;

    FileSystem fs = FileSystem.get(_rJob);
    Path path = new Path(dir);
    FileStatus[] files = fs.listStatus(path);
    Path fileToRead = null;
    for (FileStatus file : files)
        if (file.getPath().toString().endsWith(Integer.toString(currentPart))) {
            fileToRead = file.getPath();
            break;
        }

    if (fileToRead == null)
        throw new RuntimeException("cannot read partition " + currentPart);

    FSDataInputStream currentStream = fs.open(fileToRead);
    DoubleWritable readKey = new DoubleWritable();
    IntWritable readValue = new IntWritable();

    boolean contain0s = false;
    long numZeros = 0;
    if (currentPart == metadata.getPartitionOfZero()) {
        contain0s = true;
        numZeros = metadata.getNumberOfZero();
    }
    ReadWithZeros reader = new ReadWithZeros(currentStream, contain0s, numZeros);

    int numRead = 0;
    while (numRead <= offset) {
        reader.readNextKeyValuePairs(readKey, readValue);
        numRead += readValue.get();
        cum_weight += readValue.get();
    }

    double ret = readKey.get();
    if (average) {
        if (numRead <= offset + 1) {
            reader.readNextKeyValuePairs(readKey, readValue);
            cum_weight += readValue.get();
            ret = (ret + readKey.get()) / 2;
        }
    }
    currentStream.close();
    return new double[] { ret, (average ? -1 : readValue.get()), (average ? -1 : cum_weight) };
}