Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

On this page you can find usage examples for the org.apache.hadoop.io.DoubleWritable(double) constructor, collected from open-source projects.

Prototype

public DoubleWritable(double value) 
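A minimal, self-contained sketch of the constructor in isolation; the class name DoubleWritableDemo and the values used are illustrative only, but every method shown (get, set, write, readFields, compareTo) is part of the standard Hadoop API:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableDemo {
    public static void main(String[] args) throws IOException {
        // Wrap a primitive double in a Writable
        DoubleWritable dw = new DoubleWritable(1.5);
        System.out.println(dw.get()); // 1.5

        // DoubleWritable is mutable: set() updates the value in place
        dw.set(4.5);

        // Round-trip through Hadoop's Writable serialization
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        dw.write(new DataOutputStream(bytes));

        DoubleWritable copy = new DoubleWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.get());         // 4.5
        System.out.println(dw.compareTo(copy)); // 0 -- equal values
    }
}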

Usage

From source file: org.apache.orc.mapred.TestOrcStruct.java

License: Apache License

@Test
public void testFieldAccess() {
    OrcStruct struct = new OrcStruct(TypeDescription.fromString("struct<i:int,j:double,k:string>"));
    struct.setFieldValue("j", new DoubleWritable(1.5));
    struct.setFieldValue("k", new Text("Moria"));
    struct.setFieldValue(0, new IntWritable(42));
    assertEquals(new IntWritable(42), struct.getFieldValue("i"));
    assertEquals(new DoubleWritable(1.5), struct.getFieldValue(1));
    assertEquals(new Text("Moria"), struct.getFieldValue("k"));
    struct.setAllFields(new IntWritable(123), new DoubleWritable(4.5), new Text("ok"));
    assertEquals("123", struct.getFieldValue(0).toString());
    assertEquals("4.5", struct.getFieldValue(1).toString());
    assertEquals("ok", struct.getFieldValue(2).toString());
}

From source file: org.apache.phoenix.hive.objectinspector.PhoenixDoubleObjectInspector.java

License: Apache License

@Override
public DoubleWritable getPrimitiveWritableObject(Object o) {
    return new DoubleWritable(get(o));
}
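Each call above allocates a fresh DoubleWritable. A common variant in object inspectors is to reuse a single mutable instance per inspector; the following is an illustrative sketch of that pattern, not Phoenix's actual implementation:

private final DoubleWritable reusable = new DoubleWritable(0.0);

@Override
public DoubleWritable getPrimitiveWritableObject(Object o) {
    // set() mutates the shared instance, avoiding a per-row allocation;
    // this is only safe when callers do not retain the returned reference
    reusable.set(get(o));
    return reusable;
}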

From source file: org.apache.sysml.runtime.matrix.sort.SamplingSortMRInputFormat.java

License: Apache License

/**
 * Use the input splits to take samples of the input and generate sample
 * keys. By default reads 1,000 keys from up to 10 locations in the input,
 * sorts them and picks N-1 keys to generate N equally sized partitions.
 *
 * @param conf the job to sample
 * @param partFile where to write the output file to
 * @return index of the first split point with a non-negative value, or
 *         partitions-1 if all split points are negative
 * @throws IOException if reading the input or writing the partition file fails
 * @throws InstantiationException if a record reader cannot be instantiated reflectively
 * @throws IllegalAccessException if reflective access to a class is denied
 */
@SuppressWarnings({ "unchecked", "unused", "deprecation" })
public static int writePartitionFile(JobConf conf, Path partFile)
        throws IOException, InstantiationException, IllegalAccessException {
    SamplingSortMRInputFormat inFormat = new SamplingSortMRInputFormat();
    Sampler sampler = new Sampler();

    Class<? extends WritableComparable> targetKeyClass;
    targetKeyClass = (Class<? extends WritableComparable>) conf.getClass(TARGET_KEY_CLASS,
            WritableComparable.class);
    //get input converter information
    int brlen = MRJobConfiguration.getNumRowsPerBlock(conf, (byte) 0);
    int bclen = MRJobConfiguration.getNumColumnsPerBlock(conf, (byte) 0);

    //indicate whether the matrix value in this mapper is a matrix cell or a matrix block
    int partitions = conf.getNumReduceTasks();

    long sampleSize = conf.getLong(SAMPLE_SIZE, 1000);
    InputSplit[] splits = inFormat.getSplits(conf, conf.getNumMapTasks());
    int samples = Math.min(10, splits.length);
    long recordsPerSample = sampleSize / samples;
    int sampleStep = splits.length / samples;
    // take N samples from different parts of the input

    int totalcount = 0;
    for (int i = 0; i < samples; i++) {
        SequenceFileRecordReader reader = (SequenceFileRecordReader) inFormat
                .getRecordReader(splits[sampleStep * i], conf, null);
        int count = 0;
        WritableComparable key = (WritableComparable) reader.createKey();
        Writable value = (Writable) reader.createValue();
        while (reader.next(key, value) && count < recordsPerSample) {
            Converter inputConverter = MRJobConfiguration.getInputConverter(conf, (byte) 0);
            inputConverter.setBlockSize(brlen, bclen);
            inputConverter.convert(key, value);
            while (inputConverter.hasNext()) {
                Pair pair = inputConverter.next();
                if (pair.getKey() instanceof DoubleWritable) {
                    sampler.addValue(new DoubleWritable(((DoubleWritable) pair.getKey()).get()));
                } else if (pair.getValue() instanceof MatrixCell) {
                    sampler.addValue(new DoubleWritable(((MatrixCell) pair.getValue()).getValue()));
                } else
                    throw new IOException("SamplingSortMRInputFormat unsupported key/value class: "
                            + pair.getKey().getClass() + ":" + pair.getValue().getClass());

                count++;
            }
            key = (WritableComparable) reader.createKey();
            value = (Writable) reader.createValue();
        }
        totalcount += count;
    }

    if (totalcount == 0) //empty input files
        sampler.addValue(new DoubleWritable(0));

    FileSystem outFs = partFile.getFileSystem(conf);
    if (outFs.exists(partFile)) {
        outFs.delete(partFile, false);
    }

    //note: key value always double/null as expected by partitioner
    SequenceFile.Writer writer = null;
    int index0 = -1;
    try {
        writer = SequenceFile.createWriter(outFs, conf, partFile, DoubleWritable.class, NullWritable.class);
        NullWritable nullValue = NullWritable.get();
        int i = 0;
        boolean lessthan0 = true;
        for (WritableComparable splitValue : sampler.createPartitions(partitions)) {
            writer.append(splitValue, nullValue);
            if (lessthan0 && ((DoubleWritable) splitValue).get() >= 0) {
                index0 = i;
                lessthan0 = false;
            }
            i++;
        }
        if (lessthan0)
            index0 = partitions - 1;
    } finally {
        IOUtilFunctions.closeSilently(writer);
    }

    return index0;
}
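The partition file holds DoubleWritable split points paired with NullWritable placeholders, so a consumer such as the partitioner can read it back with a plain SequenceFile.Reader. A minimal sketch of that reading side, assumed here for illustration and not part of the SystemML excerpt:

SequenceFile.Reader reader = new SequenceFile.Reader(outFs, partFile, conf);
try {
    // keys are the sampled split points; values are NullWritable placeholders
    DoubleWritable key = new DoubleWritable();
    NullWritable value = NullWritable.get();
    while (reader.next(key, value)) {
        System.out.println("split point: " + key.get());
    }
} finally {
    reader.close();
}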

From source file: org.apache.tajo.plan.util.WritableTypeConverter.java

License: Apache License

public static Writable convertDatum2Writable(Datum value) {
    switch (value.kind()) {
    case INT1:
        return new ByteWritable(value.asByte());
    case INT2:
        return new ShortWritable(value.asInt2());
    case INT4:
        return new IntWritable(value.asInt4());
    case INT8:
        return new LongWritable(value.asInt8());

    case FLOAT4:
        return new FloatWritable(value.asFloat4());
    case FLOAT8:
        return new DoubleWritable(value.asFloat8());

    // NOTE: value should be DateDatum
    case DATE:
        return new DateWritable(value.asInt4() - DateTimeConstants.UNIX_EPOCH_JDATE);

    // NOTE: value should be TimestampDatum
    case TIMESTAMP:
        TimestampWritable result = new TimestampWritable();
        result.setTime(DateTimeUtil.julianTimeToJavaTime(value.asInt8()));
        return result;

    case CHAR: {
        String str = value.asChars();
        return new HiveCharWritable(new HiveChar(str, str.length()));
    }
    case TEXT:
        return new Text(value.asChars());
    case VARBINARY:
        return new BytesWritable(value.asByteArray());

    case NULL_TYPE:
        return null;
    }

    throw new TajoRuntimeException(new NotImplementedException(TypeStringEncoder.encode(value.type())));
}
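A usage sketch of the FLOAT8 branch; DatumFactory.createFloat8 is assumed here from Tajo's datum package and is not part of the excerpt:

// a FLOAT8 datum comes back as a DoubleWritable
Datum d = DatumFactory.createFloat8(2.75);
Writable w = WritableTypeConverter.convertDatum2Writable(d);
assert ((DoubleWritable) w).get() == 2.75;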

From source file: org.archive.bacon.io.SequenceFileStorage.java

License: Apache License

/**
 * Convert the Pig tupleValue to the corresponding Hadoop object.
 */
public Writable getWritable(Object tupleValue, Writable nullWritable) throws IOException {
    switch (DataType.findType(tupleValue)) {
    case DataType.BOOLEAN:
        return new BooleanWritable((boolean) tupleValue);

    case DataType.BYTE:
        return new ByteWritable((byte) tupleValue);

    case DataType.CHARARRAY:
        return new Text((String) tupleValue);

    case DataType.INTEGER:
        return new IntWritable((int) tupleValue);

    case DataType.LONG:
        return new LongWritable((long) tupleValue);

    case DataType.DOUBLE:
        return new DoubleWritable((double) tupleValue);

    case DataType.FLOAT:
        return new FloatWritable((float) tupleValue);

    case DataType.BYTEARRAY:
        return new BytesWritable((byte[]) tupleValue);

    // If we get a 'null' from Pig, just pass through the
    // already-instantiated Hadoop nullWritable.
    case DataType.NULL:
        return nullWritable;

    // Don't know what to do with these complex data types.
    case DataType.BAG:
    case DataType.ERROR:
    case DataType.MAP:
    case DataType.TUPLE:
    case DataType.UNKNOWN:
    default:
        throw new IOException("Cannot write values of type: " + DataType.findTypeName(tupleValue));
    }
}
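A usage sketch of the DOUBLE and NULL branches, given some SequenceFileStorage instance named storage (construction elided; shown for illustration only):

// DataType.DOUBLE maps to DoubleWritable
Writable w = storage.getWritable(Double.valueOf(3.5), NullWritable.get());
assert ((DoubleWritable) w).get() == 3.5;

// a Pig null passes the pre-instantiated nullWritable straight through
Writable n = storage.getWritable(null, NullWritable.get());
assert n == NullWritable.get();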

From source file: org.archive.giraph.InDegreeCountComputation.java

License: Apache License

@Override
public void compute(Vertex<LongWritable, DoubleWritable, FloatWritable> vertex,
        Iterable<DoubleWritable> messages) {
    if (getSuperstep() == 0) {
        Iterable<Edge<LongWritable, FloatWritable>> edges = vertex.getEdges();
        for (Edge<LongWritable, FloatWritable> edge : edges) {
            sendMessage(edge.getTargetVertexId(), new DoubleWritable(1.0));
        }
    } else {
        long sum = 0;
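        // every incoming message counts as one in-edge; the payload (1.0) is ignored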
        for (DoubleWritable message : messages) {
            sum++;
        }
        DoubleWritable vertexValue = vertex.getValue();
        vertexValue.set(sum);
        vertex.setValue(vertexValue);
        vertex.voteToHalt();
    }
}

From source file: org.archive.giraph.InDegreeCountVertex.java

License: Apache License

@Override
public void compute(Iterable<DoubleWritable> messages) {
    if (getSuperstep() == 0) {
        Iterable<Edge<LongWritable, FloatWritable>> edges = getEdges();
        for (Edge<LongWritable, FloatWritable> edge : edges) {
            sendMessage(edge.getTargetVertexId(), new DoubleWritable(1.0));
        }
    } else {
        long sum = 0;
        for (DoubleWritable message : messages) {
            sum++;
        }
        DoubleWritable vertexValue = getValue();
        vertexValue.set(sum);
        setValue(vertexValue);
        voteToHalt();
    }
}

From source file: org.archive.giraph.PageRankComputation.java

License: Apache License

@Override
public void compute(Vertex<LongWritable, DoubleWritable, FloatWritable> vertex,
        Iterable<DoubleWritable> messages) {
    if (getSuperstep() >= 1) {
        double sum = 0;
        for (DoubleWritable message : messages) {
            sum += message.get();
        }
        // add in the dangling factor
        sum += this.<DoubleWritable>getAggregatedValue(DANGLING_SUM_AGG).get();
        float jumpProbability = JUMP_PROBABILITY.get(getConf());
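        // damped update: rank = jump + (1 - jump) * sum, where sum already includes the dangling share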
        DoubleWritable vertexValue = new DoubleWritable(jumpProbability + (1 - jumpProbability) * sum);
        vertex.setValue(vertexValue);
        aggregate(NUMVERTICES_SUM_AGG, new LongWritable(1));
    }
    if (getSuperstep() < MAX_SUPERSTEPS.get(getConf())) {
        long edges = vertex.getNumEdges();
        double vertexValue = vertex.getValue().get();
        //dangling nodes -- transfer score evenly to all nodes
        if (0 == edges) {
            aggregate(DANGLING_SUM_AGG, new DoubleWritable(vertexValue / getTotalNumVertices()));
        } else {
            sendMessageToAllEdges(vertex, new DoubleWritable(vertexValue / edges));
        }
    } else {
        vertex.voteToHalt();
    }
}

From source file: org.archive.giraph.PageRankVertex.java

License: Apache License

public void compute(Iterable<DoubleWritable> messages) {
    if (getSuperstep() >= 1) {
        double sum = 0;
        for (DoubleWritable message : messages) {
            sum += message.get();
        }
        // add in the dangling factor
        sum += this.<DoubleWritable>getAggregatedValue(DANGLING_SUM_AGG).get();
        float jump_probability = JUMP_PROBABILITY.get(getConf());
        DoubleWritable vertexValue = new DoubleWritable(jump_probability + (1 - jump_probability) * sum);
        setValue(vertexValue);
        aggregate(NUMVERTICES_SUM_AGG, new LongWritable(1));
    }
    if (getSuperstep() < MAX_SUPERSTEPS.get(getConf())) {
        long edges = getNumEdges();
        double vertexValue = getValue().get();
        //dangling nodes -- transfer score evenly to all nodes
        if (0 == edges)
            aggregate(DANGLING_SUM_AGG, new DoubleWritable(vertexValue / getTotalNumVertices()));
        else
            sendMessageToAllEdges(new DoubleWritable(vertexValue / edges));
    } else {
        voteToHalt();
    }
}

From source file: org.archive.giraph.WeightedPageRankComputation.java

License: Apache License

@Override
public void compute(Vertex<Text, DoubleWritable, FloatWritable> vertex, Iterable<DoubleWritable> messages) {
    if (getSuperstep() >= 1) {
        double sum = 0;
        for (DoubleWritable message : messages) {
            sum += message.get();
        }
        // add in the dangling factor
        sum += this.<DoubleWritable>getAggregatedValue(DANGLING_SUM_AGG).get();
        float jumpProbability = JUMP_PROBABILITY.get(getConf());
        DoubleWritable vertexValue = new DoubleWritable(jumpProbability + (1 - jumpProbability) * sum);
        vertex.setValue(vertexValue);
        aggregate(NUMVERTICES_SUM_AGG, new LongWritable(1));
    }
    if (getSuperstep() < MAX_SUPERSTEPS.get(getConf())) {
        long edges = vertex.getNumEdges();
        double vertexValue = vertex.getValue().get();
        //dangling nodes -- transfer score evenly to all nodes
        if (0 == edges) {
            aggregate(DANGLING_SUM_AGG, new DoubleWritable(vertexValue / getTotalNumVertices()));
        } else {
            //Pass 1: Sum up all neighbor weights
            float totalEdgeWeight = 0;
            for (Edge<Text, FloatWritable> edge : vertex.getEdges()) {
                totalEdgeWeight += edge.getValue().get();
            }
            boolean sumEdgeWeightsFlag = SUM_EDGE_WEIGHTS_FLAG.get(getConf());
            if (!sumEdgeWeightsFlag) {
                totalEdgeWeight = 1;
            }
            //Pass 2: send weighted PR value to each neighbor
            if (totalEdgeWeight > 0) {
                for (Edge<Text, FloatWritable> edge : vertex.getEdges()) {
                    sendMessage(edge.getTargetVertexId(),
                            new DoubleWritable((vertexValue * edge.getValue().get()) / totalEdgeWeight));
                }
            }
        }
    } else {
        vertex.voteToHalt();
    }
}
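For example, with sumEdgeWeightsFlag enabled, a vertex whose value is 0.6 and whose outgoing edges carry weights 1.0 and 3.0 sends messages of 0.15 and 0.45; with the flag disabled, totalEdgeWeight is forced to 1 and each neighbor receives vertexValue scaled by its own raw edge weight.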