Example usage for org.apache.hadoop.io.LongWritable.get()

Introduction

On this page you can find example usages of org.apache.hadoop.io.LongWritable.get().

Prototype

public long get() 

Document

Return the value of this LongWritable.
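
Before the real-world examples, here is a minimal, self-contained sketch of the set()/get() round trip (the class and variable names are illustrative, not taken from the examples below):

import org.apache.hadoop.io.LongWritable;

public class LongWritableGetDemo {
    public static void main(String[] args) {
        LongWritable counter = new LongWritable(41L);
        long raw = counter.get();          // unwrap the primitive value: 41
        counter.set(raw + 1);              // LongWritable is mutable and reusable
        System.out.println(counter.get()); // prints 42
    }
}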

Usage

From source file: org.apache.giraph.reducers.impl.LongXorReduce.java

License: Apache License
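
This Giraph reducer folds each incoming value into the running aggregate with bitwise XOR, unwrapping both operands via get() and writing the result back into the reusable curValue.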

@Override
public LongWritable reduce(LongWritable curValue, LongWritable valueToReduce) {
    curValue.set(curValue.get() ^ valueToReduce.get());
    return curValue;
}

From source file: org.apache.giraph.types.LongWritableToLongUnwrapper.java

License: Apache License
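
Here get(), combined with autoboxing, converts the Hadoop wrapper into a plain java.lang.Long.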

@Override
public Long unwrap(LongWritable writableValue) {
    return writableValue.get();
}

From source file: org.apache.giraph.types.ops.collections.array.WLongArrayList.java

License: Apache License
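
addW() unwraps the writable and appends the primitive long to the underlying primitive-long list.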

@Override
public void addW(LongWritable value) {
    add(value.get());
}

From source file: org.apache.giraph.types.ops.collections.array.WLongArrayList.java

License: Apache License
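
setW() likewise unwraps the value before storing it at the given index.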

@Override
public void setW(int index, LongWritable value) {
    set(index, value.get());
}

From source file: org.apache.giraph.types.ops.collections.array.WLongArrayList.java

License: Apache License
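
fillW() bounds-checks the end index, then fills the [from, to) range of the backing array with the unwrapped primitive value.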

@Override
public void fillW(int from, int to, LongWritable value) {
    if (to > size()) {
        throw new ArrayIndexOutOfBoundsException(
                "End index (" + to + ") is greater than array length (" + size() + ")");
    }
    Arrays.fill(elements(), from, to, value.get());
}

From source file: org.apache.giraph.types.ops.LongTypeOps.java

License: Apache License
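
createCopy() allocates a fresh LongWritable holding the same primitive value, so later mutations of the copy do not affect the original.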

@Override
public LongWritable createCopy(LongWritable from) {
    return new LongWritable(from.get());
}

From source file: org.apache.giraph.types.ops.LongTypeOps.java

License: Apache License
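
set() copies the primitive value from one existing writable into another, avoiding a new allocation.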

@Override
public void set(LongWritable to, LongWritable from) {
    to.set(from.get());
}

From source file: org.apache.giraph.utils.hashing.LongWritableFunnel.java

License: Apache License
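
The funnel feeds the unwrapped long into a Guava PrimitiveSink so that LongWritable instances can be hashed.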

@Override
public void funnel(LongWritable w, PrimitiveSink into) {
    into.putLong(w.get());
}

From source file: org.apache.giraph.writable.kryo.KryoWritableTest.java

License: Apache License
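
This test repeatedly sets a value, copies it between writables with WritableUtils.copyInto, and asserts that get() returns the same value on the other side.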

@Test
public void testLongWritable() throws Exception {
    LongWritable from = new LongWritable(0);
    LongWritable to = new LongWritable(0);

    for (int i = 0; i < longTestTimes; i++) {
        from.set(i);
        WritableUtils.copyInto(from, to, true);
        assertEquals(i, to.get());
    }
}

From source file: org.apache.hadoop.examples.QuasiMonteCarlo.java

License: Apache License
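
In this full MapReduce driver, get() appears near the end: the reducer's numInside count is read back from a SequenceFile and unwrapped when computing the final Pi estimate.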

/**
 * Run a map/reduce job for estimating Pi.
 *
 * @return the estimated value of Pi
 */
public static BigDecimal estimatePi(int numMaps, long numPoints, Path tmpDir, Configuration conf)
        throws IOException, ClassNotFoundException, InterruptedException {
    Job job = Job.getInstance(conf);
    //setup job conf
    job.setJobName(QuasiMonteCarlo.class.getSimpleName());
    job.setJarByClass(QuasiMonteCarlo.class);

    job.setInputFormatClass(SequenceFileInputFormat.class);

    job.setOutputKeyClass(BooleanWritable.class);
    job.setOutputValueClass(LongWritable.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);

    job.setMapperClass(QmcMapper.class);

    job.setReducerClass(QmcReducer.class);
    job.setNumReduceTasks(1);

    // turn off speculative execution, because DFS doesn't handle
    // multiple writers to the same file.
    job.setSpeculativeExecution(false);

    //setup input/output directories
    final Path inDir = new Path(tmpDir, "in");
    final Path outDir = new Path(tmpDir, "out");
    FileInputFormat.setInputPaths(job, inDir);
    FileOutputFormat.setOutputPath(job, outDir);

    final FileSystem fs = FileSystem.get(conf);
    if (fs.exists(tmpDir)) {
        throw new IOException(
                "Tmp directory " + fs.makeQualified(tmpDir) + " already exists.  Please remove it first.");
    }
    if (!fs.mkdirs(inDir)) {
        throw new IOException("Cannot create input directory " + inDir);
    }

    try {
        //generate an input file for each map task
        for (int i = 0; i < numMaps; ++i) {
            final Path file = new Path(inDir, "part" + i);
            final LongWritable offset = new LongWritable(i * numPoints);
            final LongWritable size = new LongWritable(numPoints);
            final SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, file, LongWritable.class,
                    LongWritable.class, CompressionType.NONE);
            try {
                writer.append(offset, size);
            } finally {
                writer.close();
            }
            System.out.println("Wrote input for Map #" + i);
        }

        //start a map/reduce job
        System.out.println("Starting Job");
        final long startTime = Time.monotonicNow();
        job.waitForCompletion(true);
        if (!job.isSuccessful()) {
            System.out.println("Job " + job.getJobID() + " failed!");
            System.exit(1);
        }
        final double duration = (Time.monotonicNow() - startTime) / 1000.0;
        System.out.println("Job Finished in " + duration + " seconds");

        //read outputs
        Path inFile = new Path(outDir, "reduce-out");
        LongWritable numInside = new LongWritable();
        LongWritable numOutside = new LongWritable();
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, inFile, conf);
        try {
            reader.next(numInside, numOutside);
        } finally {
            reader.close();
        }

        //compute estimated value
        final BigDecimal numTotal = BigDecimal.valueOf(numMaps).multiply(BigDecimal.valueOf(numPoints));
        return BigDecimal.valueOf(4).setScale(20).multiply(BigDecimal.valueOf(numInside.get())).divide(numTotal,
                RoundingMode.HALF_UP);
    } finally {
        fs.delete(tmpDir, true);
    }
}