Example usage for org.apache.hadoop.io LongWritable LongWritable

List of usage examples for org.apache.hadoop.io LongWritable LongWritable

Introduction

On this page you can find example usage for org.apache.hadoop.io LongWritable LongWritable.

Prototype

public LongWritable(long value) 

Source Link

Usage

From source file:com.ebay.nest.io.sede.lazybinary.LazyBinaryLong.java

License:Apache License

LazyBinaryLong(LazyBinaryLong copy) {
    super(copy);
    data = new LongWritable(copy.data.get());
}

From source file:com.ebay.nest.io.sede.objectinspector.primitive.JavaLongObjectInspector.java

License:Apache License

/**
 * Wraps a boxed {@code Long} in a fresh LongWritable.
 * A null input is passed through unchanged.
 */
@Override
public Object getPrimitiveWritableObject(Object o) {
    if (o == null) {
        return null;
    }
    return new LongWritable(((Long) o).longValue());
}

From source file:com.ebay.nest.io.sede.objectinspector.primitive.WritableLongObjectInspector.java

License:Apache License

/**
 * Returns a defensive copy of the given LongWritable (or null for null input),
 * so callers never end up aliasing the original writable.
 */
@Override
public Object copyObject(Object o) {
    if (o == null) {
        return null;
    }
    final long value = ((LongWritable) o).get();
    return new LongWritable(value);
}

From source file:com.ebay.nest.io.sede.objectinspector.primitive.WritableLongObjectInspector.java

License:Apache License

/** Creates a new LongWritable initialized to the given primitive value. */
@Override
public Object create(long value) {
    final LongWritable writable = new LongWritable(value);
    return writable;
}

From source file:com.ery.hadoop.mrddx.hbase.HbaseRecordReader.java

License:Apache License

/** Creates a fresh key initialized to zero; the reader fills it in as records are consumed. */
public LongWritable createKey() {
    return new LongWritable(0L);
}

From source file:com.facebook.hive.orc.lazy.OrcLazyLong.java

License:Open Source License

public OrcLazyLong(OrcLazyLong copy) {
    super(copy);/*from w w w. j  av  a 2 s  .  com*/
    if (copy.previous != null) {
        previous = new LongWritable(((LongWritable) copy.previous).get());
    }
}

From source file:com.github.gaoyangthu.demo.mapred.PiEstimator.java

License:Apache License

/**
 * Run a map/reduce job for estimating Pi.
 *
 * @param numMaps   number of map tasks; each gets one input file with an (offset, size) pair
 * @param numPoints number of sample points generated per map task
 * @param jobConf   job configuration to populate and submit
 * @return the estimated value of Pi, computed as 4 * numInside / (numMaps * numPoints)
 * @throws IOException if the temporary directory already exists or any HDFS I/O fails
 */
public static BigDecimal estimate(int numMaps, long numPoints, JobConf jobConf) throws IOException {
    //setup job conf
    jobConf.setJobName(PiEstimator.class.getSimpleName());

    jobConf.setInputFormat(SequenceFileInputFormat.class);

    jobConf.setOutputKeyClass(BooleanWritable.class);
    jobConf.setOutputValueClass(LongWritable.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setMapperClass(PiMapper.class);
    jobConf.setNumMapTasks(numMaps);

    jobConf.setReducerClass(PiReducer.class);
    jobConf.setNumReduceTasks(1);

    // turn off speculative execution, because DFS doesn't handle
    // multiple writers to the same file.
    jobConf.setSpeculativeExecution(false);

    //setup input/output directories
    final Path inDir = new Path(TMP_DIR, "in");
    final Path outDir = new Path(TMP_DIR, "out");
    FileInputFormat.setInputPaths(jobConf, inDir);
    FileOutputFormat.setOutputPath(jobConf, outDir);

    final FileSystem fs = FileSystem.get(jobConf);
    if (fs.exists(TMP_DIR)) {
        throw new IOException(
                "Tmp directory " + fs.makeQualified(TMP_DIR) + " already exists.  Please remove it first.");
    }
    if (!fs.mkdirs(inDir)) {
        throw new IOException("Cannot create input directory " + inDir);
    }

    try {
        //generate an input file for each map task: a single (offset, size) record
        for (int i = 0; i < numMaps; ++i) {
            final Path file = new Path(inDir, "part" + i);
            final LongWritable offset = new LongWritable(i * numPoints);
            final LongWritable size = new LongWritable(numPoints);
            final SequenceFile.Writer writer = SequenceFile.createWriter(fs, jobConf, file, LongWritable.class,
                    LongWritable.class, CompressionType.NONE);
            try {
                writer.append(offset, size);
            } finally {
                writer.close();
            }
            System.out.println("Wrote input for Map #" + i);
        }

        //start a map/reduce job
        System.out.println("Starting Job");
        final long startTime = System.currentTimeMillis();
        JobClient.runJob(jobConf);
        final double duration = (System.currentTimeMillis() - startTime) / 1000.0;
        System.out.println("Job Finished in " + duration + " seconds");

        //read the single (numInside, numOutside) record written by PiReducer
        Path inFile = new Path(outDir, "reduce-out");
        LongWritable numInside = new LongWritable();
        LongWritable numOutside = new LongWritable();
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, inFile, jobConf);
        try {
            reader.next(numInside, numOutside);
        } finally {
            reader.close();
        }

        //compute estimated value: 4 * numInside / (numMaps * numPoints)
        // BUG FIX: BigDecimal.divide(BigDecimal) with no rounding mode throws
        // ArithmeticException when the exact quotient has a non-terminating decimal
        // expansion (e.g. numMaps == 3). Supplying an explicit rounding mode rounds
        // the quotient to the existing 20-digit scale instead of failing.
        return BigDecimal.valueOf(4).setScale(20).multiply(BigDecimal.valueOf(numInside.get()))
                .divide(BigDecimal.valueOf(numMaps), java.math.RoundingMode.HALF_UP)
                .divide(BigDecimal.valueOf(numPoints), java.math.RoundingMode.HALF_UP);
    } finally {
        fs.delete(TMP_DIR, true);
    }
}

From source file:com.gotometrics.orderly.TestFixedLongWritableRowKey.java

License:Apache License

/** Produces a LongWritable holding a uniformly random long value. */
@Override
public Object createObject() {
    final long randomValue = r.nextLong();
    return new LongWritable(randomValue);
}

From source file:com.gotometrics.orderly.TestLongRowKey.java

License:Apache License

/**
 * Compares two boxed Long values by wrapping them as LongWritables.
 * If either operand is null, the comparison is delegated to the superclass unchanged.
 */
@Override
public int compareTo(Object o1, Object o2) {
    if (o1 == null) {
        return super.compareTo(o1, o2);
    }
    if (o2 == null) {
        return super.compareTo(o1, o2);
    }
    final LongWritable left = new LongWritable((Long) o1);
    final LongWritable right = new LongWritable((Long) o2);
    return super.compareTo(left, right);
}

From source file:com.gotometrics.orderly.TestLongWritableRowKey.java

License:Apache License

/**
 * Generates a random LongWritable for row-key round-trip testing.
 * Returns null roughly 1 time in 128; otherwise draws a random long and,
 * three times out of four, narrows it into a smaller range so that the
 * shorter serialized encodings are exercised as well.
 */
@Override
public Object createObject() {
    if (r.nextInt(128) == 0) {
        return null;
    }

    long value = r.nextLong();
    final int bucket = r.nextInt(4);
    if (bucket == 0) {
        // Single byte: -64 <= x < 64
        value = (value & 127) - 64;
    } else if (bucket == 1) {
        // Double byte: -8192 <= x < 8192
        value = (value & 16383) - 8192;
    } else if (bucket == 2) {
        // 1-2 MB range
        value = (value & ((1 << 21) - 1)) - (1 << 20);
    }
    // bucket == 3: keep the full-range value unchanged

    return new LongWritable(value);
}