Example usage for org.apache.hadoop.io LongWritable get

Introduction

This page collects example usages of org.apache.hadoop.io.LongWritable#get, drawn from open-source projects.

Prototype

public long get() 

Document

Return the value of this LongWritable.
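
Before the project examples, here is a minimal self-contained sketch of the get()/set() round trip; the class and variable names are illustrative only:

import org.apache.hadoop.io.LongWritable;

public class LongWritableGetDemo {
    public static void main(String[] args) {
        // LongWritable is a mutable, reusable box around a primitive long
        LongWritable writable = new LongWritable(42L);
        long raw = writable.get(); // get() unwraps the primitive value
        System.out.println(raw);   // prints 42

        writable.set(raw + 1);     // the same instance can be reused
        System.out.println(writable.get()); // prints 43
    }
}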

Usage

From source file: com.asakusafw.runtime.directio.hadoop.SequenceFileFormatTest.java

License: Apache License

/**
 * Test method for output.
 * @throws Exception if failed
 */
@SuppressWarnings("deprecation")
@Test
public void output() throws Exception {
    final int count = 10000;
    LocalFileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path(folder.newFile("testing").toURI());
    try (ModelOutput<StringOption> out = format.createOutput(StringOption.class, fs, path, new Counter())) {
        StringOption value = new StringOption();
        for (int i = 0; i < count; i++) {
            value.modify("Hello, world at " + i);
            out.write(value);
        }
    }
    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        LongWritable k = new LongWritable();
        Text v = new Text();
        for (int i = 0; i < count; i++) {
            String answer = "Hello, world at " + i;
            assertThat(answer, reader.next(k, v), is(true));
            assertThat(answer, k.get(), is(1L));
            assertThat(answer, v.toString(), is(answer));
        }
        assertThat("eof", reader.next(k), is(false));
    }
}

From source file: com.asakusafw.runtime.io.sequencefile.SequenceFileUtilTest.java

License: Apache License

/**
 * Uses a large sequence file with original API.
 * @throws Exception if failed
 */
@Test
public void original_large() throws Exception {
    Path path = new Path("large");

    LongWritable key = new LongWritable();
    LongWritable value = new LongWritable();

    try (SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path, key.getClass(),
            value.getClass())) {
        for (long i = 0; i < 300000; i++) {
            key.set(i);
            value.set(i + 1);
            writer.append(key, value);
        }
    }

    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        for (long i = 0; i < 300000; i++) {
            assertThat(reader.next(key, value), is(true));
            assertThat(key.get(), is(i));
            assertThat(value.get(), is(i + 1));
        }
        assertThat(reader.next(key, value), is(false));
    }
}

From source file: com.asakusafw.runtime.io.sequencefile.SequenceFileUtilTest.java

License: Apache License

/**
 * Reads a large sequence file.
 * @throws Exception if failed
 */
@Test
public void read_large() throws Exception {
    Path path = new Path("large");

    LongWritable key = new LongWritable();
    LongWritable value = new LongWritable();

    try (SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path, key.getClass(),
            value.getClass())) {
        for (long i = 0; i < 300000; i++) {
            key.set(i);
            value.set(i + 1);
            writer.append(key, value);
        }
    }

    FileStatus status = fs.getFileStatus(path);
    try (InputStream in = new FileInputStream(fs.pathToFile(path));
            SequenceFile.Reader reader = SequenceFileUtil.openReader(in, status, conf)) {
        for (long i = 0; i < 300000; i++) {
            assertThat(reader.next(key, value), is(true));
            assertThat(key.get(), is(i));
            assertThat(value.get(), is(i + 1));
        }
        assertThat(reader.next(key, value), is(false));
    }
}

From source file: com.asakusafw.runtime.io.sequencefile.SequenceFileUtilTest.java

License: Apache License

/**
 * Creates a large sequence file.
 * @throws Exception if failed
 */
@Test
public void write_large() throws Exception {
    Path path = new Path("testing");

    LongWritable key = new LongWritable();
    LongWritable value = new LongWritable();
    try (OutputStream out = new FileOutputStream(fs.pathToFile(path));
            SequenceFile.Writer writer = SequenceFileUtil.openWriter(new BufferedOutputStream(out), conf,
                    key.getClass(), value.getClass(), null)) {
        for (long i = 0; i < 300000; i++) {
            key.set(i);
            value.set(i + 1);
            writer.append(key, value);
        }
    }

    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        for (long i = 0; i < 300000; i++) {
            assertThat(reader.next(key, value), is(true));
            assertThat(key.get(), is(i));
            assertThat(value.get(), is(i + 1));
        }
        assertThat(reader.next(key, value), is(false));
    }
}

From source file: com.asakusafw.runtime.io.sequencefile.SequenceFileUtilTest.java

License: Apache License

/**
 * Creates a compressed sequence file.
 * @throws Exception if failed
 */
@Test
public void write_compressed() throws Exception {
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf);

    Path path = new Path("testing");

    LongWritable key = new LongWritable();
    LongWritable value = new LongWritable();
    try (OutputStream out = new FileOutputStream(fs.pathToFile(path));
            SequenceFile.Writer writer = SequenceFileUtil.openWriter(new BufferedOutputStream(out), conf,
                    key.getClass(), value.getClass(), codec)) {
        for (long i = 0; i < 300000; i++) {
            key.set(i);
            value.set(i + 1);
            writer.append(key, value);
        }
    }

    try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
        for (long i = 0; i < 300000; i++) {
            assertThat(reader.next(key, value), is(true));
            assertThat(key.get(), is(i));
            assertThat(value.get(), is(i + 1));
        }
        assertThat(reader.next(key, value), is(false));
    }
}

From source file: com.axiomine.largecollections.kryo.serializers.LongWritableSerializer.java

License: Apache License

public void write(Kryo kryo, Output output, LongWritable object) {
    output.writeLong(object.get(), false);
}
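
Only the write half of the serializer is shown; a matching read method, sketched here under the assumption that it mirrors the variable-length encoding above (it is not code from the project), could look like:

public LongWritable read(Kryo kryo, Input input, Class<LongWritable> type) {
    // mirror of write(): decode the varlong and wrap it back into a LongWritable
    return new LongWritable(input.readLong(false));
}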

From source file: com.benchmark.mapred.PiEstimator.java

License: Apache License

/**
 * Run a map/reduce job for estimating Pi.
 *
 * @return the estimated value of Pi
 */
public static BigDecimal estimate(int numMaps, long numPoints, JobConf jobConf) throws IOException {
    //setup job conf
    jobConf.setJobName(PiEstimator.class.getSimpleName());

    jobConf.setInputFormat(SequenceFileInputFormat.class);

    jobConf.setOutputKeyClass(BooleanWritable.class);
    jobConf.setOutputValueClass(LongWritable.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setMapperClass(PiMapper.class);
    jobConf.setNumMapTasks(numMaps);

    jobConf.setReducerClass(PiReducer.class);
    jobConf.setNumReduceTasks(1);

    // turn off speculative execution, because DFS doesn't handle
    // multiple writers to the same file.
    jobConf.setSpeculativeExecution(false);

    //setup input/output directories
    //final Path inDir = new Path(TMP_DIR, "in");
    final Path inDir = new Path("/home/hadoop1/tmp_dir", "in");
    System.out.println("inDir =" + inDir.toString());
    //final Path outDir = new Path(TMP_DIR, "out");
    final Path outDir = new Path("/home/hadoop1/tmp_dir", "out");
    System.out.println("outDir =" + outDir.toString());
    FileInputFormat.setInputPaths(jobConf, inDir);
    FileOutputFormat.setOutputPath(jobConf, outDir);

    final FileSystem fs = FileSystem.get(jobConf);
    if (fs.exists(TMP_DIR)) {
        throw new IOException(
                "Tmp directory " + fs.makeQualified(TMP_DIR) + " already exists.  Please remove it first.");
    }
    if (!fs.mkdirs(inDir)) {
        throw new IOException("Cannot create input directory " + inDir);
    }

    try {
        //generate an input file for each map task
        for (int i = 0; i < numMaps; ++i) {
            final Path file = new Path(inDir, "part" + i);
            final LongWritable offset = new LongWritable(i * numPoints);
            final LongWritable size = new LongWritable(numPoints);
            final SequenceFile.Writer writer = SequenceFile.createWriter(fs, jobConf, file, LongWritable.class,
                    LongWritable.class, CompressionType.NONE);
            try {
                writer.append(offset, size);
            } finally {
                writer.close();
            }
            System.out.println("Wrote input for Map #" + i);
        }

        //start a map/reduce job
        System.out.println("Starting Job");
        final long startTime = System.currentTimeMillis();
        JobClient.runJob(jobConf);
        final double duration = (System.currentTimeMillis() - startTime) / 1000.0;
        System.out.println("Job Finished in " + duration + " seconds");

        //read outputs
        Path inFile = new Path(outDir, "reduce-out");
        LongWritable numInside = new LongWritable();
        LongWritable numOutside = new LongWritable();
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, inFile, jobConf);
        try {
            reader.next(numInside, numOutside);
        } finally {
            reader.close();
        }

        //compute estimated value
        return BigDecimal.valueOf(4).setScale(20).multiply(BigDecimal.valueOf(numInside.get()))
                .divide(BigDecimal.valueOf(numMaps)).divide(BigDecimal.valueOf(numPoints));
    } finally {
        fs.delete(TMP_DIR, true);
    }
}
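
The closing arithmetic is the standard Monte Carlo estimator: numInside.get() is the number of sample points that landed inside the quarter circle, out of numMaps * numPoints samples in total, so the expression evaluates 4 * numInside / (numMaps * numPoints) as a BigDecimal with a scale of 20 digits.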

From source file: com.bizosys.hsearch.kv.indexing.KVMapperFile.java

License: Apache License

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

    if (isSkipHeader) {
        isSkipHeader = false;
        if (0 == key.get())
            return;
    }

    if (null == result) {
        ArrayList<String> resultL = new ArrayList<String>();
        LineReaderUtil.fastSplit(resultL, value.toString(), KVIndexer.FIELD_SEPARATOR);
        result = new String[resultL.size()];
    }

    Arrays.fill(result, null);

    LineReaderUtil.fastSplit(result, value.toString(), KVIndexer.FIELD_SEPARATOR);
    kBase.map(result, context);

}
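
The key.get() call above relies on the contract of Hadoop's line-oriented input formats: the LongWritable key is the byte offset of the current line within the file, so an offset of 0 identifies the very first line, which is exactly the header row this mapper wants to skip once.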

From source file: com.blackberry.logdriver.pig.GzipLoadFunc.java

License: Apache License

@Override
public Tuple getNext() throws IOException {
    boolean moreData = rr.nextKeyValue();

    if (!moreData) {
        return null;
    }

    LongWritable lineNumber = rr.getCurrentKey();
    String message = rr.getCurrentValue().toString();

    Tuple tuple = tupleFactory.newTuple(2);
    tuple.set(0, lineNumber.get());
    tuple.set(1, message);

    return tuple;
}
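
A caveat worth noting: if the underlying record reader follows Hadoop's usual line-reader contract, the LongWritable it returns is the byte offset of the line rather than a true line number, despite the lineNumber variable name here.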

From source file: com.caseystella.analytics.distribution.RotationTest.java

License: Apache License

public static DataPoint nextDataPoint(Random r, LongWritable ts, long delta, List<DataPoint> points) {
    double val = r.nextDouble() * 1000;
    DataPoint dp = new DataPoint(ts.get(), val, null, "foo");
    if (points != null) {
        points.add(dp);
    }
    ts.set(ts.get() + delta);
    return dp;
}
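
Here the LongWritable serves as a mutable timestamp cursor: ts.get() stamps the current point and ts.set(ts.get() + delta) advances it for the next call. A hypothetical caller (the names below are illustrative, not from the project) might drive it like this:

Random random = new Random(0L);
LongWritable timestamp = new LongWritable(0L);
List<DataPoint> points = new ArrayList<>();
// each call stamps one point and advances the shared timestamp by 1000 ms
for (int i = 0; i < 10; i++) {
    nextDataPoint(random, timestamp, 1000L, points);
}
// timestamp.get() now returns 10000L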