Example usage for org.apache.hadoop.io LongWritable LongWritable

Introduction

This page collects example usages of the org.apache.hadoop.io.LongWritable LongWritable(long) constructor.

Prototype

public LongWritable(long value) 
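
For orientation, a minimal self-contained sketch (not taken from any of the projects below) showing how this constructor relates to the no-argument form and set(long):

import org.apache.hadoop.io.LongWritable;

public class LongWritableExample {
    public static void main(String[] args) {
        // Construct with an initial value (the prototype shown above).
        LongWritable fromConstructor = new LongWritable(42L);

        // The no-argument form defaults to 0 and can be reused via set().
        LongWritable reusable = new LongWritable();
        reusable.set(42L);

        // Both wrap the same long value, and equals()/hashCode() agree on it.
        System.out.println(fromConstructor.get());             // 42
        System.out.println(fromConstructor.equals(reusable));  // true
    }
}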

Usage

From source file:com.alectenharmsel.research.MoabLicensesMapperTest.java

@Test
public void testLicenseLine() throws IOException {
    driver.withInput(new LongWritable(0), new Text(
            "05/11 22:58:25  INFO:     License cfd_solv_ser        0 of   6 available  (Idle: 33.3%  Active: 66.67%)"))
            .withOutput(new Text("cfd_solv_ser-05-11"), new Text("0,6")).runTest();
}

From source file:com.alectenharmsel.research.MoabLogSearchMapper.java

License:Apache License

public void map(LongWritable key, Text contents, Context context) throws IOException, InterruptedException {
    String tmp = contents.toString();

    if (tmp.contains("ERROR")) {
        context.write(new LongWritable(0), contents);
    }
}
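
The mapper above allocates a new LongWritable(0) for every matching record. A common Hadoop idiom, sketched here with an assumed field name rather than the project's actual code, is to reuse a single pre-allocated key:

// Sketch: reuse one constant key instead of constructing a LongWritable per record.
private static final LongWritable ZERO = new LongWritable(0);

public void map(LongWritable key, Text contents, Context context) throws IOException, InterruptedException {
    if (contents.toString().contains("ERROR")) {
        context.write(ZERO, contents);
    }
}

Reuse is safe because the framework serializes the key and value when write() is called.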

From source file:com.asakusafw.directio.hive.serde.LongOptionInspector.java

License:Apache License

@Override
public Object getPrimitiveWritableObject(Object o) {
    LongOption object = (LongOption) o;
    if (object == null || object.isNull()) {
        return null;
    }
    return new LongWritable(object.get());
}

From source file:com.asakusafw.thundergate.runtime.cache.mapreduce.TableJoinBaseMapper.java

License:Apache License

private static <T extends Writable> Set<LongWritable> buildConflicts(Context context, Class<T> dataType)
        throws IOException {
    Configuration conf = context.getConfiguration();
    List<Path> caches = getPatchPaths(context);
    T buffer = ReflectionUtils.newInstance(dataType, conf);
    Set<LongWritable> results = new HashSet<>();
    for (Path path : caches) {
        try (ModelInput<T> input = TemporaryStorage.openInput(conf, dataType, path)) {
            while (input.readTo(buffer)) {
                results.add(new LongWritable(((ThunderGateCacheSupport) buffer).__tgc__SystemId()));
            }
        }
    }
    return results;
}
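
Collecting the IDs into a HashSet works because LongWritable implements equals() and hashCode() on the wrapped long. A small hypothetical helper (the name isConflict is an assumption, not part of the project) showing how such a set might be probed with a reusable key:

// Hypothetical helper: probe the conflict set with a reusable LongWritable key.
private static boolean isConflict(Set<LongWritable> conflicts, long systemId, LongWritable probe) {
    probe.set(systemId);
    return conflicts.contains(probe);
}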

From source file:com.avira.couchdoop.AppTest.java

License:Apache License

@Test
public void testMapper() throws IOException {
    // Inputs
    mapDriver.addInput(new LongWritable(0L), new Text("Mary had a little lamb,"));
    mapDriver.addInput(new LongWritable(24L), new Text("Little lamb, little lamb."));

    // Outputs
    mapDriver.addOutput(new LongWritable(0L), new Text("Mary had a little lamb,"));
    mapDriver.addOutput(new LongWritable(24L), new Text("Little lamb, little lamb."));

    mapDriver.runTest();
}

From source file:com.avira.couchdoop.AppTest.java

License:Apache License

@Test
public void testReducer() throws IOException {
    // Inputs
    List<Text> values = new ArrayList<Text>();
    values.add(new Text("Mary had a little lamb,"));
    values.add(new Text("Little lamb, little lamb."));
    reduceDriver.addInput(new LongWritable(10L), values);

    // Outputs
    reduceDriver.addOutput(new LongWritable(10L), new Text("Mary had a little lamb,"));
    reduceDriver.addOutput(new LongWritable(10L), new Text("Little lamb, little lamb."));

    reduceDriver.runTest();
}

From source file:com.avira.couchdoop.AppTest.java

License:Apache License

@Test
public void testJob() throws IOException {
    // Inputs
    mrDriver.addInput(new LongWritable(0L), new Text("Mary had a little lamb,"));
    mrDriver.addInput(new LongWritable(24L), new Text("Little lamb, little lamb."));

    // Outputs
    mrDriver.addOutput(new LongWritable(0L), new Text("Mary had a little lamb,"));
    mrDriver.addOutput(new LongWritable(24L), new Text("Little lamb, little lamb."));

    mrDriver.runTest();
}

From source file:com.awcoleman.BouncyCastleGenericCDRHadoop.RawFileRecordReader.java

License:Apache License

@Override
public boolean nextKeyValue() throws IOException, InterruptedException {

    if (isProcessed)
        return false;

    currentKey = new Text(path.getName());

    int recordCounter = 0;
    while ((obj = asnin.readObject()) != null) {

        CallDetailRecord thisCdr = new CallDetailRecord((ASN1Sequence) obj);
        recordCounter++;

        System.out.println("CallDetailRecord " + thisCdr.getRecordNumber() + " Calling "
                + thisCdr.getCallingNumber() + " Called " + thisCdr.getCalledNumber() + " Start Date-Time "
                + thisCdr.getStartDate() + "-" + thisCdr.getStartTime() + " duration " + thisCdr.getDuration());

    }
    isProcessed = true;

    //Return number of records
    currentValue = new LongWritable(recordCounter);

    return true;
}

From source file:com.axiomine.largecollections.kryo.serializers.LongWritableSerializer.java

License:Apache License

public LongWritable read(Kryo kryo, Input input, Class<LongWritable> type) {
    return new LongWritable(input.readLong(false));
}
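
Kryo serializers pair read with write. A plausible matching write side, assuming the same optimizePositive = false variable-length encoding used in read above (a sketch, not copied from the project):

public void write(Kryo kryo, Output output, LongWritable object) {
    // Mirror of read: variable-length long with optimizePositive = false.
    output.writeLong(object.get(), false);
}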

From source file:com.benchmark.mapred.PiEstimator.java

License:Apache License

/**
 * Run a map/reduce job for estimating Pi.
 *
 * @return the estimated value of Pi
 */
public static BigDecimal estimate(int numMaps, long numPoints, JobConf jobConf) throws IOException {
    //setup job conf
    jobConf.setJobName(PiEstimator.class.getSimpleName());

    jobConf.setInputFormat(SequenceFileInputFormat.class);

    jobConf.setOutputKeyClass(BooleanWritable.class);
    jobConf.setOutputValueClass(LongWritable.class);
    jobConf.setOutputFormat(SequenceFileOutputFormat.class);

    jobConf.setMapperClass(PiMapper.class);
    jobConf.setNumMapTasks(numMaps);

    jobConf.setReducerClass(PiReducer.class);
    jobConf.setNumReduceTasks(1);

    // turn off speculative execution, because DFS doesn't handle
    // multiple writers to the same file.
    jobConf.setSpeculativeExecution(false);

    //setup input/output directories
    //final Path inDir = new Path(TMP_DIR, "in");
    final Path inDir = new Path("/home/hadoop1/tmp_dir", "in");
    System.out.println("inDir =" + inDir.toString());
    //final Path outDir = new Path(TMP_DIR, "out");
    final Path outDir = new Path("/home/hadoop1/tmp_dir", "out");
    System.out.println("outDir =" + outDir.toString());
    FileInputFormat.setInputPaths(jobConf, inDir);
    FileOutputFormat.setOutputPath(jobConf, outDir);

    final FileSystem fs = FileSystem.get(jobConf);
    if (fs.exists(TMP_DIR)) {
        throw new IOException(
                "Tmp directory " + fs.makeQualified(TMP_DIR) + " already exists.  Please remove it first.");
    }
    if (!fs.mkdirs(inDir)) {
        throw new IOException("Cannot create input directory " + inDir);
    }

    try {
        //generate an input file for each map task
        for (int i = 0; i < numMaps; ++i) {
            final Path file = new Path(inDir, "part" + i);
            final LongWritable offset = new LongWritable(i * numPoints);
            final LongWritable size = new LongWritable(numPoints);
            final SequenceFile.Writer writer = SequenceFile.createWriter(fs, jobConf, file, LongWritable.class,
                    LongWritable.class, CompressionType.NONE);
            try {
                writer.append(offset, size);
            } finally {
                writer.close();
            }
            System.out.println("Wrote input for Map #" + i);
        }

        //start a map/reduce job
        System.out.println("Starting Job");
        final long startTime = System.currentTimeMillis();
        JobClient.runJob(jobConf);
        final double duration = (System.currentTimeMillis() - startTime) / 1000.0;
        System.out.println("Job Finished in " + duration + " seconds");

        //read outputs
        Path inFile = new Path(outDir, "reduce-out");
        LongWritable numInside = new LongWritable();
        LongWritable numOutside = new LongWritable();
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, inFile, jobConf);
        try {
            reader.next(numInside, numOutside);
        } finally {
            reader.close();
        }

        //compute estimated value
        return BigDecimal.valueOf(4).setScale(20).multiply(BigDecimal.valueOf(numInside.get()))
                .divide(BigDecimal.valueOf(numMaps)).divide(BigDecimal.valueOf(numPoints));
    } finally {
        fs.delete(TMP_DIR, true);
    }
}
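
The final expression evaluates 4 * numInside / (numMaps * numPoints). For example, with numMaps = 10 and numPoints = 100000 (1,000,000 sample points in total), a reducer output of numInside = 785398 would yield 4 * 785398 / 1,000,000 = 3.141592.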