Example usage for org.apache.hadoop.mapred JobConf setOutputFormat

List of usage examples for org.apache.hadoop.mapred JobConf setOutputFormat

Introduction

In this page you can find the example usage for org.apache.hadoop.mapred JobConf setOutputFormat.

Prototype

public void setOutputFormat(Class<? extends OutputFormat> theClass) 

Source Link

Document

Set the OutputFormat implementation for the map-reduce job.

Usage

From source file:de.tudarmstadt.lt.nlkg.ConvertInvertSVO.java

License: Apache License

@Override
public int run(String[] args) throws Exception {
    // Configure a single map-reduce pass that converts/inverts SVO triples.
    JobConf jobConf = new JobConf(getConf(), ConvertInvertSVO.class);
    jobConf.setJobName(ConvertInvertSVO.class.getSimpleName());

    // The mapper does the conversion; combiner and reducer pass records through.
    jobConf.setMapperClass(ConversionMapper.class);
    jobConf.setCombinerClass(IdentityReducer.class);
    jobConf.setReducerClass(IdentityReducer.class);

    // Plain-text input; output records are Text -> ConvertedWritable pairs.
    jobConf.setInputFormat(TextInputFormat.class);
    jobConf.setOutputFormat(TextOutputFormat.class);
    jobConf.setOutputKeyClass(Text.class);
    jobConf.setOutputValueClass(ConvertedWritable.class);

    // args[0] = input path, args[1] = output path.
    FileInputFormat.setInputPaths(jobConf, new Path(args[0]));
    FileOutputFormat.setOutputPath(jobConf, new Path(args[1]));

    // Submit and block until the job finishes; throws on failure.
    JobClient.runJob(jobConf);
    return 0;
}

From source file:de.tudarmstadt.lt.nlkg.ConvertSVO.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    // Build the job configuration for the SVO conversion pass.
    final JobConf job = new JobConf(getConf(), ConvertSVO.class);
    job.setJobName(ConvertSVO.class.getSimpleName());

    // I/O locations come from the command line: args[0] input, args[1] output.
    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // Conversion happens in the mapper; combiner and reducer are identity.
    job.setMapperClass(ConversionMapper.class);
    job.setCombinerClass(IdentityReducer.class);
    job.setReducerClass(IdentityReducer.class);

    // Text in, text out; records are Text keys with ConvertedWritable values.
    job.setInputFormat(TextInputFormat.class);
    job.setOutputFormat(TextOutputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(ConvertedWritable.class);

    // Blocks until the job completes; throws on failure.
    JobClient.runJob(job);
    return 0;
}

From source file:drivers.CalculatePageRank.java

@Override
public int run(String[] args) throws Exception {
    // One PageRank calculation iteration over key/value text input.
    JobConf calcJob = new JobConf(getConf(), this.getClass());
    calcJob.setJobName("Fiqie|Calculate");

    // Wire up the map and reduce implementations for this iteration.
    calcJob.setMapperClass(CalculatePageRank1Mapper.class);
    calcJob.setReducerClass(CalculatePageRank1Reducer.class);

    // Input lines are tab-separated key/value pairs; output is plain text.
    calcJob.setInputFormat(KeyValueTextInputFormat.class);
    calcJob.setOutputFormat(TextOutputFormat.class);
    calcJob.setOutputKeyClass(Text.class);
    calcJob.setOutputValueClass(Text.class);

    // args[0] = input dir, args[1] = output dir.
    FileInputFormat.setInputPaths(calcJob, new Path(args[0]));
    FileOutputFormat.setOutputPath(calcJob, new Path(args[1]));

    // Run synchronously; throws on job failure.
    JobClient.runJob(calcJob);
    return 0;
}

From source file:drivers.FinishPageRank.java

@Override
public int run(String[] args) throws Exception {
    // Final pass: sort pages by rank.
    JobConf sortJob = new JobConf(getConf(), this.getClass());
    sortJob.setJobName("Fiqie|Finish");

    // Sort keys in decreasing order so the highest-ranked pages come first.
    sortJob.setOutputKeyComparatorClass(DecreasingComparator.class);

    sortJob.setMapperClass(SortingPageRankMapper.class);
    sortJob.setReducerClass(SortingPageRankReducer.class);

    // Tab-separated key/value text in, plain text out.
    sortJob.setInputFormat(KeyValueTextInputFormat.class);
    sortJob.setOutputFormat(TextOutputFormat.class);
    sortJob.setOutputKeyClass(Text.class);
    sortJob.setOutputValueClass(Text.class);

    // args[0] = input dir, args[1] = output dir.
    FileInputFormat.setInputPaths(sortJob, new Path(args[0]));
    FileOutputFormat.setOutputPath(sortJob, new Path(args[1]));

    // Run synchronously; throws on job failure.
    JobClient.runJob(sortJob);
    return 0;
}

From source file:drivers.InitPageRank.java

@Override
public int run(String[] args) throws Exception {
    // Initialization pass of the PageRank pipeline.
    JobConf initJob = new JobConf(getConf(), this.getClass());
    initJob.setJobName("Fiqie|Init");

    initJob.setMapperClass(InitPageRankMapper.class);
    initJob.setReducerClass(InitPageRankReducer.class);

    // Tab-separated key/value text in, plain text out.
    initJob.setInputFormat(KeyValueTextInputFormat.class);
    initJob.setOutputFormat(TextOutputFormat.class);
    initJob.setOutputKeyClass(Text.class);
    initJob.setOutputValueClass(Text.class);

    // args[0] = input dir, args[1] = output dir.
    FileInputFormat.setInputPaths(initJob, new Path(args[0]));
    FileOutputFormat.setOutputPath(initJob, new Path(args[1]));

    // Run synchronously; throws on job failure.
    JobClient.runJob(initJob);
    return 0;
}

From source file:edu.brown.cs.mapreduce.benchmarks.Benchmark3.java

License: Open Source License

public int run(String[] args) throws Exception {
    // Three-phase benchmark: all three jobs are configured up front, then
    // executed in sequence with phase N's output feeding phase N+1's input.
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);

    Date startTime = new Date();
    System.out.println("Job started: " + startTime);

    // -------------------------------------------
    // Phase #1
    // -------------------------------------------
    JobConf p1_job = base.getJobConf();
    p1_job.setJobName(p1_job.getJobName() + ".Phase1");
    Path p1_output = new Path(base.getOutputPath().toString() + "/phase1");
    FileOutputFormat.setOutputPath(p1_job, p1_output);

    //
    // Make sure we have our properties
    //
    // Phase 1 requires a date window; abort early if either bound is missing.
    String required[] = { BenchmarkBase.PROPERTY_START_DATE, BenchmarkBase.PROPERTY_STOP_DATE };
    for (String req : required) {
        if (!base.getOptions().containsKey(req)) {
            System.err.println("ERROR: The property '" + req + "' is not set");
            System.exit(1);
        }
    } // FOR

    // Input format depends on how the benchmark data is stored; note that the
    // output format is only overridden in the sequence-file case (otherwise
    // the JobConf default is kept).
    p1_job.setInputFormat(
            base.getSequenceFile() ? SequenceFileInputFormat.class : KeyValueTextInputFormat.class);
    if (base.getSequenceFile())
        p1_job.setOutputFormat(SequenceFileOutputFormat.class);
    p1_job.setOutputKeyClass(Text.class);
    p1_job.setOutputValueClass(Text.class);
    // Mapper/reducer variants are selected by data representation
    // (TupleWritable records vs. plain text).
    p1_job.setMapperClass(
            base.getTupleData() ? edu.brown.cs.mapreduce.benchmarks.benchmark3.phase1.TupleWritableMap.class
                    : edu.brown.cs.mapreduce.benchmarks.benchmark3.phase1.TextMap.class);
    p1_job.setReducerClass(
            base.getTupleData() ? edu.brown.cs.mapreduce.benchmarks.benchmark3.phase1.TupleWritableReduce.class
                    : edu.brown.cs.mapreduce.benchmarks.benchmark3.phase1.TextReduce.class);
    p1_job.setCompressMapOutput(base.getCompress());

    // -------------------------------------------
    // Phase #2
    // -------------------------------------------
    JobConf p2_job = base.getJobConf();
    p2_job.setJobName(p2_job.getJobName() + ".Phase2");
    p2_job.setInputFormat(
            base.getSequenceFile() ? SequenceFileInputFormat.class : KeyValueTextInputFormat.class);
    if (base.getSequenceFile())
        p2_job.setOutputFormat(SequenceFileOutputFormat.class);
    p2_job.setOutputKeyClass(Text.class);
    p2_job.setOutputValueClass(Text.class);
    // Phase 2 only aggregates in the reducer; the map side is a pass-through.
    p2_job.setMapperClass(IdentityMapper.class);
    p2_job.setReducerClass(
            base.getTupleData() ? edu.brown.cs.mapreduce.benchmarks.benchmark3.phase2.TupleWritableReduce.class
                    : edu.brown.cs.mapreduce.benchmarks.benchmark3.phase2.TextReduce.class);
    p2_job.setCompressMapOutput(base.getCompress());
    p2_job.setNumMapTasks(60);

    // -------------------------------------------
    // Phase #3
    // -------------------------------------------
    JobConf p3_job = base.getJobConf();
    p3_job.setJobName(p3_job.getJobName() + ".Phase3");
    // A single reducer produces one final output file.
    p3_job.setNumReduceTasks(1);
    p3_job.setInputFormat(
            base.getSequenceFile() ? SequenceFileInputFormat.class : KeyValueTextInputFormat.class);
    p3_job.setOutputKeyClass(Text.class);
    p3_job.setOutputValueClass(Text.class);
    //p3_job.setMapperClass(Phase3Map.class);
    p3_job.setMapperClass(IdentityMapper.class);
    p3_job.setReducerClass(
            base.getTupleData() ? edu.brown.cs.mapreduce.benchmarks.benchmark3.phase3.TupleWritableReduce.class
                    : edu.brown.cs.mapreduce.benchmarks.benchmark3.phase3.TextReduce.class);

    //
    // Execute #1
    //
    base.runJob(p1_job);

    //
    // Execute #2
    //
    // Phase 2 reads phase 1's output.
    Path p2_output = new Path(base.getOutputPath().toString() + "/phase2");
    FileOutputFormat.setOutputPath(p2_job, p2_output);
    FileInputFormat.setInputPaths(p2_job, p1_output);
    base.runJob(p2_job);

    //
    // Execute #3
    //
    // Phase 3 reads phase 2's output.
    Path p3_output = new Path(base.getOutputPath().toString() + "/phase3");
    FileOutputFormat.setOutputPath(p3_job, p3_output);
    FileInputFormat.setInputPaths(p3_job, p2_output);
    base.runJob(p3_job);

    // NOTE(review): a combine step (if (base.getCombine()) base.runCombine();)
    // is intentionally not run here, per the original author's comment.

    return 0;
}

From source file:edu.brown.cs.mapreduce.benchmarks.Sort.java

License:Open Source License

public int run(String[] args) throws Exception {
    // Sort benchmark: identity map/reduce so that ordering comes entirely
    // from the framework's shuffle/sort; result is written as a sequence file.
    final BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);
    final JobConf job = base.getJobConf();

    job.setMapperClass(IdentityMapper.class);
    job.setReducerClass(IdentityReducer.class);

    // Tab-separated key/value text in, sequence-file out; Text keys and values.
    job.setInputFormat(KeyValueTextInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    base.runJob(job);

    // Optionally run the follow-up combine step.
    if (base.getCombine()) {
        base.runCombine();
    }

    return 0;
}

From source file:edu.iit.marketbasket.MarketBasket.java

public static void main(String[] args) throws IOException {
    // Configure and run the MarketBasket job: Map/Reduce classes are the
    // nested project classes; counts are emitted as Text -> IntWritable.
    JobConf conf = new JobConf(MarketBasket.class);
    conf.setJobName("MarketBasket");
    conf.setMapperClass(Map.class);
    conf.setReducerClass(Reduce.class);
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    // args[0] = input path, args[1] = output path.
    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    // JobClient.runJob() already blocks until the job completes and throws on
    // failure, so the former waitForCompletion() call on the returned
    // RunningJob was redundant and has been removed.
    JobClient.runJob(conf);
}

From source file:edu.ncku.ikdd.ArtistAnalysis.java

public static void main(String[] argv) throws Exception {
    // Set up the artist-analysis job: map, local combine, then final reduce.
    final JobConf job = new JobConf(ArtistAnalysis.class);
    job.setJobName("artistanalysis");

    job.setMapperClass(Map.class);
    job.setCombinerClass(Combine.class);
    job.setReducerClass(Reduce.class);

    // Plain-text records; both key and value are emitted as Text.
    job.setInputFormat(TextInputFormat.class);
    job.setOutputFormat(TextOutputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // argv[0] = input path, argv[1] = output path.
    FileInputFormat.setInputPaths(job, new Path(argv[0]));
    FileOutputFormat.setOutputPath(job, new Path(argv[1]));

    // Submit and block until completion; throws on failure.
    JobClient.runJob(job);
}

From source file:edu.ncku.ikdd.DataMining.java

public static void main(String[] argv) throws Exception {
    // Apriori-style frequent-itemset mining: alternate a support-counting job
    // and a candidate-generation job, growing the candidate length by one per
    // round, until the candidate job produces an empty output file.
    //
    // argv[0] = input path, argv[1] = final output dir, argv[2] = min support.
    int candidateLength = 1;
    FileSystem dfs = FileSystem.get(new Configuration());
    do {
        // --- Count job: support counts for itemsets of length candidateLength.
        JobConf countConf = new JobConf(DataMining.class);

        countConf.setOutputKeyClass(Text.class);
        countConf.setOutputValueClass(IntWritable.class);

        countConf.setMapperClass(CountMap.class);
        countConf.setCombinerClass(CountCombine.class);
        countConf.setReducerClass(CountReduce.class);

        countConf.setInputFormat(TextInputFormat.class);
        countConf.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(countConf, new Path(argv[0]));
        FileOutputFormat.setOutputPath(countConf, new Path(count_path + String.valueOf(candidateLength)));
        countConf.setInt("minSupport", Integer.valueOf(argv[2]));
        countConf.setInt("candidateLength", candidateLength);
        JobClient.runJob(countConf);

        ++candidateLength;

        // --- Candidate job: generate candidates of the next length from the
        // frequent itemsets just counted.
        JobConf candidateConf = new JobConf(DataMining.class);

        candidateConf.setOutputKeyClass(Text.class);
        candidateConf.setOutputValueClass(Text.class);

        candidateConf.setMapperClass(CandidateMap.class);
        candidateConf.setReducerClass(CandidateReduce.class);

        candidateConf.setInputFormat(TextInputFormat.class);
        candidateConf.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(candidateConf,
                new Path(count_path + String.valueOf(candidateLength - 1) + "/part-00000"));
        FileOutputFormat.setOutputPath(candidateConf,
                new Path(candidate_path + String.valueOf(candidateLength)));
        candidateConf.setInt("candidateLength", candidateLength);

        JobClient.runJob(candidateConf);

    } while (dfs.getFileStatus(new Path(candidate_path + String.valueOf(candidateLength) + "/part-00000"))
            .getLen() > 0);

    // Merge every per-length count output into a single result file.
    // try-with-resources ensures both streams are closed even if an I/O error
    // occurs mid-copy (the original leaked br and bw on exception).
    try (BufferedWriter bw = new BufferedWriter(
            new OutputStreamWriter(dfs.create(new Path(argv[1] + "/part-00000"))))) {
        String line;
        for (int i = 1; i < candidateLength; ++i) {
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(dfs.open(new Path(count_path + String.valueOf(i) + "/part-00000"))))) {
                while ((line = br.readLine()) != null) {
                    bw.write(line + "\n");
                }
            }
        }
    }
}