List of usage examples for org.apache.hadoop.mapred JobConf setInputFormat
public void setInputFormat(Class<? extends InputFormat> theClass)
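setInputFormat tells the old (org.apache.hadoop.mapred) API which InputFormat implementation should split and parse the job's input; if it is never called, TextInputFormat is the default. Before the per-project examples, here is a minimal, self-contained sketch of where the call fits in a driver. The class name SetInputFormatExample and the identity map/reduce wiring are illustrative assumptions, not taken from any of the source files below:

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

// Hypothetical driver class, for illustration only.
public class SetInputFormatExample {
    public static void main(String[] args) throws IOException {
        JobConf conf = new JobConf(SetInputFormatExample.class);
        conf.setJobName("setinputformat-example");

        // The call this page documents: it tells the job how to split and
        // parse its input. TextInputFormat yields one record per line,
        // keyed by byte offset (LongWritable) with the line as Text.
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        // Identity map/reduce pass records through unchanged, so the
        // output key/value types match what TextInputFormat produces.
        conf.setMapperClass(IdentityMapper.class);
        conf.setReducerClass(IdentityReducer.class);
        conf.setOutputKeyClass(LongWritable.class);
        conf.setOutputValueClass(Text.class);

        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        JobClient.runJob(conf); // blocks until the job finishes
    }
}

Most of the examples below pair setInputFormat with setOutputFormat and the output key/value classes; note that the Grep example only calls setInputFormat when reading SequenceFiles, relying on the TextInputFormat default otherwise.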
From source file:edu.brown.cs.mapreduce.benchmarks.Grep.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);
    // -------------------------------------------
    // Search
    // -------------------------------------------
    JobConf p1_job = base.getJobConf();

    // We have to grab all of the dirs in the directory that was passed in
    // List<Path> inputs = base.getInputPaths();
    // if (false && inputs.size() == 1) {
    //     Path input_path = inputs.get(0);
    //     FileSystem fs = null;
    //     try {
    //         fs = FileSystem.get(this.getConf());
    //         if (fs.getFileStatus(input_path).isDir()) {
    //             //p1_job.set("mapred.input.dir", "");
    //             FileStatus paths[] = fs.listStatus(input_path);
    //             for (FileStatus p : paths) {
    //                 FileInputFormat.addInputPath(p1_job, p.getPath());
    //             }
    //         }
    //     } catch (Exception ex) {
    //         ex.printStackTrace();
    //         System.exit(-1);
    //     }
    // }

    if (base.getSequenceFile())
        p1_job.setInputFormat(SequenceFileInputFormat.class);

    p1_job.setMapperClass(Grep.RegexMapper.class);
    //p1_job.setCombinerClass(LongSumReducer.class);
    //p1_job.setReducerClass(LongSumReducer.class);
    //p1_job.setOutputFormat(SequenceFileOutputFormat.class);
    p1_job.setOutputKeyClass(Text.class);
    p1_job.setOutputValueClass(Text.class);

    base.runJob(p1_job);
    if (base.getCombine())
        base.runCombine();
    return 0;
}
From source file:edu.brown.cs.mapreduce.benchmarks.Sort.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);
    JobConf p1_job = base.getJobConf();

    p1_job.setMapperClass(IdentityMapper.class);
    p1_job.setReducerClass(IdentityReducer.class);
    p1_job.setInputFormat(KeyValueTextInputFormat.class);
    p1_job.setOutputFormat(SequenceFileOutputFormat.class);
    //p1_job.setOutputFormat(NullOutputFormat.class);
    p1_job.setOutputKeyClass(Text.class);
    p1_job.setOutputValueClass(Text.class);

    base.runJob(p1_job);
    if (base.getCombine())
        base.runCombine();
    return 0;
}
From source file:edu.brown.cs.mapreduce.demo.OrderSum.java
License:Open Source License
/**
 * The main driver for the OrderSum map/reduce program.
 * Invoke this method to submit the map/reduce job.
 * @throws IOException When there are communication problems with the job tracker.
 */
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(this.getConf(), OrderSum.class);
    conf.setJobName(OrderSum.class.getSimpleName());

    // Input File Format
    conf.setInputFormat(KeyValueTextInputFormat.class);

    // Output Key/Value Types
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(DoubleWritable.class);

    // Map/Reduce Classes
    conf.setMapperClass(OrderSum.OrderSumMapper.class);
    conf.setReducerClass(OrderSum.OrderSumReducer.class);

    // Input/Output Paths (HDFS)
    FileInputFormat.setInputPaths(conf, "/demo/input/");
    FileOutputFormat.setOutputPath(conf, new Path("/demo/output/"));

    /***** Additional Features *****/

    // Compression
    //conf.setCompressMapOutput(true);

    // Combine
    //conf.setCombinerClass(OrderSum.OrderSumReducer.class);

    // Create a single output file
    conf.setNumReduceTasks(1);

    // Pass search date on command-line
    /* uncomment configure!
    if (args.length == 1) {
        conf.set("edu.brown.cs.pavlo.search_date", args[0]);
    }*/

    // Bombs away!
    JobClient.runJob(conf);
    return 0;
}
From source file:edu.iit.marketbasket.MarketBasket.java
public static void main(String[] args) throws IOException {
    JobConf conf = new JobConf(MarketBasket.class);
    conf.setJobName("MarketBasket");

    conf.setMapperClass(Map.class);
    conf.setReducerClass(Reduce.class);
    //conf.setJarByClass(MarketBasket.class);
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    // JobClient.runJob() already blocks until the job finishes,
    // so the trailing waitForCompletion() call is a no-op.
    JobClient.runJob(conf).waitForCompletion();
}
From source file:edu.ncku.ikdd.ArtistAnalysis.java
public static void main(String[] argv) throws Exception {
    JobConf conf = new JobConf(ArtistAnalysis.class);
    conf.setJobName("artistanalysis");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(Map.class);
    conf.setCombinerClass(Combine.class);
    conf.setReducerClass(Reduce.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(argv[0]));
    FileOutputFormat.setOutputPath(conf, new Path(argv[1]));

    JobClient.runJob(conf);
}
From source file:edu.ncku.ikdd.DataMining.java
public static void main(String[] argv) throws Exception {
    int candidateLength = 1;
    FileSystem dfs = FileSystem.get(new Configuration());
    do {
        // Count the support of candidate itemsets of the current length.
        JobConf countConf = new JobConf(DataMining.class);
        countConf.setOutputKeyClass(Text.class);
        countConf.setOutputValueClass(IntWritable.class);
        countConf.setMapperClass(CountMap.class);
        countConf.setCombinerClass(CountCombine.class);
        countConf.setReducerClass(CountReduce.class);
        countConf.setInputFormat(TextInputFormat.class);
        countConf.setOutputFormat(TextOutputFormat.class);
        FileInputFormat.setInputPaths(countConf, new Path(argv[0]));
        FileOutputFormat.setOutputPath(countConf, new Path(count_path + String.valueOf(candidateLength)));
        countConf.setInt("minSupport", Integer.valueOf(argv[2]));
        countConf.setInt("candidateLength", candidateLength);
        JobClient.runJob(countConf);

        ++candidateLength;

        // Generate candidate itemsets of the next length from the surviving counts.
        JobConf candidateConf = new JobConf(DataMining.class);
        candidateConf.setOutputKeyClass(Text.class);
        candidateConf.setOutputValueClass(Text.class);
        candidateConf.setMapperClass(CandidateMap.class);
        candidateConf.setReducerClass(CandidateReduce.class);
        candidateConf.setInputFormat(TextInputFormat.class);
        candidateConf.setOutputFormat(TextOutputFormat.class);
        FileInputFormat.setInputPaths(candidateConf,
                new Path(count_path + String.valueOf(candidateLength - 1) + "/part-00000"));
        FileOutputFormat.setOutputPath(candidateConf,
                new Path(candidate_path + String.valueOf(candidateLength)));
        candidateConf.setInt("candidateLength", candidateLength);
        JobClient.runJob(candidateConf);
    } while (dfs.getFileStatus(new Path(candidate_path + String.valueOf(candidateLength) + "/part-00000"))
            .getLen() > 0);

    // Merge the per-length count outputs into a single result file.
    BufferedReader br;
    BufferedWriter bw = new BufferedWriter(
            new OutputStreamWriter(dfs.create(new Path(argv[1] + "/part-00000"))));
    String line;
    for (int i = 1; i < candidateLength; ++i) {
        br = new BufferedReader(
                new InputStreamReader(dfs.open(new Path(count_path + String.valueOf(i) + "/part-00000"))));
        while ((line = br.readLine()) != null) {
            bw.write(line + "\n");
        }
        br.close();
    }
    bw.close();
}
From source file:edu.ncku.ikdd.TempRecord.java
public static void main(String[] argv) throws Exception {
    JobConf conf = new JobConf(TempRecord.class);
    conf.setJobName("temprecord");

    conf.setOutputKeyClass(IntWritable.class);
    conf.setOutputValueClass(IntWritable.class);

    conf.setMapperClass(Map.class);
    conf.setCombinerClass(Reduce.class);
    conf.setReducerClass(Reduce.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(argv[0]));
    FileOutputFormat.setOutputPath(conf, new Path(argv[1]));

    JobClient.runJob(conf);
}
From source file:edu.ncku.ikdd.TitleParser.java
public static void main(String[] argv) throws Exception {
    JobConf conf = new JobConf(TitleParser.class);
    conf.setJobName("titleparser");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(Map.class);
    conf.setCombinerClass(Reduce.class);
    conf.setReducerClass(Reduce.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(argv[0]));
    FileOutputFormat.setOutputPath(conf, new Path(argv[1]));

    JobClient.runJob(conf);
}
From source file:edu.ncku.ikdd.WordCount.java
public static void main(String[] argv) throws Exception {
    JobConf conf = new JobConf(WordCount.class);
    conf.setJobName("wordcount");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    conf.setMapperClass(Map.class);
    conf.setCombinerClass(Reduce.class);
    conf.setReducerClass(Reduce.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(argv[0]));
    FileOutputFormat.setOutputPath(conf, new Path(argv[1]));

    JobClient.runJob(conf);
}
From source file:edu.ohsu.sonmezsysbio.cloudbreak.command.CommandNovoalignSingleEnds.java
public void runHadoopJob(Configuration configuration) throws IOException, URISyntaxException {
    JobConf conf = new JobConf(configuration);
    conf.setJobName("Single End Alignment");
    conf.setJarByClass(Cloudbreak.class);

    FileInputFormat.addInputPath(conf, new Path(hdfsDataDir));
    Path outputDir = new Path(hdfsAlignmentsDir);
    FileSystem.get(conf).delete(outputDir);
    FileOutputFormat.setOutputPath(conf, outputDir);

    addDistributedCacheFile(conf, reference, "novoalign.reference");
    addDistributedCacheFile(conf, pathToNovoalign, "novoalign.executable");
    if (pathToNovoalignLicense != null) {
        addDistributedCacheFile(conf, pathToNovoalignLicense, "novoalign.license");
    }
    DistributedCache.createSymlink(conf);

    conf.set("mapred.task.timeout", "3600000");
    conf.set("novoalign.threshold", threshold);
    conf.set("novoalign.quality.format", qualityFormat);

    conf.setInputFormat(SequenceFileInputFormat.class);
    conf.setMapperClass(NovoalignSingleEndMapper.class);
    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(Text.class);
    conf.setCompressMapOutput(true);

    conf.setReducerClass(SingleEndAlignmentsToPairsReducer.class);
    conf.setOutputFormat(SequenceFileOutputFormat.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    // Snappy-compress the job output; the codec is selected via the
    // mapred.output.compression.codec property.
    conf.set("mapred.output.compress", "true");
    conf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");

    JobClient.runJob(conf);
}