List of usage examples for org.apache.hadoop.util.GenericOptionsParser#getConfiguration()
public Configuration getConfiguration()
From source file:com.alectenharmsel.hadoop.qa.LineCount.java
License:Apache License
public static void main(String[] args) throws Exception { GenericOptionsParser parse = new GenericOptionsParser(new Configuration(), args); Configuration conf = parse.getConfiguration(); String[] remainingArgs = parse.getRemainingArgs(); if (remainingArgs.length != 2) { System.err.println("Usage: LineCount <input> <output>"); System.exit(-1);//from w ww.jav a2s . c om } Job job = Job.getInstance(conf, "LineCount"); job.setJarByClass(LineCount.class); job.setMapperClass(Map.class); job.setCombinerClass(Reduce.class); job.setReducerClass(Reduce.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(LongWritable.class); FileInputFormat.addInputPath(job, new Path(remainingArgs[0])); FileOutputFormat.setOutputPath(job, new Path(remainingArgs[1])); boolean success = job.waitForCompletion(true); int res = success ? 0 : 1; System.exit(res); }
From source file:com.alectenharmsel.research.FileCombine.java
License:Apache License
public static void main(String[] args) throws Exception { GenericOptionsParser parse = new GenericOptionsParser(new Configuration(), args); Configuration conf = parse.getConfiguration(); int res = ToolRunner.run(conf, new FileCombine(), parse.getRemainingArgs()); System.exit(res);// w ww. ja v a2 s .c o m }
From source file:com.alectenharmsel.research.hadoop.CodeTokenizer.java
License:Apache License
public static void main(String[] args) throws Exception { GenericOptionsParser parse = new GenericOptionsParser(new Configuration(), args); Configuration conf = parse.getConfiguration(); int res = ToolRunner.run(conf, new CodeTokenizer(), parse.getRemainingArgs()); System.exit(res);//ww w .j a v a2 s .com }
From source file:com.alectenharmsel.research.hadoop.LcCounters.java
License:Apache License
public static void main(String[] args) throws Exception { GenericOptionsParser parse = new GenericOptionsParser(new Configuration(), args); Configuration conf = parse.getConfiguration(); String[] remainingArgs = parse.getRemainingArgs(); if (remainingArgs.length != 2) { System.err.println("Usage: LineCount <input> <output>"); System.exit(-1);// w w w . j a v a2 s . com } Job job = Job.getInstance(conf, "LineCount"); job.setJarByClass(LineCount.class); job.setMapperClass(Map.class); job.setCombinerClass(Reduce.class); job.setReducerClass(Reduce.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(LongWritable.class); FileInputFormat.addInputPath(job, new Path(remainingArgs[0])); FileOutputFormat.setOutputPath(job, new Path(remainingArgs[1])); boolean success = job.waitForCompletion(true); //Get the counter here and print it Counters counters = job.getCounters(); long total = counters.findCounter(LcCounters.NUM_LINES).getValue(); System.out.println(Long.toString(total)); int res = success ? 0 : 1; System.exit(res); }
From source file:com.alectenharmsel.research.hadoop.MoabLicenseInfo.java
License:Apache License
/**
 * Entry point for the MoabLicenseInfo job: parses generic Hadoop options,
 * forces a comma as the text-output separator, and runs the job over the
 * given input/output paths.
 *
 * <p>Exits -1 on bad usage; otherwise exits 0 on job success, 1 on failure.
 */
public static void main(String[] args) throws Exception {
    GenericOptionsParser parser = new GenericOptionsParser(new Configuration(), args);
    Configuration conf = parser.getConfiguration();
    // Emit CSV-style records: key,value instead of the default tab separator.
    conf.set("mapreduce.output.textoutputformat.separator", ",");

    String[] remainingArgs = parser.getRemainingArgs();
    if (remainingArgs.length != 2) {
        // Fixed: message previously said "LineCount" (copy-paste from LineCount.java).
        System.err.println("Usage: MoabLicenseInfo <input> <output>");
        System.exit(-1);
    }

    Job job = Job.getInstance(conf, "MoabLicenseInfo");
    job.setJarByClass(MoabLicenseInfo.class);
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    FileInputFormat.addInputPath(job, new Path(remainingArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(remainingArgs[1]));

    int res = job.waitForCompletion(true) ? 0 : 1;
    System.exit(res);
}
From source file:com.alectenharmsel.research.LcCounters.java
License:Apache License
public static void main(String[] args) throws Exception { GenericOptionsParser parse = new GenericOptionsParser(new Configuration(), args); Configuration conf = parse.getConfiguration(); int res = ToolRunner.run(conf, new LineCount(), parse.getRemainingArgs()); System.exit(res);// w ww .j av a 2s .com }
From source file:com.alectenharmsel.research.MoabLicenses.java
License:Apache License
public static void main(String[] args) throws Exception { GenericOptionsParser parser = new GenericOptionsParser(new Configuration(), args); Configuration conf = parser.getConfiguration(); conf.set("mapreduce.output.textoutputformat.separator", ","); int res = ToolRunner.run(conf, new MoabLicenses(), parser.getRemainingArgs()); System.exit(res);//from w ww. j ava 2 s . c om }
From source file:com.alectenharmsel.research.MoabLogSearch.java
License:Apache License
/**
 * Entry point: lets Hadoop strip its generic options, then delegates the
 * rest of the command line to the MoabLogSearch tool via ToolRunner.
 */
public static void main(String[] args) throws Exception {
    GenericOptionsParser optionsParser = new GenericOptionsParser(new Configuration(), args);
    Configuration conf = optionsParser.getConfiguration();
    int exitCode = ToolRunner.run(conf, new MoabLogSearch(), optionsParser.getRemainingArgs());
    System.exit(exitCode);
}
From source file:com.alectenharmsel.research.SrcTok.java
License:Apache License
/**
 * Entry point: parses generic Hadoop options, then runs the SrcTok tool via
 * ToolRunner with the remaining CLI arguments.
 */
public static void main(String[] args) throws Exception {
    GenericOptionsParser optionsParser = new GenericOptionsParser(new Configuration(), args);
    Configuration conf = optionsParser.getConfiguration();
    int exitCode = ToolRunner.run(conf, new SrcTok(), optionsParser.getRemainingArgs());
    System.exit(exitCode);
}
From source file:com.architecting.ch07.MapReduceIndexerTool.java
License:Apache License
private void addDistributedCacheFile(File file, Configuration conf) throws IOException { String HADOOP_TMP_FILES = "tmpfiles";// see Hadoop's GenericOptionsParser String tmpFiles = conf.get(HADOOP_TMP_FILES, ""); if (tmpFiles.length() > 0) { // already present? tmpFiles = tmpFiles + ","; }/* ww w. j a v a 2 s . c om*/ GenericOptionsParser parser = new GenericOptionsParser(new Configuration(conf), new String[] { "--files", file.getCanonicalPath() }); String additionalTmpFiles = parser.getConfiguration().get(HADOOP_TMP_FILES); assert additionalTmpFiles != null; assert additionalTmpFiles.length() > 0; tmpFiles += additionalTmpFiles; conf.set(HADOOP_TMP_FILES, tmpFiles); }