List of usage examples for org.apache.commons.cli HelpFormatter setWidth
public void setWidth(int width)
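All of the examples below follow the same basic pattern: build an Options object, create a HelpFormatter, call setWidth to override the default line width (74 columns) and then call printHelp so the usage text wraps at the wider setting. The following minimal, self-contained sketch shows just that pattern; the class name, option names, and the width of 120 are illustrative and are not taken from any of the source files listed below.

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

public class SetWidthExample {
    public static void main(String[] args) {
        Options options = new Options();
        options.addOption(new Option("i", "input", true, "input path"));
        options.addOption(new Option("o", "output", true, "output path"));

        HelpFormatter formatter = new HelpFormatter();
        // Widen the help output beyond the 74-column default so long
        // option descriptions are wrapped less aggressively.
        formatter.setWidth(120);
        formatter.printHelp("SetWidthExample", options);
    }
}

A wider setting such as 120 (or 1000, as in the ClusterSetup example below) keeps long option descriptions on a single line instead of wrapping them at the default width.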
From source file:edu.umd.shrawanraina.BuildInvertedIndexCompressed.java
/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
  Options options = new Options();

  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
  options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of reducers")
      .create(NUM_REDUCERS));

  CommandLine cmdline;
  CommandLineParser parser = new GnuParser();

  try {
    cmdline = parser.parse(options, args);
  } catch (ParseException exp) {
    System.err.println("Error parsing command line: " + exp.getMessage());
    return -1;
  }

  if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
    System.out.println("args: " + Arrays.toString(args));
    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(120);
    formatter.printHelp(this.getClass().getName(), options);
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  String inputPath = cmdline.getOptionValue(INPUT);
  String outputPath = cmdline.getOptionValue(OUTPUT);
  int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ? Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS))
      : 1;

  LOG.info("Tool name: " + BuildInvertedIndexCompressed.class.getSimpleName());
  LOG.info(" - input path: " + inputPath);
  LOG.info(" - output path: " + outputPath);
  LOG.info(" - num reducers: " + reduceTasks);

  Job job = Job.getInstance(getConf());
  job.setJobName(BuildInvertedIndexCompressed.class.getSimpleName());
  job.setJarByClass(BuildInvertedIndexCompressed.class);

  job.setNumReduceTasks(reduceTasks);

  FileInputFormat.setInputPaths(job, new Path(inputPath));
  FileOutputFormat.setOutputPath(job, new Path(outputPath));

  job.setMapOutputKeyClass(PairOfStringInt.class);
  job.setMapOutputValueClass(VIntWritable.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(BytesWritable.class);
  job.setOutputFormatClass(MapFileOutputFormat.class);

  job.setMapperClass(MyMapper.class);
  job.setReducerClass(MyReducer.class);

  // Delete the output directory if it exists already.
  Path outputDir = new Path(outputPath);
  FileSystem.get(getConf()).delete(outputDir, true);

  /*
  // Delete side data directory if it exists already.
  Path sdDir = new Path("sidedata");
  FileSystem.get(getConf()).delete(sdDir, true);
  */

  long startTime = System.currentTimeMillis();
  job.waitForCompletion(true);
  System.out.println("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

  return 0;
}
From source file:StripesPMI_nocombiner.java
/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
  Options options = new Options();

  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
  options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of reducers")
      .create(NUM_REDUCERS));

  CommandLine cmdline;
  CommandLineParser parser = new GnuParser();

  try {
    cmdline = parser.parse(options, args);
  } catch (ParseException exp) {
    System.err.println("Error parsing command line: " + exp.getMessage());
    return -1;
  }

  if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
    System.out.println("args: " + Arrays.toString(args));
    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(120);
    formatter.printHelp(this.getClass().getName(), options);
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  String inputPath = cmdline.getOptionValue(INPUT);
  String outputPath = cmdline.getOptionValue(OUTPUT) + "_TMP"; // cmdline.getOptionValue(OUTPUT);
  int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ? Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS))
      : 1;

  LOG.info("Tool: " + StripesPMI_nocombiner.class.getSimpleName());
  LOG.info(" - input path: " + inputPath);
  LOG.info(" - output path: " + outputPath);
  LOG.info(" - number of reducers: " + reduceTasks);

  Job job_first = Job.getInstance(getConf());
  job_first.setJobName(StripesPMI_nocombiner.class.getSimpleName());
  job_first.setJarByClass(StripesPMI_nocombiner.class);

  // Delete the output directory if it exists already.
  Path outputDir = new Path(outputPath);
  FileSystem.get(getConf()).delete(outputDir, true);

  job_first.setNumReduceTasks(reduceTasks);

  FileInputFormat.setInputPaths(job_first, new Path(inputPath));
  FileOutputFormat.setOutputPath(job_first, new Path(outputPath));

  job_first.setMapOutputKeyClass(Text.class);
  job_first.setMapOutputValueClass(String2IntOpenHashMapWritable.class);
  job_first.setOutputKeyClass(PairOfStrings.class); // Text.class); // PairOfStrings.class);
  job_first.setOutputValueClass(DoubleWritable.class);
  job_first.setOutputFormatClass(TextOutputFormat.class); // changed

  job_first.setMapperClass(MyMapper_first.class);
  // job_first.setCombinerClass(MyCombiner.class);
  job_first.setReducerClass(MyReducer_first.class);

  long startTime = System.currentTimeMillis();
  job_first.waitForCompletion(true);

  // START: run the second MR job to just aggregate the result.
  inputPath = outputPath; // cmdline.getOptionValue(INPUT);
  outputPath = cmdline.getOptionValue(OUTPUT);

  Job job_second = Job.getInstance(getConf());
  job_second.setJobName(StripesPMI_nocombiner.class.getSimpleName());
  job_second.setJarByClass(StripesPMI_nocombiner.class);

  // Delete the output directory if it exists already.
  outputDir = new Path(outputPath);
  FileSystem.get(getConf()).delete(outputDir, true);

  job_second.setNumReduceTasks(reduceTasks);

  FileInputFormat.setInputPaths(job_second, new Path(inputPath));
  FileOutputFormat.setOutputPath(job_second, new Path(outputPath));

  job_second.setMapOutputKeyClass(Text.class);
  job_second.setMapOutputValueClass(DoubleWritable.class);
  job_second.setOutputKeyClass(Text.class); // PairOfStrings.class);
  job_second.setOutputValueClass(DoubleWritable.class);
  // job_second.setOutputFormatClass(TextOutputFormat.class); // changed

  job_second.setMapperClass(MyMapper_second.class);
  // job_second.setCombinerClass(MyCombiner.class);
  job_second.setReducerClass(MyReducer_second.class);

  job_second.waitForCompletion(true);
  // END

  System.out.println("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");
  return 0;
}
From source file:edu.umd.gorden2.StripesPMI.java
/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
  Options options = new Options();

  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
  options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of reducers")
      .create(NUM_REDUCERS));

  CommandLine cmdline;
  CommandLineParser parser = new GnuParser();

  try {
    cmdline = parser.parse(options, args);
  } catch (ParseException exp) {
    System.err.println("Error parsing command line: " + exp.getMessage());
    return -1;
  }

  if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
    System.out.println("args: " + Arrays.toString(args));
    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(120);
    formatter.printHelp(this.getClass().getName(), options);
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  String inputPath = cmdline.getOptionValue(INPUT);
  String outputPath = cmdline.getOptionValue(OUTPUT);
  int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ? Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS))
      : 1;

  LOG.info("Tool: " + StripesPMI.class.getSimpleName());
  LOG.info(" - input path: " + inputPath);
  LOG.info(" - output path: " + outputPath);
  LOG.info(" - number of reducers: " + reduceTasks);

  Job job = Job.getInstance(getConf());
  job.setJobName(StripesPMI.class.getSimpleName());
  job.setJarByClass(StripesPMI.class);

  // Delete the output directory if it exists already.
  Path outputDir = new Path(outputPath);
  FileSystem.get(getConf()).delete(outputDir, true);

  job.setNumReduceTasks(reduceTasks);

  FileInputFormat.setInputPaths(job, new Path(inputPath));
  FileOutputFormat.setOutputPath(job, new Path(outputPath));

  job.setMapOutputKeyClass(Text.class);
  job.setMapOutputValueClass(HMapStFW.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(DoubleWritable.class);

  job.setMapperClass(MyMapper.class);
  job.setCombinerClass(MyCombiner.class);
  job.setReducerClass(MyReducer.class);
  job.addCacheFile(new URI("wc/part-r-00000"));

  //
  // Wordcount job
  Job job2 = Job.getInstance(getConf());
  job2.setJobName("Wordcount");
  job2.setJarByClass(PairsPMI.class);

  String outputPath2 = "wc";
  // Delete the output directory if it exists already.
  Path outputDir2 = new Path(outputPath2);
  FileSystem.get(getConf()).delete(outputDir2, true);

  job2.setNumReduceTasks(1);

  FileInputFormat.setInputPaths(job2, new Path(inputPath));
  FileOutputFormat.setOutputPath(job2, new Path(outputPath2));

  job2.setMapOutputKeyClass(Text.class);
  job2.setMapOutputValueClass(IntWritable.class);
  job2.setOutputKeyClass(Text.class);
  job2.setOutputValueClass(IntWritable.class);

  job2.setMapperClass(MyMapper2.class);
  job2.setCombinerClass(MyReducer2.class);
  job2.setReducerClass(MyReducer2.class);

  long startTime = System.currentTimeMillis();
  job2.waitForCompletion(true);
  job.waitForCompletion(true);
  System.out.println("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

  return 0;
}
From source file:es.ua.alex952.main.MainBatch.java
/**
 * Main constructor that parses all arguments from the command line.
 *
 * @param args Command line arguments
 */
public MainBatch(String[] args) {
  // Option creation for usage print
  Option create = OptionBuilder.withLongOpt("create").withDescription("switch for creating a job")
      .create("c");
  Option daemon = OptionBuilder.withArgName("id").withLongOpt("daemon")
      .withDescription("daemon mode for monitorizing the job after its creation").hasOptionalArg()
      .create("d");
  Option configfile = OptionBuilder.withArgName("config.properties").withLongOpt("configfile")
      .withDescription("the properties config file that has all the program specific configurations")
      .hasArg().create("cf");
  Option parametersfile = OptionBuilder.withArgName("parameters.properties").withLongOpt("parametersfile")
      .withDescription(
          "properties paramters file that has all the job specific parameters for its creation")
      .hasArg().create("pf");
  Option sourcelanguage = OptionBuilder.withArgName("sl.txt").withLongOpt("sourcelanguage")
      .withDescription("text file containing all the sentences to be translated").hasArg().create("sl");
  Option referencetranslations = OptionBuilder.withArgName("rt.txt").withLongOpt("referencetranslations")
      .withDescription("text file with a translation of reference for each source language sentence")
      .hasArg().create("rt");
  Option gold = OptionBuilder.withArgName("gold.txt").withLongOpt("gold").withDescription(
      "text file with the gold standards given for the job. It has a three lines format that is composed by one line for the source language sentence, one for the reference translation, and the last one for the correct translation")
      .hasArg().create("g");
  Option daemonfrecuency = OptionBuilder.withArgName("daemon frecuency").withLongOpt("daemonfrecuency")
      .withDescription("daemon check frecuency").hasArg().create("df");
  Option help = OptionBuilder.withLongOpt("help").withDescription("shows this help message").create("h");

  options.addOption(create);
  options.addOption(daemon);
  options.addOption(daemonfrecuency);
  options.addOption(configfile);
  options.addOption(parametersfile);
  options.addOption(sourcelanguage);
  options.addOption(referencetranslations);
  options.addOption(gold);
  options.addOption(help);

  // Option parsing
  CommandLineParser clp = new BasicParser();
  try {
    CommandLine cl = clp.parse(options, args);

    if (cl.hasOption("help") || cl.getOptions().length == 0) {
      HelpFormatter hf = new HelpFormatter();
      hf.setWidth(100);
      hf.printHelp("CrowdFlowerTasks", options);

      op = Operation.QUIT;
      return;
    }

    if (cl.hasOption("daemon") && !cl.hasOption("c")) {
      if (cl.getOptionValue("daemon") == null) {
        logger.error("The daemon option must have a job id if it isn't along with create option");
        op = Operation.QUIT;
        return;
      } else if (!cl.hasOption("configfile")) {
        logger.error("The config file is mandatory");
        op = Operation.QUIT;
        return;
      }

      try {
        Integer.parseInt(cl.getOptionValue("daemon"));
        this.id = cl.getOptionValue("daemon");
        this.configFile = cl.getOptionValue("configfile");
        this.op = Operation.DAEMON;

        if (cl.hasOption("daemonfrecuency")) {
          try {
            Long l = Long.parseLong(id);
            this.frecuency = l;
          } catch (NumberFormatException e) {
            this.logger.info("The frecuency is not a number. Setting to default: 10 sec");
          }
        } else {
          this.logger.info("Daemon frecuency not set. Setting to default: 10 sec");
        }
      } catch (NumberFormatException e) {
        this.logger.error("The id following daemon option must be an integer");
        this.op = Operation.QUIT;
        return;
      }
    } else {
      if (!cl.hasOption("gold") || !cl.hasOption("configfile") || !cl.hasOption("parametersfile")
          || !cl.hasOption("referencetranslations") || !cl.hasOption("sourcelanguage")) {
        logger.error(
            "The files gold, tr, lo, config.properties and parameters.properties are mandatory for creating jobs");
        this.op = Operation.QUIT;
        return;
      } else {
        if (cl.hasOption("daemon"))
          this.daemon = true;
        else {
          if (cl.hasOption("daemonfrecuency"))
            this.logger.info(
                "Daemon frecuency parameter found, ignoring it as there's not a daemon option");
        }

        this.configFile = cl.getOptionValue("configfile");
        this.parametersFile = cl.getOptionValue("parametersfile");
        this.pathGold = cl.getOptionValue("gold");
        this.pathLO = cl.getOptionValue("sourcelanguage");
        this.pathTR = cl.getOptionValue("referencetranslations");
        this.op = Operation.CREATE;
      }
    }
  } catch (ParseException ex) {
    logger.error("Failed argument parsing", ex);
  }
}
From source file:edu.umd.shrawanraina.UserLocation.java
/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
  Options options = new Options();

  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
  options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of reducers")
      .create(NUM_REDUCERS));
  options.addOption(new Option(COMBINER, "use combiner"));
  options.addOption(new Option(INMAPPER_COMBINER, "user in-mapper combiner"));

  CommandLine cmdline;
  CommandLineParser parser = new GnuParser();

  try {
    cmdline = parser.parse(options, args);
  } catch (ParseException exp) {
    System.err.println("Error parsing command line: " + exp.getMessage());
    return -1;
  }

  if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
    System.out.println("args: " + Arrays.toString(args));
    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(120);
    formatter.printHelp(this.getClass().getName(), options);
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  String inputPath = cmdline.getOptionValue(INPUT);
  String outputPath = cmdline.getOptionValue(OUTPUT);
  int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ? Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS))
      : 1;
  boolean useCombiner = cmdline.hasOption(COMBINER);
  boolean useInmapCombiner = cmdline.hasOption(INMAPPER_COMBINER);

  LOG.info("Tool: " + UserLocation.class.getSimpleName());
  LOG.info(" - input path: " + inputPath);
  LOG.info(" - output path: " + outputPath);
  LOG.info(" - number of reducers: " + reduceTasks);
  LOG.info(" - use combiner: " + useCombiner);
  LOG.info(" - use in-mapper combiner: " + useInmapCombiner);

  runJob1(inputPath, outputPath, reduceTasks, useCombiner, useInmapCombiner);
  runJob2(outputPath, useCombiner, useInmapCombiner);

  return 0;
}
From source file:edu.umd.gorden2.PairsPMI.java
/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
  Options options = new Options();

  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
  options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of reducers")
      .create(NUM_REDUCERS));

  CommandLine cmdline;
  CommandLineParser parser = new GnuParser();

  try {
    cmdline = parser.parse(options, args);
  } catch (ParseException exp) {
    System.err.println("Error parsing command line: " + exp.getMessage());
    return -1;
  }

  if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
    System.out.println("args: " + Arrays.toString(args));
    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(120);
    formatter.printHelp(this.getClass().getName(), options);
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  String inputPath = cmdline.getOptionValue(INPUT);
  String outputPath = cmdline.getOptionValue(OUTPUT);
  int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ? Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS))
      : 1;

  LOG.info("Tool: " + PairsPMI.class.getSimpleName());
  LOG.info(" - input path: " + inputPath);
  LOG.info(" - output path: " + outputPath);
  LOG.info(" - number of reducers: " + reduceTasks);

  //
  // Pair job
  Job job = Job.getInstance(getConf());
  job.setJobName(PairsPMI.class.getSimpleName());
  job.setJarByClass(PairsPMI.class);

  // Delete the output directory if it exists already.
  Path outputDir = new Path(outputPath);
  FileSystem.get(getConf()).delete(outputDir, true);

  job.setNumReduceTasks(reduceTasks);

  FileInputFormat.setInputPaths(job, new Path(inputPath));
  FileOutputFormat.setOutputPath(job, new Path(outputPath));

  job.setMapOutputKeyClass(PairOfStrings.class);
  job.setMapOutputValueClass(DoubleWritable.class);
  job.setOutputKeyClass(PairOfStrings.class);
  job.setOutputValueClass(DoubleWritable.class);

  job.setMapperClass(MyMapper.class);
  job.setCombinerClass(MyCombiner.class);
  job.setReducerClass(MyReducer.class);
  // job.setPartitionerClass(MyPartitioner.class);
  job.addCacheFile(new URI("wc/part-r-00000"));

  long startTime = System.currentTimeMillis();

  //
  // Wordcount job
  Job job2 = Job.getInstance(getConf());
  job2.setJobName("Wordcount");
  job2.setJarByClass(PairsPMI.class);

  String outputPath2 = "wc";
  // Delete the output directory if it exists already.
  Path outputDir2 = new Path(outputPath2);
  FileSystem.get(getConf()).delete(outputDir2, true);

  job2.setNumReduceTasks(1);

  FileInputFormat.setInputPaths(job2, new Path(inputPath));
  FileOutputFormat.setOutputPath(job2, new Path(outputPath2));

  job2.setMapOutputKeyClass(Text.class);
  job2.setMapOutputValueClass(IntWritable.class);
  job2.setOutputKeyClass(Text.class);
  job2.setOutputValueClass(IntWritable.class);

  job2.setMapperClass(MyMapper2.class);
  job2.setCombinerClass(MyReducer2.class);
  job2.setReducerClass(MyReducer2.class);

  // Add side file to job1.
  job.addCacheFile(new URI("wc/part-r-00000"));

  job2.waitForCompletion(true);
  job.waitForCompletion(true);
  System.out.println("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

  return 0;
}
From source file:gr.evoltrio.conf.CliParametersParser.java
/**
 * TODO correct this to smt more generic
 */
private void parseParameters() {
  try {
    // validate that block-size has been set
    if (line.hasOption("help")) {
      // print the value of block-size
      System.out.println(line.getOptionValue("help"));
      HelpFormatter formatter = new HelpFormatter();
      formatter.setWidth(120);
      formatter.printHelp("java -jar EvolTrio.jar [ OPTIONS ] FILE", options);
    }
  } catch (Exception e) {
    // TODO: handle exception
  }
}
From source file:com.linkedin.helix.tools.ClusterSetup.java
public static void printUsage(Options cliOptions) {
  HelpFormatter helpFormatter = new HelpFormatter();
  helpFormatter.setWidth(1000);
  helpFormatter.printHelp("java " + ClusterSetup.class.getName(), cliOptions);
}
From source file:PairsPMI_M.java
/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
  Options options = new Options();

  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
  options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of reducers")
      .create(NUM_REDUCERS));

  CommandLine cmdline;
  CommandLineParser parser = new GnuParser();

  try {
    cmdline = parser.parse(options, args);
  } catch (ParseException exp) {
    System.err.println("Error parsing command line: " + exp.getMessage());
    return -1;
  }

  if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
    System.out.println("args: " + Arrays.toString(args));
    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(120);
    formatter.printHelp(this.getClass().getName(), options);
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  // First MapReduce job.
  String inputPath = cmdline.getOptionValue(INPUT);
  String outputPath = cmdline.getOptionValue(OUTPUT);
  int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ? Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS))
      : 1;

  LOG.info("Tool name: " + PairsPMI_M.class.getSimpleName());
  LOG.info(" - input path: " + inputPath);
  LOG.info(" - output path: " + outputPath);
  LOG.info(" - tmp path: " + outputPath + "/tmp");
  LOG.info(" - num reducers: " + reduceTasks);

  Job job = Job.getInstance(getConf());
  job.setJobName(PairsPMI_M.class.getSimpleName());
  job.setJarByClass(PairsPMI_M.class);

  // Delete the tmp directory if it exists already.
  Path tmpDir = new Path("tmp_wj");
  FileSystem.get(getConf()).delete(tmpDir, true);

  job.setNumReduceTasks(reduceTasks);

  FileInputFormat.setInputPaths(job, new Path(inputPath));
  FileOutputFormat.setOutputPath(job, new Path("tmp_wj"));

  job.setMapOutputKeyClass(PairOfStrings.class);
  job.setMapOutputValueClass(FloatWritable.class);
  job.setOutputKeyClass(PairOfStrings.class);
  job.setOutputValueClass(FloatWritable.class);
  job.setOutputFormatClass(SequenceFileOutputFormat.class);
  // job.setOutputFormatClass(TextOutputFormat.class);

  job.setMapperClass(MyMapper.class);
  job.setCombinerClass(MyCombiner.class);
  job.setReducerClass(MyReducer.class);
  job.setPartitionerClass(MyPartitioner.class);

  long startTime = System.currentTimeMillis();
  job.waitForCompletion(true);
  double time1 = (System.currentTimeMillis() - startTime) / 1000.0;
  System.out.println("Job Finished in " + time1 + " seconds");
  numRecords = job.getCounters().findCounter("org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS")
      .getValue();

  /*
   * Second MapReduce job.
   */
  LOG.info("Tool name: " + PairsPMI_M.class.getSimpleName());
  LOG.info("second stage of MapReduce");
  LOG.info(" - input from tmp path: " + outputPath + "/tmp_wj");
  LOG.info(" - output path: " + outputPath);
  LOG.info(" - num reducers: " + reduceTasks);

  // Set the global variable.
  Configuration conf = getConf();
  conf.setLong("numRec", numRecords);

  job = Job.getInstance(getConf());
  job.setJobName(PairsPMI_M.class.getSimpleName());
  job.setJarByClass(PairsPMI_M.class);

  // Delete the output directory if it exists already.
  Path outputDir = new Path(outputPath);
  FileSystem.get(getConf()).delete(outputDir, true);

  job.setNumReduceTasks(reduceTasks);

  FileInputFormat.setInputPaths(job, new Path("tmp_wj/part*"));
  FileOutputFormat.setOutputPath(job, new Path(outputPath));

  job.setMapOutputKeyClass(PairOfStrings.class);
  job.setMapOutputValueClass(FloatWritable.class);
  // job.setOutputKeyClass(PairOfStrings.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(FloatWritable.class);
  job.setInputFormatClass(SequenceFileInputFormat.class);
  // job.setOutputFormatClass(SequenceFileOutputFormat.class);
  job.setOutputFormatClass(TextOutputFormat.class);

  job.setMapperClass(MyMapperSecond.class);
  // job.setCombinerClass(MyCombiner.class);
  job.setReducerClass(MyReducerSecond.class);
  job.setPartitionerClass(MyPartitioner.class);

  startTime = System.currentTimeMillis();
  job.waitForCompletion(true);
  double time2 = (System.currentTimeMillis() - startTime) / 1000.0;
  System.out.println("Second job finished in " + time2 + " seconds");
  System.out.println("Total time: " + (time1 + time2) + " seconds");

  return 0;
}
From source file:edu.umd.cloud9.example.clustering.IterateGMM.java
/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
  Options options = new Options();

  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
  options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
  options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of reducers")
      .create(NUM_REDUCERS));

  CommandLine cmdline;
  CommandLineParser parser = new GnuParser();

  try {
    cmdline = parser.parse(options, args);
  } catch (ParseException exp) {
    System.err.println("Error parsing command line: " + exp.getMessage());
    return -1;
  }

  if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
    System.out.println("args: " + Arrays.toString(args));
    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(120);
    formatter.printHelp(this.getClass().getName(), options);
    ToolRunner.printGenericCommandUsage(System.out);
    return -1;
  }

  String inputPath0 = cmdline.getOptionValue(INPUT);
  String outputPath = cmdline.getOptionValue(OUTPUT);
  int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ? Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS))
      : 1;

  LOG.info("Tool: " + IterateGMM.class.getSimpleName());
  LOG.info(" - input path: " + inputPath0);
  String inputPath = inputPath0 + "/points";
  LOG.info(" - output path: " + outputPath);
  LOG.info(" - number of reducers: " + reduceTasks);

  int iterations = 0;
  Configuration conf = getConf();
  while (iterations == 0 || !FinishIteration(inputPath0, iterations, conf)) {
    LOG.info("** iterations: " + iterations);
    try {
      Job job = Job.getInstance(conf);
      job.setJobName(IterateGMM.class.getSimpleName());
      job.setJarByClass(IterateGMM.class);
      // Set the path of the information of k clusters in this iteration.
      job.getConfiguration().set("clusterpath", inputPath0 + "/cluster" + iterations);

      job.setNumReduceTasks(reduceTasks);

      FileInputFormat.setInputPaths(job, new Path(inputPath));
      FileOutputFormat.setOutputPath(job, new Path(outputPath));

      job.setMapOutputKeyClass(Text.class);
      job.setMapOutputValueClass(PairOfStrings.class);
      job.setOutputKeyClass(Text.class);
      job.setOutputValueClass(Text.class);

      job.setMapperClass(MyMapper.class);
      job.setReducerClass(MyReducer.class);
      job.setPartitionerClass(MyPartitioner.class);

      // Delete the output directory if it exists already.
      Path outputDir = new Path(outputPath);
      FileSystem.get(getConf()).delete(outputDir, true);

      long startTime = System.currentTimeMillis();
      job.waitForCompletion(true);
      LOG.info("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

      reNameFile(inputPath0, outputPath, iterations + 1, conf, reduceTasks);
    } catch (Exception exp) {
      exp.printStackTrace();
    }
    iterations++;
  }

  return 0;
}