Example usage for org.apache.commons.cli OptionBuilder withArgName

List of usage examples for org.apache.commons.cli OptionBuilder withArgName

Introduction

This page shows example usages of org.apache.commons.cli OptionBuilder.withArgName.

Prototype

public static OptionBuilder withArgName(String name) 

Document

The next Option created will have the specified argument value name.
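
The method is typically chained with hasArg() and withDescription() before create(); the argument name only affects how HelpFormatter renders the option in help output (for example "-f <file>" instead of "-f <arg>"). Below is a minimal, self-contained sketch of that pattern; the class name WithArgNameDemo and the "-f"/"file" names are illustrative only and do not come from the examples below. Note that OptionBuilder is deprecated since Commons CLI 1.3 in favor of Option.builder().

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class WithArgNameDemo {
    @SuppressWarnings("static-access")
    public static void main(String[] args) throws ParseException {
        // withArgName("file") makes HelpFormatter print "-f <file>" instead of "-f <arg>".
        Option input = OptionBuilder.withArgName("file")
                .hasArg()
                .withDescription("path to the input file")
                .create("f");

        Options options = new Options();
        options.addOption(input);

        CommandLineParser parser = new GnuParser();
        CommandLine cmdline = parser.parse(options, args);

        if (cmdline.hasOption("f")) {
            System.out.println("input file: " + cmdline.getOptionValue("f"));
        } else {
            new HelpFormatter().printHelp(WithArgNameDemo.class.getName(), options);
        }
    }
}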

Usage

From source file:edu.umd.gorden2.BuildPersonalizedPageRankRecords.java

/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
    options.addOption(
            OptionBuilder.withArgName("num").hasArg().withDescription("number of nodes").create(NUM_NODES));
    options.addOption(
            OptionBuilder.withArgName("sources").hasArg().withDescription("sources").create("sources"));

    CommandLine cmdline;
    CommandLineParser parser = new GnuParser();

    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        return -1;
    }

    if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT) || !cmdline.hasOption(NUM_NODES)) {
        System.out.println("args: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp(this.getClass().getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    String inputPath = cmdline.getOptionValue(INPUT);
    String outputPath = cmdline.getOptionValue(OUTPUT);
    int n = Integer.parseInt(cmdline.getOptionValue(NUM_NODES));
    String m = cmdline.getOptionValue("sources");

    LOG.info("Tool name: " + BuildPersonalizedPageRankRecords.class.getSimpleName());
    LOG.info(" - inputDir: " + inputPath);
    LOG.info(" - outputDir: " + outputPath);
    LOG.info(" - numNodes: " + n);
    LOG.info(" - sources: " + m);

    Configuration conf = getConf();
    conf.setInt(NODE_CNT_FIELD, n);
    conf.setStrings("sources", m);
    conf.setInt("mapred.min.split.size", 1024 * 1024 * 1024);

    Job job = Job.getInstance(conf);
    job.setJobName(BuildPersonalizedPageRankRecords.class.getSimpleName() + ":" + inputPath);
    job.setJarByClass(BuildPersonalizedPageRankRecords.class);

    job.setNumReduceTasks(0);

    FileInputFormat.addInputPath(job, new Path(inputPath));
    FileOutputFormat.setOutputPath(job, new Path(outputPath));

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);

    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(PageRankNode.class);

    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(PageRankNode.class);

    job.setMapperClass(MyMapper.class);

    // Delete the output directory if it exists already.
    FileSystem.get(conf).delete(new Path(outputPath), true);

    job.waitForCompletion(true);

    return 0;
}

From source file:edu.umd.cloud9.collection.trec.TrecForwardIndexBuilder.java

/**
 * Runs this tool.
 */
@SuppressWarnings("static-access")
public int run(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("(required) collection path")
            .create(COLLECTION_OPTION));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("(required) output index path")
            .create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("(required) DocnoMapping data")
            .create(MAPPING_OPTION));

    CommandLine cmdline;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        return -1;
    }

    if (!cmdline.hasOption(COLLECTION_OPTION) || !cmdline.hasOption(INDEX_OPTION)
            || !cmdline.hasOption(MAPPING_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(this.getClass().getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);
    String indexFile = cmdline.getOptionValue(INDEX_OPTION);
    String mappingFile = cmdline.getOptionValue(MAPPING_OPTION);
    String tmpDir = "tmp-" + TrecForwardIndexBuilder.class.getSimpleName() + "-" + random.nextInt(10000);

    Job job = new Job(getConf(), TrecForwardIndexBuilder.class.getSimpleName() + ":" + collectionPath);
    job.setJarByClass(TrecForwardIndexBuilder.class);
    FileSystem fs = FileSystem.get(getConf());

    LOG.info("Tool name: " + TrecForwardIndexBuilder.class.getSimpleName());
    LOG.info(" - collection path: " + collectionPath);
    LOG.info(" - index file: " + indexFile);
    LOG.info(" - DocnoMapping file: " + mappingFile);
    LOG.info(" - temp output directory: " + tmpDir);

    job.setNumReduceTasks(1);

    if (job.getConfiguration().get("mapred.job.tracker").equals("local")) {
        job.getConfiguration().set(DOCNO_MAPPING_FILE_PROPERTY, mappingFile);
    } else {
        DistributedCache.addCacheFile(new URI(mappingFile), job.getConfiguration());
    }

    FileInputFormat.setInputPaths(job, new Path(collectionPath));
    FileOutputFormat.setOutputPath(job, new Path(tmpDir));
    FileOutputFormat.setCompressOutput(job, false);

    job.setInputFormatClass(TrecDocumentInputFormat.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(MyMapper.class);

    // delete the output directory if it exists already
    FileSystem.get(getConf()).delete(new Path(tmpDir), true);

    job.waitForCompletion(true);
    Counters counters = job.getCounters();
    int numDocs = (int) counters.findCounter(Count.DOCS).getValue();

    String inputFile = tmpDir + "/" + "part-r-00000";

    LOG.info("Writing " + numDocs + " doc offseta to " + indexFile);
    LineReader reader = new LineReader(fs.open(new Path(inputFile)));

    FSDataOutputStream writer = fs.create(new Path(indexFile), true);

    writer.writeUTF(edu.umd.cloud9.collection.trec.TrecForwardIndex.class.getCanonicalName());
    writer.writeUTF(collectionPath);
    writer.writeInt(numDocs);

    int cnt = 0;
    Text line = new Text();
    while (reader.readLine(line) > 0) {
        String[] arr = line.toString().split("\\t");
        long offset = Long.parseLong(arr[1]);
        int len = Integer.parseInt(arr[2]);

        writer.writeLong(offset);
        writer.writeInt(len);

        cnt++;
        if (cnt % 100000 == 0) {
            LOG.info(cnt + " docs");
        }
    }
    reader.close();
    writer.close();
    LOG.info(cnt + " docs total. Done!");

    if (numDocs != cnt) {
        throw new RuntimeException("Unexpected number of documents in building forward index!");
    }

    fs.delete(new Path(tmpDir), true);

    return 0;
}

From source file:com.googlecode.jmxtrans.cli.CommonsCliArgumentParser.java

@Nonnull
private Options getOptions() {
    Options options = new Options();
    options.addOption("c", true, "Continue processing even if one of the JSON configuration file is invalid.");
    options.addOption("j", true, "Directory where json configuration is stored. Default is .");
    options.addOption("f", true, "A single json file to execute.");
    options.addOption("e", false, "Run endlessly. Default false.");
    options.addOption("q", true, "Path to quartz configuration file.");
    options.addOption("s", true, "Seconds between server job runs (not defined with cron). Default: 60");
    options.addOption(OptionBuilder.withArgName("a").withLongOpt("additionalJars").hasArgs()
            .withValueSeparator(',')
            .withDescription("Comma delimited list of additional jars to add to the class path").create("a"));
    options.addOption("h", false, "Help");
    return options;
}

From source file:com.cloudera.sqoop.tool.ExportTool.java

/**
 * Construct the set of options that control exports.
 * @return the RelatedOptions that can be used to parse the export
 * arguments.
 */
protected RelatedOptions getExportOptions() {
    RelatedOptions exportOpts = new RelatedOptions("Export control arguments");

    exportOpts.addOption(
            OptionBuilder.withDescription("Use direct export fast path").withLongOpt(DIRECT_ARG).create());
    exportOpts.addOption(OptionBuilder.withArgName("table-name").hasArg().withDescription("Table to populate")
            .withLongOpt(TABLE_ARG).create());
    exportOpts.addOption(
            OptionBuilder.withArgName("n").hasArg().withDescription("Use 'n' map tasks to export in parallel")
                    .withLongOpt(NUM_MAPPERS_ARG).create(NUM_MAPPERS_SHORT_ARG));
    exportOpts.addOption(OptionBuilder.withArgName("dir").hasArg()
            .withDescription("HDFS source path for the export").withLongOpt(EXPORT_PATH_ARG).create());
    exportOpts.addOption(OptionBuilder.withArgName("key").hasArg()
            .withDescription("Update records by specified key column").withLongOpt(UPDATE_KEY_ARG).create());
    exportOpts.addOption(OptionBuilder.withArgName("table-name").hasArg()
            .withDescription("Intermediate staging table").withLongOpt(STAGING_TABLE_ARG).create());
    exportOpts.addOption(
            OptionBuilder.withDescription("Indicates that any data in " + "staging table can be deleted")
                    .withLongOpt(CLEAR_STAGING_TABLE_ARG).create());

    return exportOpts;
}

From source file:esg.common.shell.cmds.ESGFrealize.java

public void doInitOptions() {
    getOptions().addOption("a", "all", false, "realize all dataset files");
    Option dataset = OptionBuilder.withArgName("datasetdir").hasArg().withLongOpt("dataset")
            .withDescription("lists the files of a particular dataset").create("ds");
    getOptions().addOption(dataset);
    Option regex = OptionBuilder.withArgName("regex").hasArg()
            .withDescription("Select only dataset files that match regex").create("regex");
    getOptions().addOption(regex);
}

From source file:ComputeCooccurrenceMatrixStripes.java

/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("window size").create(WINDOW));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("number of reducers")
            .create(NUM_REDUCERS));

    CommandLine cmdline;
    CommandLineParser parser = new GnuParser();

    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        return -1;
    }

    if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
        System.out.println("args: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp(this.getClass().getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    String inputPath = cmdline.getOptionValue(INPUT);
    String outputPath = cmdline.getOptionValue(OUTPUT);
    int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ? Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS))
            : 1;
    int window = cmdline.hasOption(WINDOW) ? Integer.parseInt(cmdline.getOptionValue(WINDOW)) : 2;

    LOG.info("Tool: " + ComputeCooccurrenceMatrixStripes.class.getSimpleName());
    LOG.info(" - input path: " + inputPath);
    LOG.info(" - output path: " + outputPath);
    LOG.info(" - window: " + window);
    LOG.info(" - number of reducers: " + reduceTasks);

    Job job = Job.getInstance(getConf());
    job.setJobName(ComputeCooccurrenceMatrixStripes.class.getSimpleName());
    job.setJarByClass(ComputeCooccurrenceMatrixStripes.class);

    // Delete the output directory if it exists already.
    Path outputDir = new Path(outputPath);
    FileSystem.get(getConf()).delete(outputDir, true);

    job.getConfiguration().setInt("window", window);

    job.setNumReduceTasks(reduceTasks);

    FileInputFormat.setInputPaths(job, new Path(inputPath));
    FileOutputFormat.setOutputPath(job, new Path(outputPath));

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(String2IntOpenHashMapWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(String2IntOpenHashMapWritable.class);

    job.setMapperClass(MyMapper.class);
    job.setCombinerClass(MyReducer.class);
    job.setReducerClass(MyReducer.class);

    long startTime = System.currentTimeMillis();
    job.waitForCompletion(true);
    System.out.println("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");

    return 0;
}

From source file:acromusashi.stream.example.ml.client.KMeansDrpcClient.java

/**
 * Creates the command line options for the KMeans DRPC client.
 *
 * @return the command line options
 */
public static Options createOptions() {
    Options cliOptions = new Options();

    // KMeansTopology
    OptionBuilder.hasArg(true);
    OptionBuilder.withArgName("KMeansTopology Conf Path");
    OptionBuilder.withDescription("KMeansTopology Conf Path");
    OptionBuilder.isRequired(true);
    Option confPathOption = OptionBuilder.create("c");

    // KMeans Data
    OptionBuilder.hasArg(true);
    OptionBuilder.withArgName("KMeans Data");
    OptionBuilder.withDescription("KMeans Data");
    OptionBuilder.isRequired(true);
    Option dataOption = OptionBuilder.create("d");

    // Help
    OptionBuilder.withDescription("show help");
    Option helpOption = OptionBuilder.create("h");

    cliOptions.addOption(confPathOption);
    cliOptions.addOption(dataOption);
    cliOptions.addOption(helpOption);
    return cliOptions;
}

From source file:ivory.app.TrecForwardIndexBuilder.java

/**
 * Runs this tool.
 */
@SuppressWarnings("static-access")
public int run(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("(required) collection path")
            .create(COLLECTION_OPTION));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("(required) output index path")
            .create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("(required) DocnoMapping data")
            .create(MAPPING_OPTION));

    CommandLine cmdline;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        return -1;
    }

    if (!cmdline.hasOption(COLLECTION_OPTION) || !cmdline.hasOption(INDEX_OPTION)
            || !cmdline.hasOption(MAPPING_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(this.getClass().getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    String collectionPath = cmdline.getOptionValue(COLLECTION_OPTION);
    String indexFile = cmdline.getOptionValue(INDEX_OPTION);
    String mappingFile = cmdline.getOptionValue(MAPPING_OPTION);
    String tmpDir = "tmp-" + TrecForwardIndexBuilder.class.getSimpleName() + "-" + random.nextInt(10000);

    Configuration conf = getConf();
    conf.set("mapreduce.map.memory.mb", "4096");
    conf.set("mapreduce.map.java.opts", "-Xmx4096m");

    Job job = new Job(conf, TrecForwardIndexBuilder.class.getSimpleName() + ":" + collectionPath);
    job.setJarByClass(TrecForwardIndexBuilder.class);
    FileSystem fs = FileSystem.get(getConf());

    LOG.info("Tool name: " + TrecForwardIndexBuilder.class.getSimpleName());
    LOG.info(" - collection path: " + collectionPath);
    LOG.info(" - index file: " + indexFile);
    LOG.info(" - DocnoMapping file: " + mappingFile);
    LOG.info(" - temp output directory: " + tmpDir);

    job.setNumReduceTasks(1);

    if (job.getConfiguration().get("mapred.job.tracker").equals("local")) {
        job.getConfiguration().set(DOCNO_MAPPING_FILE_PROPERTY, mappingFile);
    } else {
        DistributedCache.addCacheFile(new URI(mappingFile), job.getConfiguration());
    }

    FileInputFormat.setInputPaths(job, new Path(collectionPath));
    FileOutputFormat.setOutputPath(job, new Path(tmpDir));
    FileOutputFormat.setCompressOutput(job, false);

    job.setInputFormatClass(TrecDocumentInputFormat.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(MyMapper.class);

    // delete the output directory if it exists already
    FileSystem.get(conf).delete(new Path(tmpDir), true);

    job.waitForCompletion(true);
    Counters counters = job.getCounters();
    int numDocs = (int) counters.findCounter(Count.DOCS).getValue();

    String inputFile = tmpDir + "/" + "part-r-00000";

    LOG.info("Writing " + numDocs + " doc offseta to " + indexFile);
    LineReader reader = new LineReader(fs.open(new Path(inputFile)));

    FSDataOutputStream writer = fs.create(new Path(indexFile), true);

    writer.writeUTF(edu.umd.cloud9.collection.trec.TrecForwardIndex.class.getCanonicalName());
    writer.writeUTF(collectionPath);
    writer.writeInt(numDocs);

    int cnt = 0;
    Text line = new Text();
    while (reader.readLine(line) > 0) {
        String[] arr = line.toString().split("\\t");
        long offset = Long.parseLong(arr[1]);
        int len = Integer.parseInt(arr[2]);

        writer.writeLong(offset);
        writer.writeInt(len);

        cnt++;
        if (cnt % 100000 == 0) {
            LOG.info(cnt + " docs");
        }
    }
    reader.close();
    writer.close();
    LOG.info(cnt + " docs total. Done!");

    if (numDocs != cnt) {
        throw new RuntimeException("Unexpected number of documents in building forward index!");
    }

    fs.delete(new Path(tmpDir), true);

    return 0;
}

From source file:edu.umd.gorden2.ExtractTopPersonalizedPageRankNodes.java

/**
 * Runs this tool.
 */
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
    options.addOption(OptionBuilder.withArgName("num").hasArg().withDescription("top n").create(TOP));
    options.addOption(
            OptionBuilder.withArgName("sources").hasArg().withDescription("sources").create("sources"));

    CommandLine cmdline;
    CommandLineParser parser = new GnuParser();

    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        return -1;
    }

    if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(TOP)) {
        System.out.println("args: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp(this.getClass().getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    String inputPath = cmdline.getOptionValue(INPUT);
    String outputPath = "result";
    int n = Integer.parseInt(cmdline.getOptionValue(TOP));
    String m = cmdline.getOptionValue("sources");

    LOG.info("Tool name: " + ExtractTopPersonalizedPageRankNodes.class.getSimpleName());
    LOG.info(" - input: " + inputPath);
    LOG.info(" - output: " + outputPath);
    LOG.info(" - top: " + n);
    LOG.info(" - sources: " + m);

    Configuration conf = getConf();
    conf.setInt("mapred.min.split.size", 1024 * 1024 * 1024);
    conf.setInt("n", n);

    String[] mm = m.split(",");
    for (int i = 0; i < mm.length; i++) {
        Job job = Job.getInstance(conf);
        job.setJobName(ExtractTopPersonalizedPageRankNodes.class.getName() + ":" + inputPath + "#" + i);
        job.setJarByClass(ExtractTopPersonalizedPageRankNodes.class);

        job.getConfiguration().setInt("index", i);
        job.setNumReduceTasks(1);

        FileInputFormat.addInputPath(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath + "/" + i));

        job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(FloatWritable.class);

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(FloatWritable.class);

        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReducer.class);

        // Delete the output directory if it exists already.
        FileSystem.get(conf).delete(new Path(outputPath + "/" + i), true);
        job.waitForCompletion(true);
    }

    for (int i = 0; i < mm.length; i++) {
        String outm = outputPath + "/" + i + "/part-r-00000";
        FileSystem fs = FileSystem.get(getConf());
        for (FileStatus f : fs.listStatus(new Path(outm))) {
            BufferedReader d = new BufferedReader(new InputStreamReader(fs.open(f.getPath())));
            String line = "Source: " + mm[i];
            System.out.println(line);
            while (true) {
                line = d.readLine();
                if (line == null)
                    break;
                System.out.println(line);

            }
        }
    }
    return 0;
}

From source file:acromusashi.stream.example.ml.client.LofDrpcClient.java

/**
 * Creates the command line options for the LOF DRPC client.
 *
 * @return the command line options
 */
public static Options createOptions() {
    Options cliOptions = new Options();

    // LofTopology
    OptionBuilder.hasArg(true);
    OptionBuilder.withArgName("LofTopology Conf Path");
    OptionBuilder.withDescription("LofTopology Conf Path");
    OptionBuilder.isRequired(true);
    Option confPathOption = OptionBuilder.create("c");

    // LOF
    OptionBuilder.hasArg(true);
    OptionBuilder.withArgName("LOF Data");
    OptionBuilder.withDescription("LOF Data");
    OptionBuilder.isRequired(true);
    Option dataOption = OptionBuilder.create("d");

    // Help
    OptionBuilder.withDescription("show help");
    Option helpOption = OptionBuilder.create("h");

    cliOptions.addOption(confPathOption);
    cliOptions.addOption(dataOption);
    cliOptions.addOption(helpOption);
    return cliOptions;
}