Example usage for org.apache.commons.cli Parser parse

List of usage examples for org.apache.commons.cli Parser parse

Introduction

In this page you can find the example usage for org.apache.commons.cli Parser parse.

Prototype

public CommandLine parse(Options options, String[] arguments) throws ParseException 

Source Link

Document

Parses the specified arguments based on the specified Options.

Usage

From source file:org.apache.hadoop.mapred.pipes.Submitter.java

// Parses the pipes command-line options, translates them into JobConf
// settings, and submits the job. Returns 0 on success, 1 on a usage or
// parse error.
@Override
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }
    cli.addOption("input", false, "input path to the maps", "path");
    cli.addOption("output", false, "output path from the reduces", "path");

    cli.addOption("jar", false, "job jar file", "path");
    cli.addOption("inputformat", false, "java classname of InputFormat", "class");
    //cli.addArgument("javareader", false, "is the RecordReader in Java");
    cli.addOption("map", false, "java classname of Mapper", "class");
    cli.addOption("partitioner", false, "java classname of Partitioner", "class");
    cli.addOption("reduce", false, "java classname of Reducer", "class");
    cli.addOption("writer", false, "java classname of OutputFormat", "class");
    cli.addOption("program", false, "URI to application executable", "class");
    cli.addOption("reduces", false, "number of reduces", "num");
    cli.addOption("jobconf", false,
            "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.", "key=val");
    Parser parser = cli.createParser();
    try {

        // Let the generic parser consume shared Hadoop options (-conf, -D, ...)
        // first; only the remaining args are handed to the pipes parser.
        GenericOptionsParser genericParser = new GenericOptionsParser(getConf(), args);
        CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());

        JobConf job = new JobConf(getConf());

        if (results.hasOption("input")) {
            FileInputFormat.setInputPaths(job, (String) results.getOptionValue("input"));
        }
        if (results.hasOption("output")) {
            FileOutputFormat.setOutputPath(job, new Path((String) results.getOptionValue("output")));
        }
        if (results.hasOption("jar")) {
            job.setJar((String) results.getOptionValue("jar"));
        }
        // Supplying a Java InputFormat implies a Java RecordReader as well.
        if (results.hasOption("inputformat")) {
            setIsJavaRecordReader(job, true);
            job.setInputFormat(getClass(results, "inputformat", job, InputFormat.class));
        }
        if (results.hasOption("javareader")) {
            setIsJavaRecordReader(job, true);
        }
        if (results.hasOption("map")) {
            setIsJavaMapper(job, true);
            job.setMapperClass(getClass(results, "map", job, Mapper.class));
        }
        if (results.hasOption("partitioner")) {
            job.setPartitionerClass(getClass(results, "partitioner", job, Partitioner.class));
        }
        if (results.hasOption("reduce")) {
            setIsJavaReducer(job, true);
            job.setReducerClass(getClass(results, "reduce", job, Reducer.class));
        }
        if (results.hasOption("reduces")) {
            job.setNumReduceTasks(Integer.parseInt((String) results.getOptionValue("reduces")));
        }
        if (results.hasOption("writer")) {
            setIsJavaRecordWriter(job, true);
            job.setOutputFormat(getClass(results, "writer", job, OutputFormat.class));
        }
        if (results.hasOption("program")) {
            setExecutable(job, (String) results.getOptionValue("program"));
        }
        // Deprecated -jobconf: a comma-separated list of key=value pairs.
        if (results.hasOption("jobconf")) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            String options = (String) results.getOptionValue("jobconf");
            StringTokenizer tokenizer = new StringTokenizer(options, ",");
            while (tokenizer.hasMoreTokens()) {
                String keyVal = tokenizer.nextToken().trim();
                // NOTE(review): a token without '=' yields a single-element
                // array, so keyValSplit[1] throws ArrayIndexOutOfBoundsException.
                String[] keyValSplit = keyVal.split("=", 2);
                job.set(keyValSplit[0], keyValSplit[1]);
            }
        }
        // if they gave us a jar file, include it into the class path
        String jarFile = job.getJar();
        if (jarFile != null) {
            final URL[] urls = new URL[] { FileSystem.getLocal(job).pathToFile(new Path(jarFile)).toURL() };
            //FindBugs complains that creating a URLClassLoader should be
            //in a doPrivileged() block. 
            ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                public ClassLoader run() {
                    return new URLClassLoader(urls);
                }
            });
            job.setClassLoader(loader);
        }

        runJob(job);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }

}

From source file:org.apache.hadoop.mapred.pipes.SubmitterToAccels.java

/**
 * Command-line entry point: registers the accelerator pipes options,
 * parses the arguments and submits the configured job.
 *
 * @param args raw command-line arguments
 * @return 0 when the job was submitted, 1 on a usage or parse error
 */
@Override
public int run(String[] args) throws Exception {
    CommandLineParser commandLine = new CommandLineParser();
    if (args.length == 0) {
        commandLine.printUsage();
        return 1;
    }

    commandLine.addOption("input", false, "input path to the maps", "path");
    commandLine.addOption("output", false, "output path from the reduces", "path");

    commandLine.addOption("cpubin", false, "URI to application cpu executable", "class");
    commandLine.addOption("gpubin", false, "URI to application gpu executable", "class");

    Parser optionParser = commandLine.createParser();
    try {
        // Strip generic Hadoop options (-conf, -D, ...) first, then parse
        // whatever remains against our own option set.
        GenericOptionsParser genericOptions = new GenericOptionsParser(getConf(), args);
        CommandLine parsed = optionParser.parse(commandLine.options, genericOptions.getRemainingArgs());
        JobConf jobConf = new JobConf(getConf());

        if (parsed.hasOption("input")) {
            FileInputFormat.setInputPaths(jobConf, (String) parsed.getOptionValue("input"));
        }
        if (parsed.hasOption("output")) {
            FileOutputFormat.setOutputPath(jobConf, new Path((String) parsed.getOptionValue("output")));
        }
        if (parsed.hasOption("cpubin")) {
            setCPUExecutable(jobConf, (String) parsed.getOptionValue("cpubin"));
        }
        if (parsed.hasOption("gpubin")) {
            setGPUExecutable(jobConf, (String) parsed.getOptionValue("gpubin"));
        }

        // A user-supplied job jar is added to this JVM's class path.
        String jar = jobConf.getJar();
        if (jar != null) {
            final URL[] jarUrls = new URL[] { FileSystem.getLocal(jobConf).pathToFile(new Path(jar)).toURL() };
            // FindBugs asks for URLClassLoader creation inside doPrivileged().
            ClassLoader jarLoader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                public ClassLoader run() {
                    return new URLClassLoader(jarUrls);
                }
            });
            jobConf.setClassLoader(jarLoader);
        }
        runJob(jobConf);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error :" + pe);
        commandLine.printUsage();
        return 1;
    }
}

From source file:org.apache.hama.pipes.Submitter.java

// Parses the Hama pipes command-line options, translates them into BSPJob
// settings, and submits the job. Returns 0 on success, 1 on a usage or
// parse error.
@Override
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }

    LOG.debug("Hama pipes Submitter started!");

    cli.addOption("input", false, "input path for bsp", "path");
    cli.addOption("output", false, "output path from bsp", "path");

    cli.addOption("jar", false, "job jar file", "path");
    cli.addOption("inputformat", false, "java classname of InputFormat", "class");
    // cli.addArgument("javareader", false, "is the RecordReader in Java");

    cli.addOption("partitioner", false, "java classname of Partitioner", "class");
    cli.addOption("outputformat", false, "java classname of OutputFormat", "class");

    cli.addOption("cachefiles", false, "additional cache files to add", "space delimited paths");

    cli.addOption("interpreter", false, "interpreter, like python or bash", "executable");

    cli.addOption("jobname", false, "the jobname", "name");

    cli.addOption("programArgs", false, "program arguments", "arguments");
    cli.addOption("bspTasks", false, "how many bsp tasks to launch", "number");
    cli.addOption("streaming", false, "if supplied, streaming is used instead of pipes", "");

    cli.addOption("jobconf", false,
            "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.", "key=val");

    cli.addOption("program", false, "URI to application executable", "class");
    Parser parser = cli.createParser();
    try {

        // check generic arguments -conf
        GenericOptionsParser genericParser = new GenericOptionsParser(getConf(), args);
        // get other arguments
        CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());

        BSPJob job = new BSPJob(getConf());

        if (results.hasOption("input")) {
            FileInputFormat.setInputPaths(job, results.getOptionValue("input"));
        }
        if (results.hasOption("output")) {
            FileOutputFormat.setOutputPath(job, new Path(results.getOptionValue("output")));
        }
        if (results.hasOption("jar")) {
            job.setJar(results.getOptionValue("jar"));
        }

        if (results.hasOption("jobname")) {
            job.setJobName(results.getOptionValue("jobname"));
        }

        // NOTE(review): class lookups below use the `conf` field while most
        // other settings go through `job` / job.getConfiguration() —
        // presumably these refer to the same configuration; verify.
        if (results.hasOption("inputformat")) {
            job.setInputFormat(getClass(results, "inputformat", conf, InputFormat.class));
        }

        if (results.hasOption("partitioner")) {
            job.setPartitioner(getClass(results, "partitioner", conf, Partitioner.class));
        }

        if (results.hasOption("outputformat")) {
            job.setOutputFormat(getClass(results, "outputformat", conf, OutputFormat.class));
        }

        if (results.hasOption("streaming")) {
            LOG.info("Streaming enabled!");
            job.set("hama.streaming.enabled", "true");
        }

        // Deprecated -jobconf: a comma-separated list of key=value pairs.
        if (results.hasOption("jobconf")) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            String options = results.getOptionValue("jobconf");
            StringTokenizer tokenizer = new StringTokenizer(options, ",");
            while (tokenizer.hasMoreTokens()) {
                String keyVal = tokenizer.nextToken().trim();
                // NOTE(review): a token without '=' yields a single-element
                // array, so keyValSplit[1] throws ArrayIndexOutOfBoundsException.
                String[] keyValSplit = keyVal.split("=", 2);
                job.set(keyValSplit[0], keyValSplit[1]);
            }
        }

        // bspTasks bounds both the local task maximum and the peer count.
        if (results.hasOption("bspTasks")) {
            int optionValue = Integer.parseInt(results.getOptionValue("bspTasks"));
            conf.setInt("bsp.local.tasks.maximum", optionValue);
            conf.setInt("bsp.peers.num", optionValue);
        }

        // The executable is shipped to the workers via the distributed cache.
        if (results.hasOption("program")) {
            String executablePath = results.getOptionValue("program");
            setExecutable(job.getConfiguration(), executablePath);
            DistributedCache.addCacheFile(new Path(executablePath).toUri(), conf);
        }

        if (results.hasOption("interpreter")) {
            job.getConfiguration().set("hama.pipes.executable.interpretor",
                    results.getOptionValue("interpreter"));
        }

        if (results.hasOption("programArgs")) {
            job.getConfiguration().set("hama.pipes.executable.args",
                    Joiner.on(" ").join(results.getOptionValues("programArgs")));
            // job.getConfiguration().set("hama.pipes.resolve.executable.args",
            // "true");
        }

        // Each -cachefiles value may be a glob; matching files (not
        // directories) are added to the distributed cache.
        if (results.hasOption("cachefiles")) {
            FileSystem fs = FileSystem.get(getConf());
            String[] optionValues = results.getOptionValues("cachefiles");
            for (String s : optionValues) {
                Path path = new Path(s);
                FileStatus[] globStatus = fs.globStatus(path);
                for (FileStatus f : globStatus) {
                    if (!f.isDir()) {
                        DistributedCache.addCacheFile(f.getPath().toUri(), job.getConfiguration());
                    } else {
                        LOG.info("Ignoring directory " + f.getPath() + " while globbing.");
                    }
                }
            }
        }

        // if they gave us a jar file, include it into the class path
        String jarFile = job.getJar();
        if (jarFile != null) {
            @SuppressWarnings("deprecation")
            final URL[] urls = new URL[] { FileSystem.getLocal(conf).pathToFile(new Path(jarFile)).toURL() };
            // FindBugs complains that creating a URLClassLoader should be
            // in a doPrivileged() block.
            ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
                @Override
                public ClassLoader run() {
                    return new URLClassLoader(urls);
                }
            });
            conf.setClassLoader(loader);
        }

        runJob(job);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }

}

From source file:org.apache.hama.pipes.util.SequenceFileDumper.java

/**
 * Dumps the key/value records of a SequenceFile to a file or to the console.
 *
 * Options: -file (input sequence file, required to dump), -output (target
 * file, console when absent), -substring (truncate values to N chars),
 * -count (report only the record count).
 *
 * Fix over the original: the SequenceFile reader and the output writer are
 * now closed in finally blocks, so they are no longer leaked when reading
 * or writing fails part-way through.
 */
public static void main(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return;
    }

    // Add arguments
    cli.addOption("file", false, "The Sequence File containing the Clusters", "path");
    cli.addOption("output", false, "The output file.  If not specified, dumps to the console", "path");
    cli.addOption("substring", false, "The number of chars of the FormatString() to print", "number");
    cli.addOption("count", false, "Report the count only", "number");

    Parser parser = cli.createParser();
    try {
        HamaConfiguration conf = new HamaConfiguration();
        CommandLine cmdLine = parser.parse(cli.options, args);

        if (cmdLine.hasOption("file")) {
            Path path = new Path(cmdLine.getOptionValue("file"));

            FileSystem fs = FileSystem.get(path.toUri(), conf);
            if (!fs.isFile(path)) {
                System.out.println("File does not exist: " + path.toString());
                return;
            }
            SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
            try {
                // Write either to the requested file or to the console.
                Writer writer;
                if (cmdLine.hasOption("output")) {
                    writer = new FileWriter(cmdLine.getOptionValue("output"));
                } else {
                    writer = new OutputStreamWriter(System.out);
                }
                try {
                    writer.append("Input Path: ").append(String.valueOf(path)).append(LINE_SEP);

                    // Truncate printed values to at most -substring characters.
                    int sub = Integer.MAX_VALUE;
                    if (cmdLine.hasOption("substring")) {
                        sub = Integer.parseInt(cmdLine.getOptionValue("substring"));
                    }

                    // NullWritable is a singleton and cannot be instantiated
                    // reflectively, so it is special-cased for key and value.
                    Writable key;
                    if (reader.getKeyClass() != NullWritable.class) {
                        key = (Writable) reader.getKeyClass().newInstance();
                    } else {
                        key = NullWritable.get();
                    }
                    Writable value;
                    if (reader.getValueClass() != NullWritable.class) {
                        value = (Writable) reader.getValueClass().newInstance();
                    } else {
                        value = NullWritable.get();
                    }

                    writer.append("Key class: ").append(String.valueOf(reader.getKeyClass()))
                            .append(" Value Class: ").append(String.valueOf(value.getClass())).append(LINE_SEP);
                    writer.flush();

                    long count = 0;
                    boolean countOnly = cmdLine.hasOption("count");
                    if (!countOnly) {
                        // Dump every record, then the total count.
                        while (reader.next(key, value)) {
                            writer.append("Key: ").append(String.valueOf(key));
                            String str = value.toString();
                            writer.append(": Value: ").append(str.length() > sub ? str.substring(0, sub) : str);
                            writer.write(LINE_SEP);
                            writer.flush();
                            count++;
                        }
                        writer.append("Count: ").append(String.valueOf(count)).append(LINE_SEP);

                    } else { // count only
                        while (reader.next(key, value)) {
                            count++;
                        }
                        writer.append("Count: ").append(String.valueOf(count)).append(LINE_SEP);
                    }
                    writer.flush();
                } finally {
                    // Close only file-backed writers: closing the console
                    // writer would close System.out as well.
                    if (cmdLine.hasOption("output")) {
                        writer.close();
                    }
                }
            } finally {
                reader.close();
            }

        } else {
            cli.printUsage();
        }

    } catch (ParseException e) {
        LOG.error(e.getMessage());
        cli.printUsage();
        return;
    }
}

From source file:org.apache.hcatalog.cli.HCatCli.java

// Entry point for the HCatalog CLI: initializes Hive session state, parses
// the command line (commons-cli) and dispatches to query-string (-e) or
// script-file (-f) execution. All paths terminate via System.exit.
@SuppressWarnings("static-access")
public static void main(String[] args) {

    try {
        LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
        // Log4j initialization failure is treated as non-fatal here.
    }

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        System.exit(1);
    }

    HiveConf conf = ss.getConf();

    HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());

    SessionState.start(ss);

    Options options = new Options();

    // -e 'quoted-query-string'
    options.addOption(OptionBuilder.hasArg().withArgName("exec")
            .withDescription("hcat command given from command line").create('e'));

    // -f <query-file>
    options.addOption(
            OptionBuilder.hasArg().withArgName("file").withDescription("hcat commands in file").create('f'));

    // -g
    options.addOption(OptionBuilder.hasArg().withArgName("group")
            .withDescription("group for the db/table specified in CREATE statement").create('g'));

    // -p
    options.addOption(OptionBuilder.hasArg().withArgName("perms")
            .withDescription("permissions for the db/table specified in CREATE statement").create('p'));

    // -D
    options.addOption(OptionBuilder.hasArgs(2).withArgName("property=value").withValueSeparator()
            .withDescription("use hadoop value for given property").create('D'));

    // [-h|--help]
    options.addOption(new Option("h", "help", false, "Print help information"));

    Parser parser = new GnuParser();
    CommandLine cmdLine = null;

    try {
        cmdLine = parser.parse(options, args);

    } catch (ParseException e) {
        // System.exit here guarantees cmdLine is non-null below.
        printUsage(options, ss.err);
        System.exit(1);
    }
    // -e
    String execString = (String) cmdLine.getOptionValue('e');
    // -f
    String fileName = (String) cmdLine.getOptionValue('f');
    // -h
    if (cmdLine.hasOption('h')) {
        printUsage(options, ss.out);
        System.exit(0);
    }

    // -e and -f are mutually exclusive ways of supplying commands.
    if (execString != null && fileName != null) {
        ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
        printUsage(options, ss.err);
        System.exit(1);
    }

    // -p
    String perms = (String) cmdLine.getOptionValue('p');
    if (perms != null) {
        validatePermissions(ss, conf, perms);
    }

    // -g
    String grp = (String) cmdLine.getOptionValue('g');
    if (grp != null) {
        conf.set(HCatConstants.HCAT_GROUP, grp);
    }

    // -D
    setConfProperties(conf, cmdLine.getOptionProperties("D"));

    if (execString != null) {
        System.exit(processLine(execString));
    }

    try {
        if (fileName != null) {
            System.exit(processFile(fileName));
        }
    } catch (FileNotFoundException e) {
        ss.err.println("Input file not found. (" + e.getMessage() + ")");
        System.exit(1);
    } catch (IOException e) {
        ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        System.exit(1);
    }

    // Neither -e nor -f was given: print usage and fail.
    printUsage(options, ss.err);
    System.exit(1);
}

From source file:org.apache.hive.hcatalog.cli.HCatCli.java

// Entry point for the Hive HCatalog CLI: forces the MR execution engine,
// parses the command line (commons-cli) and dispatches to query-string (-e)
// or script-file (-f) execution. Exits through sysExit once the session
// state has been started.
@SuppressWarnings("static-access")
public static void main(String[] args) {

    try {
        LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
        // Log4j initialization failure is treated as non-fatal here.
    }
    LOG = LoggerFactory.getLogger(HCatCli.class);

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        System.exit(1);
    }

    HiveConf conf = ss.getConf();

    HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
    // HCat only supports the MapReduce engine; override anything else.
    String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
    final String MR_ENGINE = "mr";
    if (!MR_ENGINE.equalsIgnoreCase(engine)) {
        HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_ENGINE, MR_ENGINE);
        LOG.info("Forcing " + ConfVars.HIVE_EXECUTION_ENGINE + " to " + MR_ENGINE);
    }

    Options options = new Options();

    // -e 'quoted-query-string'
    options.addOption(OptionBuilder.hasArg().withArgName("exec")
            .withDescription("hcat command given from command line").create('e'));

    // -f <query-file>
    options.addOption(
            OptionBuilder.hasArg().withArgName("file").withDescription("hcat commands in file").create('f'));

    // -g
    options.addOption(OptionBuilder.hasArg().withArgName("group")
            .withDescription("group for the db/table specified in CREATE statement").create('g'));

    // -p
    options.addOption(OptionBuilder.hasArg().withArgName("perms")
            .withDescription("permissions for the db/table specified in CREATE statement").create('p'));

    // -D
    options.addOption(OptionBuilder.hasArgs(2).withArgName("property=value").withValueSeparator()
            .withDescription("use hadoop value for given property").create('D'));

    // [-h|--help]
    options.addOption(new Option("h", "help", false, "Print help information"));

    Parser parser = new GnuParser();
    CommandLine cmdLine = null;

    try {
        cmdLine = parser.parse(options, args);

    } catch (ParseException e) {
        printUsage(options, System.err);
        // Note, we print to System.err instead of ss.err, because if we can't parse our
        // commandline, we haven't even begun, and therefore cannot be expected to have
        // reasonably constructed or started the SessionState.
        System.exit(1);
    }

    // -D : process these first, so that we can instantiate SessionState appropriately.
    setConfProperties(conf, cmdLine.getOptionProperties("D"));

    // Now that the properties are in, we can instantiate SessionState.
    SessionState.start(ss);

    // -h
    if (cmdLine.hasOption('h')) {
        printUsage(options, ss.out);
        sysExit(ss, 0);
    }

    // -e
    String execString = (String) cmdLine.getOptionValue('e');

    // -f : mutually exclusive with -e.
    String fileName = (String) cmdLine.getOptionValue('f');
    if (execString != null && fileName != null) {
        ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
        printUsage(options, ss.err);
        sysExit(ss, 1);
    }

    // -p
    String perms = (String) cmdLine.getOptionValue('p');
    if (perms != null) {
        validatePermissions(ss, conf, perms);
    }

    // -g
    String grp = (String) cmdLine.getOptionValue('g');
    if (grp != null) {
        conf.set(HCatConstants.HCAT_GROUP, grp);
    }

    // all done parsing, let's run stuff!

    if (execString != null) {
        sysExit(ss, processLine(execString));
    }

    try {
        if (fileName != null) {
            sysExit(ss, processFile(fileName));
        }
    } catch (FileNotFoundException e) {
        ss.err.println("Input file not found. (" + e.getMessage() + ")");
        sysExit(ss, 1);
    } catch (IOException e) {
        ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        sysExit(ss, 1);
    }

    // Neither -e nor -f was given: print usage and fail.
    printUsage(options, ss.err);
    sysExit(ss, 1);
}

From source file:org.apache.hive.hcatalog.streaming.StreamingIntegrationTester.java

/**
 * Entry point for the streaming integration tester: builds the commons-cli
 * option set, parses the arguments, and launches the tester with the
 * parsed parameters.
 *
 * Fix over the original: on a ParseException the code previously fell
 * through after calling usage(options) and dereferenced the still-null
 * cmdline, producing a NullPointerException; it now returns explicitly.
 */
public static void main(String[] args) {

    try {
        LogUtils.initHiveLog4j();
    } catch (LogUtils.LogInitializationException e) {
        System.err.println("Unable to initialize log4j " + StringUtils.stringifyException(e));
        System.exit(-1);
    }

    Options options = new Options();

    options.addOption(OptionBuilder.hasArg().withArgName("abort-pct")
            .withDescription("Percentage of transactions to abort, defaults to 5").withLongOpt("abortpct")
            .create('a'));

    options.addOption(OptionBuilder.hasArgs().withArgName("column-names")
            .withDescription("column names of table to write to").withLongOpt("columns").withValueSeparator(',')
            .isRequired().create('c'));

    options.addOption(OptionBuilder.hasArg().withArgName("database")
            .withDescription("Database of table to write to").withLongOpt("database").isRequired().create('d'));

    options.addOption(OptionBuilder.hasArg().withArgName("frequency")
            .withDescription("How often to commit a transaction, in seconds, defaults to 1")
            .withLongOpt("frequency").create('f'));

    options.addOption(OptionBuilder.hasArg().withArgName("iterations")
            .withDescription("Number of batches to write, defaults to 10").withLongOpt("num-batches")
            .create('i'));

    options.addOption(OptionBuilder.hasArg().withArgName("metastore-uri")
            .withDescription("URI of Hive metastore").withLongOpt("metastore-uri").isRequired().create('m'));

    options.addOption(OptionBuilder.hasArg().withArgName("num_transactions")
            .withDescription("Number of transactions per batch, defaults to 100").withLongOpt("num-txns")
            .create('n'));

    options.addOption(OptionBuilder.hasArgs().withArgName("partition-values")
            .withDescription("partition values, must be provided in order of partition columns, "
                    + "if not provided table is assumed to not be partitioned")
            .withLongOpt("partition").withValueSeparator(',').create('p'));

    options.addOption(OptionBuilder.hasArg().withArgName("records-per-transaction")
            .withDescription("records to write in each transaction, defaults to 100")
            .withLongOpt("records-per-txn").withValueSeparator(',').create('r'));

    options.addOption(OptionBuilder.hasArgs().withArgName("column-types")
            .withDescription("column types, valid values are string, int, float, decimal, date, " + "datetime")
            .withLongOpt("schema").withValueSeparator(',').isRequired().create('s'));

    options.addOption(OptionBuilder.hasArg().withArgName("table").withDescription("Table to write to")
            .withLongOpt("table").isRequired().create('t'));

    options.addOption(OptionBuilder.hasArg().withArgName("num-writers")
            .withDescription("Number of writers to create, defaults to 2").withLongOpt("writers").create('w'));

    options.addOption(OptionBuilder.hasArg(false).withArgName("pause")
            .withDescription("Wait on keyboard input after commit & batch close. default: disabled")
            .withLongOpt("pause").create('x'));

    Parser parser = new GnuParser();
    CommandLine cmdline = null;
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        usage(options);
        // Without this return, cmdline stays null and the dereferences
        // below throw a NullPointerException.
        return;
    }

    boolean pause = cmdline.hasOption('x');
    String db = cmdline.getOptionValue('d');
    String table = cmdline.getOptionValue('t');
    String uri = cmdline.getOptionValue('m');
    int txnsPerBatch = Integer.parseInt(cmdline.getOptionValue('n', "100"));
    int writers = Integer.parseInt(cmdline.getOptionValue('w', "2"));
    int batches = Integer.parseInt(cmdline.getOptionValue('i', "10"));
    int recordsPerTxn = Integer.parseInt(cmdline.getOptionValue('r', "100"));
    int frequency = Integer.parseInt(cmdline.getOptionValue('f', "1"));
    int ap = Integer.parseInt(cmdline.getOptionValue('a', "5"));
    float abortPct = ((float) ap) / 100.0f;
    String[] partVals = cmdline.getOptionValues('p');
    String[] cols = cmdline.getOptionValues('c');
    String[] types = cmdline.getOptionValues('s');

    StreamingIntegrationTester sit = new StreamingIntegrationTester(db, table, uri, txnsPerBatch, writers,
            batches, recordsPerTxn, frequency, abortPct, partVals, cols, types, pause);
    sit.go();
}

From source file:org.apache.jackrabbit.standalone.cli.JcrClient.java

/**
 * Runs the client: parses the given arguments, prints the welcome
 * banner and then enters either scripted or interactive mode.
 *
 * @param args
 *        the arguments
 */
private void run(String[] args) {
    try {
        // Parse the arguments against the instance-level option set.
        Parser cliParser = new BasicParser();
        org.apache.commons.cli.CommandLine parsedLine = cliParser.parse(options, args);

        // Apply a locale option, if present, before producing any output.
        this.setLocale(parsedLine);

        // Greet the user in the configured locale.
        System.out.println(bundle.getString("word.welcome"));

        // A "source" argument selects the non-interactive (scripted) mode.
        boolean scripted = parsedLine.hasOption("source");
        if (scripted) {
            this.runNonInteractive(parsedLine);
        } else {
            this.runInteractive();
        }
    } catch (Exception e) {
        // On any failure show the usage help plus the stack trace.
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.printHelp("jcrclient", options);
        e.printStackTrace();
        return;
    }
}

From source file:org.apache.reef.tang.formats.CommandLine.java

/**
 * Process command line arguments.
 *
 * @param <T> a type
 * @param args the command line arguments to be parsed
 * @param argClasses the target named classes to be set
 * @return this instance if the command line parsing succeeded, null when
 *         help was requested (or an exception on failure).
 * @throws IOException if parsing fails
 * @throws BindException if a binding of short-named parameter fails
 */
@SafeVarargs
public final <T> CommandLine processCommandLine(final String[] args,
        final Class<? extends Name<?>>... argClasses) throws IOException, BindException {

    // Register each named parameter so it becomes a recognized short option.
    for (final Class<? extends Name<?>> c : argClasses) {
        registerShortNameOfClass(c);
    }

    final Options o = getCommandLineOptions();
    o.addOption(new Option("?", "help"));
    final Parser g = new GnuParser();

    final org.apache.commons.cli.CommandLine cl;
    try {
        cl = g.parse(o, args);
    } catch (final ParseException e) {
        // Surface parse failures as IOException, preserving the cause.
        throw new IOException("Could not parse config file", e);
    }

    // -? / --help short-circuits: print usage and signal "stop" with null.
    if (cl.hasOption("?")) {
        new HelpFormatter().printHelp("reef", o);
        return null;
    }

    for (final Option option : cl.getOptions()) {

        final String shortName = option.getOpt();
        // NOTE(review): value is null for flag-style options without an
        // argument — presumably bind tolerates that; verify.
        final String value = option.getValue();

        // Application-registered options get their own handler; everything
        // else is bound into the configuration by its short name.
        if (applicationOptions.containsKey(option)) {
            applicationOptions.get(option).process(option);
        } else {
            try {
                conf.bind(shortNames.get(shortName), value);
            } catch (final BindException e) {
                throw new BindException("Could not bind shortName " + shortName + " to value " + value, e);
            }
        }
    }

    return this;
}

From source file:org.apache.sentry.binding.hive.authz.SentryConfigTool.java

/**
 * Parses command-line arguments and applies them to this tool's settings.
 *
 * <pre>
 *   -d,--debug                  Enable debug output
 *   -e,--query &lt;arg&gt;            Query privilege verification, requires -u
 *   -h,--help                   Print usage
 *   -i,--policyIni &lt;arg&gt;        Policy file path
 *   -j,--jdbcURL &lt;arg&gt;          JDBC URL
 *   -l,--listPerms,-listPrivs   List privileges for given user, requires -u
 *   -p,--password &lt;arg&gt;         Password
 *   -s,--sentry-site &lt;arg&gt;      sentry-site file path
 *   -u,--user &lt;arg&gt;             user name
 *   -v,--validate               Validate policy file
 *   -I,--import                 Import policy file
 *   -E,--export                 Export policy file
 *   -o,--overwrite              Overwrite the existing role data when doing the import
 * </pre>
 *
 * On a parse failure (including a violated option constraint) the usage
 * message is printed and no settings are applied.
 *
 * @param args raw command-line arguments
 */
private void parseArgs(String[] args) {
    boolean enableDebug = false;

    Options sentryOptions = new Options();

    // Action options: exactly one of these is required per invocation
    // (enforced via the required OptionGroup below).
    Option helpOpt = new Option("h", "help", false, "Print usage");
    helpOpt.setRequired(false);

    Option validateOpt = new Option("v", "validate", false, "Validate policy file");
    validateOpt.setRequired(false);

    Option queryOpt = new Option("e", "query", true, "Query privilege verification, requires -u");
    queryOpt.setRequired(false);

    Option listPermsOpt = new Option("l", "listPerms", false, "list permissions for given user, requires -u");
    listPermsOpt.setRequired(false);
    // "listPrivs" is a multi-character short option kept as an alias of -l.
    Option listPrivsOpt = new Option("listPrivs", false, "list privileges for given user, requires -u");
    listPrivsOpt.setRequired(false);

    Option importOpt = new Option("I", "import", true, "Import policy file");
    importOpt.setRequired(false);

    Option exportOpt = new Option("E", "export", true, "Export policy file");
    exportOpt.setRequired(false);

    // Required group: the user must pick one action.
    OptionGroup sentryOptGroup = new OptionGroup();
    sentryOptGroup.addOption(helpOpt);
    sentryOptGroup.addOption(validateOpt);
    sentryOptGroup.addOption(queryOpt);
    sentryOptGroup.addOption(listPermsOpt);
    sentryOptGroup.addOption(listPrivsOpt);
    sentryOptGroup.addOption(importOpt);
    sentryOptGroup.addOption(exportOpt);
    sentryOptGroup.setRequired(true);
    sentryOptions.addOptionGroup(sentryOptGroup);

    // Optional modifier options.
    Option jdbcArg = new Option("j", "jdbcURL", true, "JDBC URL");
    jdbcArg.setRequired(false);
    sentryOptions.addOption(jdbcArg);

    Option sentrySitePath = new Option("s", "sentry-site", true, "sentry-site file path");
    sentrySitePath.setRequired(false);
    sentryOptions.addOption(sentrySitePath);

    Option globalPolicyPath = new Option("i", "policyIni", true, "Policy file path");
    globalPolicyPath.setRequired(false);
    sentryOptions.addOption(globalPolicyPath);

    Option userOpt = new Option("u", "user", true, "user name");
    userOpt.setRequired(false);
    sentryOptions.addOption(userOpt);

    Option passWordOpt = new Option("p", "password", true, "Password");
    // BUG FIX: original code called userOpt.setRequired(false) here a second
    // time (copy-paste error), leaving passWordOpt's flag untouched.
    passWordOpt.setRequired(false);
    sentryOptions.addOption(passWordOpt);

    Option debugOpt = new Option("d", "debug", false, "enable debug output");
    debugOpt.setRequired(false);
    sentryOptions.addOption(debugOpt);

    Option overwriteOpt = new Option("o", "overwrite", false, "enable import overwrite");
    overwriteOpt.setRequired(false);
    sentryOptions.addOption(overwriteOpt);

    try {
        Parser parser = new GnuParser();
        CommandLine cmd = parser.parse(sentryOptions, args);

        // Dispatch each parsed option to the corresponding setter.
        for (Option opt : cmd.getOptions()) {
            if (opt.getOpt().equals("s")) {
                setSentrySiteFile(opt.getValue());
            } else if (opt.getOpt().equals("i")) {
                setPolicyFile(opt.getValue());
            } else if (opt.getOpt().equals("e")) {
                setQuery(opt.getValue());
            } else if (opt.getOpt().equals("j")) {
                setJdbcURL(opt.getValue());
            } else if (opt.getOpt().equals("u")) {
                setUser(opt.getValue());
            } else if (opt.getOpt().equals("p")) {
                setPassWord(opt.getValue());
            } else if (opt.getOpt().equals("l") || opt.getOpt().equals("listPrivs")) {
                setListPrivs(true);
            } else if (opt.getOpt().equals("v")) {
                setValidate(true);
            } else if (opt.getOpt().equals("I")) {
                setImportPolicyFilePath(opt.getValue());
            } else if (opt.getOpt().equals("E")) {
                setExportPolicyFilePath(opt.getValue());
            } else if (opt.getOpt().equals("h")) {
                usage(sentryOptions);
            } else if (opt.getOpt().equals("d")) {
                enableDebug = true;
            } else if (opt.getOpt().equals("o")) {
                setImportOverwriteRole(true);
            }
        }

        // Cross-option constraints that Commons CLI cannot express directly.
        if (isListPrivs() && (getUser() == null)) {
            throw new ParseException("Can't use -l without -u ");
        }
        if ((getQuery() != null) && (getUser() == null)) {
            throw new ParseException("Must use -u with -e ");
        }
    } catch (ParseException e1) {
        // Best-effort CLI tool: report usage instead of propagating.
        usage(sentryOptions);
    }

    if (!enableDebug) {
        // Suppress all logging unless -d/--debug was given.
        LogManager.getRootLogger().setLevel(Level.OFF);
    }
}