Example usage for org.apache.commons.cli Parser parse

Introduction

On this page you can find example usage for org.apache.commons.cli Parser.parse.

Prototype

public CommandLine parse(Options options, String[] arguments) throws ParseException 

Document

Parses the specified arguments based on the specified Options.
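
The following is a minimal, self-contained sketch of the call; the option names (-v/--verbose and -f/--file) are made up for the demo. Note that Parser and BasicParser were deprecated in Commons CLI 1.3 in favor of DefaultParser; the examples on this page use the older 1.x API.

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.Parser;

public class ParseDemo {
    public static void main(String[] args) {
        // Declare the options the parser should recognize.
        Options opts = new Options();
        opts.addOption("v", "verbose", false, "enable verbose output");
        opts.addOption("f", "file", true, "input file to process");

        Parser parser = new BasicParser();
        try {
            // parse() matches the arguments against the declared options.
            CommandLine cmd = parser.parse(opts, args);
            if (cmd.hasOption("v")) {
                System.out.println("verbose mode enabled");
            }
            if (cmd.hasOption("f")) {
                System.out.println("input file: " + cmd.getOptionValue("f"));
            }
        } catch (ParseException e) {
            // Thrown on unrecognized options or a missing option argument.
            System.err.println("Parsing failed: " + e.getMessage());
        }
    }
}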

Usage

From source file:nor.core.Nor.java

/**
 * Application entry point.
 * @param args command-line arguments
 */
@SuppressWarnings("static-access")
public static void main(final String[] args) {
    LOGGER.info("main", "Start up...");

    final Options ops = new Options();
    ops.addOption(OptionBuilder.withArgName("dir").hasArg()
            .withDescription("set directory which has config files").create("config"));

    ops.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("use given configu file for logging system").create("log"));

    final Option pluginsPath = OptionBuilder.withArgName("dir").hasArg()
            .withDescription("use given directory for a serch path of plugins").create("plugin_dir");
    ops.addOption(pluginsPath);

    final Option plugins = OptionBuilder.withArgName("file").hasArg().withDescription("use given plugin file")
            .create("plugin");
    ops.addOption(plugins);

    ops.addOption("help", false, "show this help");

    try {

        final Parser parser = new BasicParser();
        final CommandLine cmd = parser.parse(ops, args);

        if (cmd.hasOption("help")) {

            final HelpFormatter help = new HelpFormatter();
            help.printHelp("nor", ops, true);
            System.exit(0);

        }

        // Configure about logging system.
        InputStream logStream;
        if (cmd.hasOption("log")) {

            logStream = new FileInputStream(cmd.getOptionValue("log"));

        } else {

            final String file = System.getProperty("nor.log", LoggindConfigFile);
            logStream = Nor.class.getResourceAsStream(file);

        }
        Logger.loadConfig(logStream);
        logStream.close();

        // Create the application instance by given config directory
        if (cmd.hasOption("config")) {

            Nor.nor = new Nor(cmd.getOptionValue("config"));

        } else {

            Nor.nor = new Nor(System.getProperty("nor.config", "config"));

        }

        // Load plugins
        final List<URL> pluginJar = new ArrayList<URL>();
        if (cmd.hasOption("plugin")) {

            for (final String filename : cmd.getOptionValues("plugin")) {

                final File f = new File(filename);
                pluginJar.add(f.toURI().toURL());

            }

        }
        if (cmd.hasOption("plugin_dir")) {

            for (final String dirname : cmd.getOptionValues("plugin_dir")) {

                final File dir = new File(dirname);
                if (dir.isDirectory()) {

                    for (final String filename : dir.list()) {

                        final File f = new File(dir, filename);
                        pluginJar.add(f.toURI().toURL());

                    }

                }

            }

        }
        nor.init(pluginJar);

        nor.start();

        // Waiting for end
        System.in.read();

        // Closing
        nor.close();

    } catch (final UnrecognizedOptionException e) {

        final HelpFormatter help = new HelpFormatter();
        help.printHelp("nor", ops, true);

    } catch (final Exception e) {

        LOGGER.catched(Level.SEVERE, "main", e);

    }

    LOGGER.info("main", "End.");

}

From source file:org.apache.accumulo.examples.simple.mapreduce.WordCount.java

public int run(String[] unprocessed_args) throws Exception {
    Parser p = new BasicParser();

    CommandLine cl = p.parse(opts, unprocessed_args);
    String[] args = cl.getArgs();

    String username = cl.getOptionValue(usernameOpt.getOpt(), "root");
    String password = cl.getOptionValue(passwordOpt.getOpt(), "secret");

    if (args.length != 4) {
        System.out.println("ERROR: Wrong number of parameters: " + args.length + " instead of 4.");
        return printUsage();
    }

    Job job = new Job(getConf(), WordCount.class.getName());
    job.setJarByClass(this.getClass());

    job.setInputFormatClass(TextInputFormat.class);
    TextInputFormat.setInputPaths(job, new Path(args[2]));

    job.setMapperClass(MapClass.class);

    job.setNumReduceTasks(0);

    job.setOutputFormatClass(AccumuloOutputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Mutation.class);
    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), username, password.getBytes(), true, args[3]);
    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
    job.waitForCompletion(true);
    return 0;
}

From source file:org.apache.accumulo.server.test.CreateTestTable.java

public static void main(String[] args) throws Exception {
    setupOptions();

    Parser p = new BasicParser();
    CommandLine cl = null;

    try {
        cl = p.parse(opts, args);
    } catch (ParseException e) {
        throw new RuntimeException(e);
    }
    String[] rargs = cl.getArgs();
    if (rargs.length != 1) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp(" <count> ", opts);
        return;
    }
    count = Integer.parseInt(rargs[0]);
    readOnly = cl.hasOption(readonlyOpt.getOpt());
    user = cl.getOptionValue(usernameOpt.getOpt(), "root");
    password = cl.getOptionValue(passwordOpt.getOpt(), "secret");

    // create the test table within accumulo
    String table = "mrtest1";
    Connector connector;

    connector = HdfsZooInstance.getInstance().getConnector(user, password.getBytes());

    if (!readOnly) {
        TreeSet<Text> keys = new TreeSet<Text>();
        for (int i = 0; i < count / 100; i++) {
            keys.add(new Text(String.format("%05d", i * 100)));
        }

        // presplit
        connector.tableOperations().create(table);
        connector.tableOperations().addSplits(table, keys);
        BatchWriter b = connector.createBatchWriter(table, 10000000L, 1000000L, 10);

        // populate
        for (int i = 0; i < count; i++) {
            Mutation m = new Mutation(new Text(String.format("%05d", i)));
            m.put(new Text("col" + Integer.toString((i % 3) + 1)), new Text("qual"),
                    new Value("junk".getBytes()));
            b.addMutation(m);
        }

        b.close();

    }

    readBack(connector, count);

}

From source file:org.apache.accumulo.server.test.functional.FunctionalTest.java

public static void main(String[] args) throws Exception {
    Parser p = new BasicParser();

    CommandLine cl = null;
    try {
        cl = p.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Parse Exception, exiting.");
        return;
    }

    String master = cl.getOptionValue(masterOpt.getOpt(), "localhost");
    String username = cl.getOptionValue(usernameOpt.getOpt(), "root");
    String password = cl.getOptionValue(passwordOpt.getOpt(), "secret");
    String instanceName = cl.getOptionValue(instanceNameOpt.getOpt(), "FuncTest");

    String[] remainingArgs = cl.getArgs();
    String clazz = remainingArgs[0];
    String opt = remainingArgs[1];

    Class<? extends FunctionalTest> testClass = AccumuloClassLoader.loadClass(clazz, FunctionalTest.class);
    FunctionalTest fTest = testClass.newInstance();

    fTest.setMaster(master);
    fTest.setUsername(username);
    fTest.setPassword(password);
    fTest.setInstanceName(instanceName);

    if (opt.equals("getConfig")) {
        Map<String, String> iconfig = fTest.getInitialConfig();
        System.out.println("{");
        for (Entry<String, String> entry : iconfig.entrySet()) {
            System.out.println("'" + entry.getKey() + "':'" + entry.getValue() + "',");
        }
        System.out.println("}");
    } else if (opt.equals("setup")) {
        fTest.setup();
    } else if (opt.equals("run")) {
        fTest.run();
    } else if (opt.equals("cleanup")) {
        fTest.cleanup();
    }

}

From source file:org.apache.accumulo.server.test.QueryMetadataTable.java

public static void main(String[] args)
        throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
    Option usernameOpt = new Option("username", "username", true, "username");
    Option passwordOpt = new Option("password", "password", true, "password");

    Options opts = new Options();

    opts.addOption(usernameOpt);
    opts.addOption(passwordOpt);

    Parser p = new BasicParser();
    CommandLine cl = null;
    try {
        cl = p.parse(opts, args);
    } catch (ParseException e1) {
        System.out.println("Parse Exception, exiting.");
        return;
    }

    if (cl.getArgs().length != 2) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("queryMetadataTable <numQueries> <numThreads> ", opts);
        return;
    }
    String[] rargs = cl.getArgs();

    int numQueries = Integer.parseInt(rargs[0]);
    int numThreads = Integer.parseInt(rargs[1]);
    credentials = new AuthInfo(cl.getOptionValue("username", "root"),
            ByteBuffer.wrap(cl.getOptionValue("password", "secret").getBytes()),
            HdfsZooInstance.getInstance().getInstanceID());

    Connector connector = HdfsZooInstance.getInstance().getConnector(credentials.user, credentials.password);
    Scanner scanner = connector.createScanner(Constants.METADATA_TABLE_NAME, Constants.NO_AUTHS);
    scanner.setBatchSize(20000);
    Text mdrow = new Text(KeyExtent.getMetadataEntry(new Text(Constants.METADATA_TABLE_ID), null));

    HashSet<Text> rowSet = new HashSet<Text>();

    int count = 0;

    for (Entry<Key, Value> entry : scanner) {
        System.out.print(".");
        if (count % 72 == 0) {
            System.out.printf(" %,d\n", count);
        }
        if (entry.getKey().compareRow(mdrow) == 0 && entry.getKey().getColumnFamily()
                .compareTo(Constants.METADATA_CURRENT_LOCATION_COLUMN_FAMILY) == 0) {
            System.out.println(entry.getKey() + " " + entry.getValue());
            location = entry.getValue().toString();
        }

        if (!entry.getKey().getRow().toString().startsWith(Constants.METADATA_TABLE_ID))
            rowSet.add(entry.getKey().getRow());
        count++;

    }

    System.out.printf(" %,d\n", count);

    ArrayList<Text> rows = new ArrayList<Text>(rowSet);

    Random r = new Random();

    ExecutorService tp = Executors.newFixedThreadPool(numThreads);

    long t1 = System.currentTimeMillis();

    for (int i = 0; i < numQueries; i++) {
        int index = r.nextInt(rows.size());
        MDTQuery mdtq = new MDTQuery(rows.get(index));
        tp.submit(mdtq);
    }

    tp.shutdown();

    try {
        tp.awaitTermination(1, TimeUnit.HOURS);
    } catch (InterruptedException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }

    long t2 = System.currentTimeMillis();
    double delta = (t2 - t1) / 1000.0;
    System.out.println("time : " + delta + "  queries per sec : " + (numQueries / delta));
}

From source file:org.apache.accumulo.server.test.TestIngest.java

public static IngestArgs parseArgs(String[] args) {

    Parser p = new BasicParser();
    Options opts = getOptions();
    CommandLine cl;

    try {
        cl = p.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Parse Error, exiting.");
        throw new RuntimeException(e);
    }

    if (cl.getArgs().length != 3) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("test_ingest <rows> <start_row> <num_columns>", getOptions());
        throw new RuntimeException();
    }

    IngestArgs ia = new IngestArgs();

    if (cl.hasOption("size")) {
        ia.dataSize = Integer.parseInt(cl.getOptionValue("size"));
    }
    if (cl.hasOption("colf")) {
        ia.columnFamily = cl.getOptionValue("colf");
    }
    if (cl.hasOption("timestamp")) {
        ia.timestamp = Long.parseLong(cl.getOptionValue("timestamp"));
        ia.hasTimestamp = true;
    }
    if (cl.hasOption("mapFile")) {
        ia.outputToMapFile = true;
        ia.outputFile = cl.getOptionValue("mapFile");
    }
    if (cl.hasOption("rfile")) {
        ia.outputToRFile = true;
        ia.outputFile = cl.getOptionValue("rfile");
    }
    if (ia.outputToMapFile && ia.outputToRFile) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("Cannot output to both an rfile and a map file", getOptions());
        throw new RuntimeException();
    }
    ia.delete = cl.hasOption("delete");
    ia.useGet = cl.hasOption("useGet");
    if (cl.hasOption("random")) {
        ia.random = true;
        ia.seed = Integer.parseInt(cl.getOptionValue("random"));
    }
    if (cl.hasOption("stride")) {
        ia.stride = Integer.parseInt(cl.getOptionValue("stride"));
    }
    ia.useTsbw = cl.hasOption("tsbw");

    username = cl.getOptionValue("username", "root");
    passwd = cl.getOptionValue("password", "secret");

    String[] requiredArgs = cl.getArgs();

    ia.rows = Integer.parseInt(requiredArgs[0]);
    ia.startRow = Integer.parseInt(requiredArgs[1]);
    ia.cols = Integer.parseInt(requiredArgs[2]);

    if (cl.hasOption("trace")) {
        ia.trace = true;
    }
    return ia;
}

From source file:org.apache.accumulo.server.test.TestMultiTableIngest.java

public static void main(String[] args) throws Exception {

    Parser p = new BasicParser();
    CommandLine cl = null;

    try {
        cl = p.parse(opts, args);
    } catch (ParseException e) {
        throw new RuntimeException(e);
    }
    String[] rargs = cl.getArgs();
    if (rargs.length != 0) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("TestMultiTableIngest", opts);
        return;
    }
    count = Integer.parseInt(cl.getOptionValue(countOpt.getOpt(), "10000"));
    tables = Integer.parseInt(cl.getOptionValue(tablesOpt.getOpt(), "5"));
    readOnly = cl.hasOption(readonlyOpt.getOpt());
    user = cl.getOptionValue(usernameOpt.getOpt(), "root");
    password = cl.getOptionValue(passwordOpt.getOpt(), "secret");

    // create the test table within accumulo
    Connector connector;
    try {
        connector = HdfsZooInstance.getInstance().getConnector(user, password.getBytes());
    } catch (AccumuloException e) {
        throw new RuntimeException(e);
    } catch (AccumuloSecurityException e) {
        throw new RuntimeException(e);
    }
    for (int i = 0; i < tables; i++) {
        tableNames.add(String.format("test_%04d", i));
    }

    if (!readOnly) {
        for (String table : tableNames)
            connector.tableOperations().create(table);

        MultiTableBatchWriter b;
        try {
            b = connector.createMultiTableBatchWriter(10000000, 1000000, 10);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

        // populate
        for (int i = 0; i < count; i++) {
            Mutation m = new Mutation(new Text(String.format("%05d", i)));
            m.put(new Text("col" + Integer.toString((i % 3) + 1)), new Text("qual"),
                    new Value("junk".getBytes()));
            b.getBatchWriter(tableNames.get(i % tableNames.size())).addMutation(m);
        }
        try {
            b.close();
        } catch (MutationsRejectedException e) {
            throw new RuntimeException(e);
        }
    }
    try {
        readBack(connector, count);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:org.apache.accumulo.server.test.TestRandomDeletes.java

public static void main(String[] args) {
    Option usernameOpt = new Option("username", "username", true, "username");
    Option passwordOpt = new Option("password", "password", true, "password");

    Options opts = new Options();

    opts.addOption(usernameOpt);
    opts.addOption(passwordOpt);

    Parser p = new BasicParser();
    CommandLine cl = null;
    try {
        cl = p.parse(opts, args);
    } catch (ParseException e1) {
        System.out.println("Parse Exception, exiting.");
        return;
    }
    credentials = new AuthInfo(cl.getOptionValue("username", "root"),
            ByteBuffer.wrap(cl.getOptionValue("password", "secret").getBytes()),
            HdfsZooInstance.getInstance().getInstanceID());

    try {
        long deleted = 0;

        Text t = new Text("test_ingest");

        TreeSet<RowColumn> doomed = scanAll(t);
        log.info("Got " + doomed.size() + " rows");

        long startTime = System.currentTimeMillis();
        while (true) {
            long half = scrambleDeleteHalfAndCheck(t, doomed);
            deleted += half;
            if (half == 0)
                break;
        }
        long stopTime = System.currentTimeMillis();

        long elapsed = (stopTime - startTime) / 1000;
        log.info("deleted " + deleted + " values in " + elapsed + " seconds");
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:org.apache.hadoop.mapred.nativetask.Submitter.java

@SuppressWarnings("deprecation")
@Override
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }

    cli.addOption("input", true, "input path to the maps", "path");
    cli.addOption("output", true, "output path from the reduces", "path");
    cli.addOption("lib", false, "extra native library used", "path");
    cli.addOption("inputformat", false, "java classname of InputFormat", "class");
    cli.addOption("outputformat", false, "java classname of OutputFormat", "class");
    cli.addOption("mapper", false, "native Mapper class", "class");
    cli.addOption("reducer", false, "native Reducer class", "class");
    cli.addOption("partitioner", false, "native Partitioner class", "class");
    cli.addOption("combiner", false, "native Combiner class", "class");
    cli.addOption("reader", false, "native RecordReader class", "class");
    cli.addOption("writer", false, "native RecordWriter class", "class");
    cli.addOption("maps", false, "number of maps(just hint)", "num");
    cli.addOption("reduces", false, "number of reduces", "num");
    cli.addOption("jobconf", false,
            "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.", "key=val");
    Parser parser = cli.createParser();
    try {

        JobConf job = new JobConf(getConf());

        GenericOptionsParser genericParser = new GenericOptionsParser(job, args);

        setConf(job);

        CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());

        if (results.hasOption("input")) {
            FileInputFormat.setInputPaths(job, (String) results.getOptionValue("input"));
        }
        if (results.hasOption("output")) {
            FileOutputFormat.setOutputPath(job, new Path((String) results.getOptionValue("output")));
        }
        if (results.hasOption("mapper")) {
            job.set("native.mapper.class", results.getOptionValue("mapper"));
        }
        if (results.hasOption("reducer")) {
            job.set("native.reducer.class", results.getOptionValue("reducer"));
        }
        if (results.hasOption("partitioner")) {
            job.set("native.partitioner.class", results.getOptionValue("partitioner"));
        }
        if (results.hasOption("combiner")) {
            job.set("native.combiner.class", results.getOptionValue("combiner"));
        }
        if (results.hasOption("reader")) {
            job.set("native.recordreader.class", results.getOptionValue("reader"));
        }
        if (results.hasOption("writer")) {
            job.set("native.recordwriter.class", results.getOptionValue("writer"));
        }
        if (results.hasOption("maps")) {
            int numMapTasks = Integer.parseInt(results.getOptionValue("maps"));
            job.setNumReduceTasks(numMapTasks);
        }
        if (results.hasOption("reduces")) {
            int numReduceTasks = Integer.parseInt(results.getOptionValue("reduces"));
            job.setNumReduceTasks(numReduceTasks);
        }
        if (results.hasOption("lib")) {
            job.set("native.class.library", results.getOptionValue("lib"));
        }
        if (results.hasOption("inputformat")) {
            job.setInputFormat(getClass(results, "inputformat", job, InputFormat.class));
        }
        if (results.hasOption("outputformat")) {
            job.setOutputFormat(getClass(results, "outputformat", job, OutputFormat.class));
        }
        if (results.hasOption("jobconf")) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            String options = (String) results.getOptionValue("jobconf");
            StringTokenizer tokenizer = new StringTokenizer(options, ",");
            while (tokenizer.hasMoreTokens()) {
                String keyVal = tokenizer.nextToken().trim();
                String[] keyValSplit = keyVal.split("=", 2);
                job.set(keyValSplit[0], keyValSplit[1]);
            }
        }
        runJob(job);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }
}

From source file:org.apache.hadoop.mapred.nativetask.tools.Submitter.java

@Override
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }

    cli.addOption("input", true, "input path to the maps", "path");
    cli.addOption("output", true, "output path from the reduces", "path");
    cli.addOption("lib", false, "extra native library used", "path");
    cli.addOption("inputformat", false, "java classname of InputFormat", "class");
    cli.addOption("outputformat", false, "java classname of OutputFormat", "class");
    cli.addOption("mapper", false, "native Mapper class", "class");
    cli.addOption("reducer", false, "native Reducer class", "class");
    cli.addOption("partitioner", false, "native Partitioner class", "class");
    cli.addOption("combiner", false, "native Combiner class", "class");
    cli.addOption("reader", false, "native RecordReader class", "class");
    cli.addOption("writer", false, "native RecordWriter class", "class");
    cli.addOption("maps", false, "number of maps(just hint)", "num");
    cli.addOption("reduces", false, "number of reduces", "num");
    cli.addOption("jobconf", false,
            "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.", "key=val");
    Parser parser = cli.createParser();
    try {

        JobConf job = new JobConf(getConf());

        GenericOptionsParser genericParser = new GenericOptionsParser(job, args);

        setConf(job);

        CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());

        if (results.hasOption("input")) {
            FileInputFormat.setInputPaths(job, results.getOptionValue("input"));
        }
        if (results.hasOption("output")) {
            FileOutputFormat.setOutputPath(job, new Path(results.getOptionValue("output")));
        }
        if (results.hasOption("mapper")) {
            job.set("native.mapper.class", results.getOptionValue("mapper"));
        }
        if (results.hasOption("reducer")) {
            job.set("native.reducer.class", results.getOptionValue("reducer"));
        }
        if (results.hasOption("partitioner")) {
            job.set("native.partitioner.class", results.getOptionValue("partitioner"));
        }
        if (results.hasOption("combiner")) {
            job.set("native.combiner.class", results.getOptionValue("combiner"));
        }
        if (results.hasOption("reader")) {
            job.set(Constants.NATIVE_RECORDREADER_CLASS, results.getOptionValue("reader"));
        }
        if (results.hasOption("writer")) {
            job.set("native.recordwriter.class", results.getOptionValue("writer"));
        }
        if (results.hasOption("maps")) {
            int numMapTasks = Integer.parseInt(results.getOptionValue("maps"));
            job.setNumReduceTasks(numMapTasks);
        }
        if (results.hasOption("reduces")) {
            int numReduceTasks = Integer.parseInt(results.getOptionValue("reduces"));
            job.setNumReduceTasks(numReduceTasks);
        }
        if (results.hasOption("lib")) {
            job.set("native.class.library", results.getOptionValue("lib"));
        }
        if (results.hasOption("inputformat")) {
            job.setInputFormat(getClass(results, "inputformat", job, InputFormat.class));
        }
        if (results.hasOption("outputformat")) {
            job.setOutputFormat(getClass(results, "outputformat", job, OutputFormat.class));
        }
        if (results.hasOption("jobconf")) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            String options = results.getOptionValue("jobconf");
            StringTokenizer tokenizer = new StringTokenizer(options, ",");
            while (tokenizer.hasMoreTokens()) {
                String keyVal = tokenizer.nextToken().trim();
                String[] keyValSplit = keyVal.split("=", 2);
                job.set(keyValSplit[0], keyValSplit[1]);
            }
        }
        runJob(job);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }
}