List of usage examples for org.apache.commons.cli OptionBuilder hasArg
public static OptionBuilder hasArg()
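hasArg() configures the next Option created by OptionBuilder to require a single argument value. Before the real-world examples below, here is a minimal, self-contained sketch of the typical pattern; the option letter, argument name, and description are illustrative and not taken from any of the projects listed:

import org.apache.commons.cli.*;

public class HasArgDemo {
    @SuppressWarnings("static-access") // OptionBuilder is used through chained static calls
    public static void main(String[] args) throws ParseException {
        // Define "-o <path>" / "--output <path>": an option that takes exactly one argument value.
        Option output = OptionBuilder.hasArg().withArgName("path").withLongOpt("output")
                .withDescription("File to write results to").create('o');

        Options options = new Options();
        options.addOption(output);

        // Parse the command line and read the argument value back.
        CommandLine cmd = new GnuParser().parse(options, args);
        if (cmd.hasOption('o')) {
            System.out.println("output = " + cmd.getOptionValue('o'));
        }
    }
}

Running it as "java HasArgDemo -o results.txt" prints "output = results.txt"; passing a bare "-o" with no value makes parse() throw a MissingArgumentException.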
From source file:org.apache.hadoop.hive.ql.processors.ErasureProcessor.java
/**
 * Sets an erasure coding policy on a directory at the specified path.
 * @param params Parameters passed to the command.
 * @throws Exception If the command failed.
 */
private void setPolicy(String[] params) throws Exception {
    String command = "setPolicy";
    try {
        // setPolicy -path <path> [-policy <policyName>]
        Options setPolicyOptions = new Options();

        String pathOptionName = "path";
        Option pathOption = OptionBuilder.hasArg().isRequired().withLongOpt(pathOptionName)
                .withDescription("Path to set policy on").create();
        setPolicyOptions.addOption(pathOption);

        String policyOptionName = "policy";
        Option policyOption = OptionBuilder.hasArg().withLongOpt(policyOptionName)
                .withDescription("Policy to set").create();
        setPolicyOptions.addOption(policyOption);

        CommandLine args = parseCommandArgs(setPolicyOptions, params);

        String path = args.getOptionValue(pathOptionName);
        String policy = args.getOptionValue(policyOptionName);

        erasureCodingShim.setErasureCodingPolicy(new Path(path), policy);
        writeTestOutput("Set EC policy " + policy);
    } catch (ParseException pe) {
        writeTestOutput("Error parsing options for " + command + " " + pe.getMessage());
    } catch (Exception e) {
        writeTestOutput("Caught exception running " + command + ": " + e.getMessage());
        throw new Exception("Cannot run " + command + ": " + e.getMessage());
    }
}
From source file:org.apache.hadoop.hive.ql.processors.ErasureProcessor.java
/**
 * Unsets an erasure coding policy on a directory at the specified path.
 * @param params Parameters passed to the command.
 * @throws Exception if command failed.
 */
private void unsetPolicy(String[] params) throws Exception {
    String command = "unsetPolicy";
    try {
        // unsetPolicy -path <path>
        Options unsetPolicyOptions = new Options();

        String pathOptionName = "path";
        Option pathOption = OptionBuilder.hasArg().isRequired().withLongOpt(pathOptionName)
                .withDescription("Path to unset policy on").create();
        unsetPolicyOptions.addOption(pathOption);

        CommandLine args = parseCommandArgs(unsetPolicyOptions, params);

        String path = args.getOptionValue(pathOptionName);

        erasureCodingShim.unsetErasureCodingPolicy(new Path(path));
        writeTestOutput("Unset EC policy");
    } catch (ParseException pe) {
        writeTestOutput("Error parsing options for " + command + " " + pe.getMessage());
    } catch (Exception e) {
        writeTestOutput("Caught exception running " + command + ": " + e.getMessage());
        throw new Exception("Cannot run " + command + ": " + e.getMessage());
    }
}
From source file:org.apache.hadoop.mapreduce.MiniHadoopClusterManager.java
/**
 * Creates configuration options object.
 */
@SuppressWarnings("static-access")
private Options makeOptions() {
    Options options = new Options();
    options.addOption("nodfs", false, "Don't start a mini DFS cluster")
            .addOption("nomr", false, "Don't start a mini MR cluster")
            .addOption("nodemanagers", true, "How many nodemanagers to start (default 1)")
            .addOption("datanodes", true, "How many datanodes to start (default 1)")
            .addOption("format", false, "Format the DFS (default false)")
            .addOption("nnport", true, "NameNode port (default 0--we choose)")
            .addOption("namenode", true, "URL of the namenode (default "
                    + "is either the DFS cluster or a temporary dir)")
            .addOption("rmport", true, "ResourceManager port (default 0--we choose)")
            .addOption("jhsport", true, "JobHistoryServer port (default 0--we choose)")
            .addOption(OptionBuilder.hasArgs().withArgName("property=value")
                    .withDescription("Options to pass into configuration object").create("D"))
            .addOption(OptionBuilder.hasArg().withArgName("path")
                    .withDescription("Save configuration to this XML file.").create("writeConfig"))
            .addOption(OptionBuilder.hasArg().withArgName("path")
                    .withDescription("Write basic information to this JSON file.").create("writeDetails"))
            .addOption(OptionBuilder.withDescription("Prints option help.").create("help"));
    return options;
}
From source file:org.apache.hadoop.test.MiniHadoopClusterManager.java
/**
 * Creates configuration options object.
 */
@SuppressWarnings("static-access")
private Options makeOptions() {
    Options options = new Options();
    options.addOption("nodfs", false, "Don't start a mini DFS cluster")
            .addOption("nomr", false, "Don't start a mini MR cluster")
            .addOption("tasktrackers", true, "How many tasktrackers to start (default 1)")
            .addOption("datanodes", true, "How many datanodes to start (default 1)")
            .addOption("format", false, "Format the DFS (default false)")
            .addOption("nnport", true, "NameNode port (default 0--we choose)")
            .addOption("namenode", true, "URL of the namenode (default "
                    + "is either the DFS cluster or a temporary dir)")
            .addOption("jtport", true, "JobTracker port (default 0--we choose)")
            .addOption(OptionBuilder.hasArgs().withArgName("property=value")
                    .withDescription("Options to pass into configuration object").create("D"))
            .addOption(OptionBuilder.hasArg().withArgName("path")
                    .withDescription("Save configuration to this XML file.").create("writeConfig"))
            .addOption(OptionBuilder.hasArg().withArgName("path")
                    .withDescription("Write basic information to this JSON file.").create("writeDetails"))
            .addOption(OptionBuilder.withDescription("Prints option help.").create("help"));
    return options;
}
From source file:org.apache.hadoop.util.ConfTest.java
@SuppressWarnings("static-access")
public static void main(String[] args) throws IOException {
    GenericOptionsParser genericParser = new GenericOptionsParser(args);
    String[] remainingArgs = genericParser.getRemainingArgs();

    Option conf = OptionBuilder.hasArg().create("conffile");
    Option help = OptionBuilder.withLongOpt("help").create('h');
    Options opts = new Options().addOption(conf).addOption(help);
    CommandLineParser specificParser = new GnuParser();
    CommandLine cmd = null;
    try {
        cmd = specificParser.parse(opts, remainingArgs);
    } catch (MissingArgumentException e) {
        terminate(1, "No argument specified for -conffile option");
    } catch (ParseException e) {
        terminate(1, USAGE);
    }
    if (cmd == null) {
        terminate(1, "Failed to parse options");
    }

    if (cmd.hasOption('h')) {
        terminate(0, USAGE);
    }

    List<File> files = new ArrayList<File>();
    if (cmd.hasOption("conffile")) {
        String[] values = cmd.getOptionValues("conffile");
        for (String value : values) {
            File confFile = new File(value);
            if (confFile.isFile()) {
                files.add(confFile);
            } else if (confFile.isDirectory()) {
                for (File file : listFiles(confFile)) {
                    files.add(file);
                }
            } else {
                terminate(1, confFile.getAbsolutePath() + " is neither a file nor a directory");
            }
        }
    } else {
        String confDirName = System.getenv(HADOOP_CONF_DIR);
        if (confDirName == null) {
            terminate(1, HADOOP_CONF_DIR + " is not defined");
        }
        File confDir = new File(confDirName);
        if (!confDir.isDirectory()) {
            terminate(1, HADOOP_CONF_DIR + " is not a directory");
        }
        files = Arrays.asList(listFiles(confDir));
    }
    if (files.isEmpty()) {
        terminate(1, "No input file to validate");
    }

    boolean ok = true;
    for (File file : files) {
        String path = file.getAbsolutePath();
        List<String> errors = checkConf(new FileInputStream(file));
        if (errors.isEmpty()) {
            System.out.println(path + ": valid");
        } else {
            ok = false;
            System.err.println(path + ":");
            for (String error : errors) {
                System.err.println("\t" + error);
            }
        }
    }
    if (ok) {
        System.out.println("OK");
    } else {
        terminate(1, "Invalid file exists");
    }
}
From source file:org.apache.hcatalog.cli.HCatCli.java
@SuppressWarnings("static-access")
public static void main(String[] args) {

    try {
        LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
    }

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        System.exit(1);
    }

    HiveConf conf = ss.getConf();

    HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());

    SessionState.start(ss);

    Options options = new Options();

    // -e 'quoted-query-string'
    options.addOption(OptionBuilder.hasArg().withArgName("exec")
            .withDescription("hcat command given from command line").create('e'));

    // -f <query-file>
    options.addOption(OptionBuilder.hasArg().withArgName("file")
            .withDescription("hcat commands in file").create('f'));

    // -g
    options.addOption(OptionBuilder.hasArg().withArgName("group")
            .withDescription("group for the db/table specified in CREATE statement").create('g'));

    // -p
    options.addOption(OptionBuilder.hasArg().withArgName("perms")
            .withDescription("permissions for the db/table specified in CREATE statement").create('p'));

    // -D
    options.addOption(OptionBuilder.hasArgs(2).withArgName("property=value").withValueSeparator()
            .withDescription("use hadoop value for given property").create('D'));

    // [-h|--help]
    options.addOption(new Option("h", "help", false, "Print help information"));

    Parser parser = new GnuParser();
    CommandLine cmdLine = null;

    try {
        cmdLine = parser.parse(options, args);
    } catch (ParseException e) {
        printUsage(options, ss.err);
        System.exit(1);
    }

    // -e
    String execString = (String) cmdLine.getOptionValue('e');
    // -f
    String fileName = (String) cmdLine.getOptionValue('f');
    // -h
    if (cmdLine.hasOption('h')) {
        printUsage(options, ss.out);
        System.exit(0);
    }

    if (execString != null && fileName != null) {
        ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
        printUsage(options, ss.err);
        System.exit(1);
    }

    // -p
    String perms = (String) cmdLine.getOptionValue('p');
    if (perms != null) {
        validatePermissions(ss, conf, perms);
    }

    // -g
    String grp = (String) cmdLine.getOptionValue('g');
    if (grp != null) {
        conf.set(HCatConstants.HCAT_GROUP, grp);
    }

    // -D
    setConfProperties(conf, cmdLine.getOptionProperties("D"));

    if (execString != null) {
        System.exit(processLine(execString));
    }

    try {
        if (fileName != null) {
            System.exit(processFile(fileName));
        }
    } catch (FileNotFoundException e) {
        ss.err.println("Input file not found. (" + e.getMessage() + ")");
        System.exit(1);
    } catch (IOException e) {
        ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        System.exit(1);
    }

    // -h
    printUsage(options, ss.err);
    System.exit(1);
}
From source file:org.apache.helix.tools.IntegrationTestUtil.java
@SuppressWarnings("static-access")
static Options constructCommandLineOptions() {
    Option helpOption = OptionBuilder.withLongOpt(help)
            .withDescription("Prints command-line options information").create();

    Option zkSvrOption = OptionBuilder.hasArgs(1).isRequired(true).withArgName("zookeeperAddress")
            .withLongOpt(zkSvr).withDescription("Provide zookeeper-address").create();

    Option verifyExternalViewOption = OptionBuilder.hasArgs().isRequired(false)
            .withArgName("clusterName node1 node2..").withLongOpt(verifyExternalView)
            .withDescription("Verify external-view").create();

    Option verifyLiveNodesOption = OptionBuilder.hasArg().isRequired(false)
            .withArgName("clusterName node1, node2..").withLongOpt(verifyLiveNodes)
            .withDescription("Verify live-nodes").create();

    Option readZNodeOption = OptionBuilder.hasArgs(1).isRequired(false).withArgName("zkPath")
            .withLongOpt(readZNode).withDescription("Read znode").create();

    Option readLeaderOption = OptionBuilder.hasArgs(1).isRequired(false).withArgName("clusterName")
            .withLongOpt(readLeader).withDescription("Read cluster controller").create();

    OptionGroup optGroup = new OptionGroup();
    optGroup.setRequired(true);
    optGroup.addOption(verifyExternalViewOption);
    optGroup.addOption(verifyLiveNodesOption);
    optGroup.addOption(readZNodeOption);
    optGroup.addOption(readLeaderOption);

    Options options = new Options();
    options.addOption(helpOption);
    options.addOption(zkSvrOption);
    options.addOptionGroup(optGroup);

    return options;
}
From source file:org.apache.hive.beeline.cli.CliOptionsProcessor.java
public CliOptionsProcessor() {
    // -database database
    options.addOption(OptionBuilder.hasArg().withArgName("databasename").withLongOpt("database")
            .withDescription("Specify the database to use").create());

    // -e 'quoted-query-string'
    options.addOption(OptionBuilder.hasArg().withArgName("quoted-query-string")
            .withDescription("SQL from command line").create('e'));

    // -f <query-file>
    options.addOption(OptionBuilder.hasArg().withArgName("filename")
            .withDescription("SQL from files").create('f'));

    // -i <init-query-file>
    options.addOption(OptionBuilder.hasArg().withArgName("filename")
            .withDescription("Initialization SQL file").create('i'));

    // -hiveconf x=y
    options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value")
            .withLongOpt("hiveconf").withDescription("Use value for given property").create());

    // Substitution option -d, --define
    options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("key=value")
            .withLongOpt("define")
            .withDescription("Variable substitution to apply to Hive commands. e.g. -d A=B or --define A=B")
            .create('d'));

    // Substitution option --hivevar
    options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("key=value")
            .withLongOpt("hivevar")
            .withDescription("Variable substitution to apply to Hive commands. e.g. --hivevar A=B")
            .create());

    // [-S|--silent]
    options.addOption(new Option("S", "silent", false, "Silent mode in interactive shell"));

    // [-v|--verbose]
    options.addOption(new Option("v", "verbose", false, "Verbose mode (echo executed SQL to the console)"));

    // [-H|--help]
    options.addOption(new Option("H", "help", false, "Print help information"));
}
From source file:org.apache.hive.hcatalog.cli.HCatCli.java
@SuppressWarnings("static-access")
public static void main(String[] args) {

    try {
        LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
    }
    LOG = LoggerFactory.getLogger(HCatCli.class);

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        System.exit(1);
    }

    HiveConf conf = ss.getConf();

    HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());

    String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
    final String MR_ENGINE = "mr";
    if (!MR_ENGINE.equalsIgnoreCase(engine)) {
        HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_ENGINE, MR_ENGINE);
        LOG.info("Forcing " + ConfVars.HIVE_EXECUTION_ENGINE + " to " + MR_ENGINE);
    }

    Options options = new Options();

    // -e 'quoted-query-string'
    options.addOption(OptionBuilder.hasArg().withArgName("exec")
            .withDescription("hcat command given from command line").create('e'));

    // -f <query-file>
    options.addOption(OptionBuilder.hasArg().withArgName("file")
            .withDescription("hcat commands in file").create('f'));

    // -g
    options.addOption(OptionBuilder.hasArg().withArgName("group")
            .withDescription("group for the db/table specified in CREATE statement").create('g'));

    // -p
    options.addOption(OptionBuilder.hasArg().withArgName("perms")
            .withDescription("permissions for the db/table specified in CREATE statement").create('p'));

    // -D
    options.addOption(OptionBuilder.hasArgs(2).withArgName("property=value").withValueSeparator()
            .withDescription("use hadoop value for given property").create('D'));

    // [-h|--help]
    options.addOption(new Option("h", "help", false, "Print help information"));

    Parser parser = new GnuParser();
    CommandLine cmdLine = null;

    try {
        cmdLine = parser.parse(options, args);
    } catch (ParseException e) {
        printUsage(options, System.err);
        // Note, we print to System.err instead of ss.err, because if we can't parse our
        // commandline, we haven't even begun, and therefore cannot be expected to have
        // reasonably constructed or started the SessionState.
        System.exit(1);
    }

    // -D : process these first, so that we can instantiate SessionState appropriately.
    setConfProperties(conf, cmdLine.getOptionProperties("D"));

    // Now that the properties are in, we can instantiate SessionState.
    SessionState.start(ss);

    // -h
    if (cmdLine.hasOption('h')) {
        printUsage(options, ss.out);
        sysExit(ss, 0);
    }

    // -e
    String execString = (String) cmdLine.getOptionValue('e');

    // -f
    String fileName = (String) cmdLine.getOptionValue('f');
    if (execString != null && fileName != null) {
        ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
        printUsage(options, ss.err);
        sysExit(ss, 1);
    }

    // -p
    String perms = (String) cmdLine.getOptionValue('p');
    if (perms != null) {
        validatePermissions(ss, conf, perms);
    }

    // -g
    String grp = (String) cmdLine.getOptionValue('g');
    if (grp != null) {
        conf.set(HCatConstants.HCAT_GROUP, grp);
    }

    // all done parsing, let's run stuff!
    if (execString != null) {
        sysExit(ss, processLine(execString));
    }

    try {
        if (fileName != null) {
            sysExit(ss, processFile(fileName));
        }
    } catch (FileNotFoundException e) {
        ss.err.println("Input file not found. (" + e.getMessage() + ")");
        sysExit(ss, 1);
    } catch (IOException e) {
        ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        sysExit(ss, 1);
    }

    // -h
    printUsage(options, ss.err);
    sysExit(ss, 1);
}
From source file:org.apache.hive.hcatalog.streaming.StreamingIntegrationTester.java
public static void main(String[] args) {

    try {
        LogUtils.initHiveLog4j();
    } catch (LogUtils.LogInitializationException e) {
        System.err.println("Unable to initialize log4j " + StringUtils.stringifyException(e));
        System.exit(-1);
    }

    Options options = new Options();

    options.addOption(OptionBuilder.hasArg().withArgName("abort-pct")
            .withDescription("Percentage of transactions to abort, defaults to 5")
            .withLongOpt("abortpct").create('a'));

    options.addOption(OptionBuilder.hasArgs().withArgName("column-names")
            .withDescription("column names of table to write to")
            .withLongOpt("columns").withValueSeparator(',').isRequired().create('c'));

    options.addOption(OptionBuilder.hasArg().withArgName("database")
            .withDescription("Database of table to write to")
            .withLongOpt("database").isRequired().create('d'));

    options.addOption(OptionBuilder.hasArg().withArgName("frequency")
            .withDescription("How often to commit a transaction, in seconds, defaults to 1")
            .withLongOpt("frequency").create('f'));

    options.addOption(OptionBuilder.hasArg().withArgName("iterations")
            .withDescription("Number of batches to write, defaults to 10")
            .withLongOpt("num-batches").create('i'));

    options.addOption(OptionBuilder.hasArg().withArgName("metastore-uri")
            .withDescription("URI of Hive metastore")
            .withLongOpt("metastore-uri").isRequired().create('m'));

    options.addOption(OptionBuilder.hasArg().withArgName("num_transactions")
            .withDescription("Number of transactions per batch, defaults to 100")
            .withLongOpt("num-txns").create('n'));

    options.addOption(OptionBuilder.hasArgs().withArgName("partition-values")
            .withDescription("partition values, must be provided in order of partition columns, "
                    + "if not provided table is assumed to not be partitioned")
            .withLongOpt("partition").withValueSeparator(',').create('p'));

    options.addOption(OptionBuilder.hasArg().withArgName("records-per-transaction")
            .withDescription("records to write in each transaction, defaults to 100")
            .withLongOpt("records-per-txn").withValueSeparator(',').create('r'));

    options.addOption(OptionBuilder.hasArgs().withArgName("column-types")
            .withDescription("column types, valid values are string, int, float, decimal, date, datetime")
            .withLongOpt("schema").withValueSeparator(',').isRequired().create('s'));

    options.addOption(OptionBuilder.hasArg().withArgName("table")
            .withDescription("Table to write to")
            .withLongOpt("table").isRequired().create('t'));

    options.addOption(OptionBuilder.hasArg().withArgName("num-writers")
            .withDescription("Number of writers to create, defaults to 2")
            .withLongOpt("writers").create('w'));

    options.addOption(OptionBuilder.hasArg(false).withArgName("pause")
            .withDescription("Wait on keyboard input after commit & batch close. default: disabled")
            .withLongOpt("pause").create('x'));

    Parser parser = new GnuParser();
    CommandLine cmdline = null;
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        usage(options);
    }

    boolean pause = cmdline.hasOption('x');
    String db = cmdline.getOptionValue('d');
    String table = cmdline.getOptionValue('t');
    String uri = cmdline.getOptionValue('m');
    int txnsPerBatch = Integer.parseInt(cmdline.getOptionValue('n', "100"));
    int writers = Integer.parseInt(cmdline.getOptionValue('w', "2"));
    int batches = Integer.parseInt(cmdline.getOptionValue('i', "10"));
    int recordsPerTxn = Integer.parseInt(cmdline.getOptionValue('r', "100"));
    int frequency = Integer.parseInt(cmdline.getOptionValue('f', "1"));
    int ap = Integer.parseInt(cmdline.getOptionValue('a', "5"));
    float abortPct = ((float) ap) / 100.0f;
    String[] partVals = cmdline.getOptionValues('p');
    String[] cols = cmdline.getOptionValues('c');
    String[] types = cmdline.getOptionValues('s');

    StreamingIntegrationTester sit = new StreamingIntegrationTester(db, table, uri, txnsPerBatch, writers,
            batches, recordsPerTxn, frequency, abortPct, partVals, cols, types, pause);

    sit.go();
}
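Note: OptionBuilder is deprecated as of Commons CLI 1.3 in favor of Option.builder(). For projects on a newer release, a rough equivalent of the hasArg() pattern shown in the examples above looks like the following sketch (assuming Commons CLI 1.3+; the option letter and names are again illustrative):

import org.apache.commons.cli.*;

public class BuilderDemo {
    public static void main(String[] args) throws ParseException {
        // Option.Builder.hasArg() plays the same role as OptionBuilder.hasArg().
        Option output = Option.builder("o")
                .longOpt("output")
                .hasArg()                      // the option takes a single argument value
                .argName("path")
                .desc("File to write results to")
                .build();

        CommandLine cmd = new DefaultParser().parse(new Options().addOption(output), args);
        System.out.println("output = " + cmd.getOptionValue("o"));
    }
}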