Example usage for org.apache.commons.cli CommandLine getOptionValues

List of usage examples for org.apache.commons.cli CommandLine getOptionValues

Introduction

This page collects example usages of org.apache.commons.cli CommandLine getOptionValues, drawn from open-source projects.

Prototype

public String[] getOptionValues(char opt)
public String[] getOptionValues(String opt)

Document

Retrieves the array of values, if any, of an option.
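
Before the project examples, here is a minimal, self-contained sketch of the call (the class and option names are ours, not from any of the projects below). Note that getOptionValues returns null, not an empty array, when the option was never supplied:

import org.apache.commons.cli.*;

public class GetOptionValuesDemo {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        // "true" means the option takes one argument per occurrence;
        // repeating -D accumulates values.
        options.addOption("D", "define", true, "property=value pair");

        CommandLine cmd = new GnuParser().parse(options,
                new String[] { "-D", "a=1", "-D", "b=2" });

        // String overload shown here; the char overload getOptionValues('D') also exists.
        String[] values = cmd.getOptionValues("D");
        if (values != null) { // null, not empty, when the option is absent
            for (String v : values) {
                System.out.println(v); // prints a=1, then b=2
            }
        }
    }
}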

Usage

From source file:org.apache.giraph.hive.HiveGiraphRunner.java

/**
 * Process -hiveconf options from command line
 *
 * @param cmdln Command line options
 */
private void processHiveConfOptions(CommandLine cmdln) {
    // getOptionValues returns null, not an empty array, when -hiveconf was never given
    String[] hiveconfs = cmdln.getOptionValues("hiveconf");
    if (hiveconfs == null) {
        return;
    }
    for (String hiveconf : hiveconfs) {
        String[] keyval = hiveconf.split("=", 2);
        if (keyval.length == 2) {
            String name = keyval[0];
            String value = keyval[1];
            if (name.equals("tmpjars") || name.equals("tmpfiles")) {
                addToStringCollection(conf, name, value);
            } else {
                conf.set(name, value);
            }
        }
    }
}
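
The null guard above is needed because, per the Commons CLI javadoc, getOptionValues returns null rather than an empty array when the option is absent. An equivalent guard uses hasOption, as later examples on this page do:

    if (cmdln.hasOption("hiveconf")) {
        for (String hiveconf : cmdln.getOptionValues("hiveconf")) {
            // ... same key=value handling as above
        }
    }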

From source file:org.apache.giraph.io.hcatalog.HCatGiraphRunner.java

/**
* process arguments
* @param args to process
* @return CommandLine instance
* @throws ParseException error parsing arguments
* @throws InterruptedException interrupted
*/
private CommandLine processArguments(String[] args) throws ParseException, InterruptedException {
    Options options = new Options();
    options.addOption("h", "help", false, "Help");
    options.addOption("v", "verbose", false, "Verbose");
    options.addOption("D", "hiveconf", true, "property=value for Hive/Hadoop configuration");
    options.addOption("w", "workers", true, "Number of workers");
    if (vertexClass == null) {
        options.addOption(null, "vertexClass", true, "Giraph Vertex class to use");
    }
    if (vertexInputFormatClass == null) {
        options.addOption(null, "vertexInputFormatClass", true,
                "Giraph HCatalogVertexInputFormat class to use");
    }
    if (edgeInputFormatClass == null) {
        options.addOption(null, "edgeInputFormatClass", true, "Giraph HCatalogEdgeInputFormat class to use");
    }

    if (vertexOutputFormatClass == null) {
        options.addOption(null, "vertexOutputFormatClass", true,
                "Giraph HCatalogVertexOutputFormat class to use");
    }

    options.addOption("db", "dbName", true, "Hive database name");
    options.addOption("vi", "vertexInputTable", true, "Vertex input table name");
    options.addOption("VI", "vertexInputFilter", true,
            "Vertex input table filter expression (e.g., \"a<2 AND b='two'\")");
    options.addOption("ei", "edgeInputTable", true, "Edge input table name");
    options.addOption("EI", "edgeInputFilter", true,
            "Edge input table filter expression (e.g., \"a<2 AND b='two'\")");
    options.addOption("o", "outputTable", true, "Output table name");
    options.addOption("O", "outputPartition", true, "Output table partition values (e.g., \"a=1,b=two\")");
    options.addOption("s", "skipOutput", false, "Skip output?");

    addMoreOptions(options);

    CommandLineParser parser = new GnuParser();
    final CommandLine cmdln = parser.parse(options, args);
    if (args.length == 0 || cmdln.hasOption("help")) {
        new HelpFormatter().printHelp(getClass().getName(), options, true);
        throw new InterruptedException();
    }

    // Giraph classes
    if (cmdln.hasOption("vertexClass")) {
        vertexClass = findClass(cmdln.getOptionValue("vertexClass"), Vertex.class);
    }
    if (cmdln.hasOption("vertexInputFormatClass")) {
        vertexInputFormatClass = findClass(cmdln.getOptionValue("vertexInputFormatClass"),
                HCatalogVertexInputFormat.class);
    }
    if (cmdln.hasOption("edgeInputFormatClass")) {
        edgeInputFormatClass = findClass(cmdln.getOptionValue("edgeInputFormatClass"),
                HCatalogEdgeInputFormat.class);
    }

    if (cmdln.hasOption("vertexOutputFormatClass")) {
        vertexOutputFormatClass = findClass(cmdln.getOptionValue("vertexOutputFormatClass"),
                HCatalogVertexOutputFormat.class);
    }

    if (cmdln.hasOption("skipOutput")) {
        skipOutput = true;
    }

    if (vertexClass == null) {
        throw new IllegalArgumentException("Need the Giraph Vertex class name (-vertexClass) to use");
    }
    if (vertexInputFormatClass == null && edgeInputFormatClass == null) {
        throw new IllegalArgumentException(
                "Need at least one of Giraph VertexInputFormat " + "class name (-vertexInputFormatClass) and "
                        + "EdgeInputFormat class name (-edgeInputFormatClass)");
    }
    if (vertexOutputFormatClass == null) {
        throw new IllegalArgumentException(
                "Need the Giraph VertexOutputFormat " + "class name (-vertexOutputFormatClass) to use");
    }
    if (!cmdln.hasOption("workers")) {
        throw new IllegalArgumentException("Need to choose the number of workers (-w)");
    }
    if (!cmdln.hasOption("vertexInputTable") && vertexInputFormatClass != null) {
        throw new IllegalArgumentException("Need to set the vertex input table name (-vi)");
    }
    if (!cmdln.hasOption("edgeInputTable") && edgeInputFormatClass != null) {
        throw new IllegalArgumentException("Need to set the edge input table name (-ei)");
    }
    if (!cmdln.hasOption("outputTable")) {
        throw new IllegalArgumentException("Need to set the output table name (-o)");
    }
    dbName = cmdln.getOptionValue("dbName", "default");
    vertexInputTableName = cmdln.getOptionValue("vertexInputTable");
    vertexInputTableFilterExpr = cmdln.getOptionValue("vertexInputFilter");
    edgeInputTableName = cmdln.getOptionValue("edgeInputTable");
    edgeInputTableFilterExpr = cmdln.getOptionValue("edgeInputFilter");
    outputTableName = cmdln.getOptionValue("outputTable");
    outputTablePartitionValues = HiveUtils.parsePartitionValues(cmdln.getOptionValue("outputPartition"));
    workers = Integer.parseInt(cmdln.getOptionValue("workers"));
    isVerbose = cmdln.hasOption("verbose");

    // pick up -hiveconf arguments (getOptionValues returns null when none were given)
    String[] hiveconfs = cmdln.getOptionValues("hiveconf");
    for (String hiveconf : hiveconfs == null ? new String[0] : hiveconfs) {
        String[] keyval = hiveconf.split("=", 2);
        if (keyval.length == 2) {
            String name = keyval[0];
            String value = keyval[1];
            if (name.equals("tmpjars") || name.equals("tmpfiles")) {
                addToStringCollection(conf, name, value);
            } else {
                conf.set(name, value);
            }
        }
    }

    processMoreArguments(cmdln);

    return cmdln;
}
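
For getOptionValues("hiveconf") above to return several values, the option only needs a single argument per occurrence; GnuParser accumulates repeated occurrences. A minimal sketch of that behavior (the property values are made up):

    Options options = new Options();
    options.addOption("D", "hiveconf", true, "property=value for Hive/Hadoop configuration");

    CommandLine cmdln = new GnuParser().parse(options,
            new String[] { "-D", "mapred.reduce.tasks=4", "-D", "tmpjars=/tmp/a.jar" });

    // Both occurrences come back, in command-line order; the lookup works
    // with either the short name ("D") or the long name ("hiveconf").
    String[] confs = cmdln.getOptionValues("hiveconf");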

From source file:org.apache.giraph.utils.ConfigurationUtils.java

/**
 * Populate GiraphConfiguration for this job with all cmd line args found.
 * Any global configuration data that Giraph on any platform might need
 * should be captured here.
 * @param giraphConfiguration config for this job run
 * @param cmd parsed command line options to store in giraphConfiguration
 * @param vertexClassName the vertex class (application) to run in this job.
 * @param workers the number of worker tasks for this job run.
 */
private static void populateGiraphConfiguration(final GiraphConfiguration giraphConfiguration,
        final CommandLine cmd, final String vertexClassName, final int workers)
        throws ClassNotFoundException, IOException {
    giraphConfiguration.setWorkerConfiguration(workers, workers, 100.0f);
    giraphConfiguration.setVertexClass((Class<? extends Vertex>) Class.forName(vertexClassName));
    if (cmd.hasOption("c")) {
        giraphConfiguration
                .setCombinerClass((Class<? extends Combiner>) Class.forName(cmd.getOptionValue("c")));
    }
    if (cmd.hasOption("ve")) {
        giraphConfiguration
                .setOutEdgesClass((Class<? extends OutEdges>) Class.forName(cmd.getOptionValue("ve")));
    }
    if (cmd.hasOption("ive")) {
        giraphConfiguration
                .setInputOutEdgesClass((Class<? extends OutEdges>) Class.forName(cmd.getOptionValue("ive")));
    }
    if (cmd.hasOption("wc")) {
        giraphConfiguration.setWorkerContextClass(
                (Class<? extends WorkerContext>) Class.forName(cmd.getOptionValue("wc")));
    }
    if (cmd.hasOption("mc")) {
        giraphConfiguration.setMasterComputeClass(
                (Class<? extends MasterCompute>) Class.forName(cmd.getOptionValue("mc")));
    }
    if (cmd.hasOption("aw")) {
        giraphConfiguration.setAggregatorWriterClass(
                (Class<? extends AggregatorWriter>) Class.forName(cmd.getOptionValue("aw")));
    }
    if (cmd.hasOption("vif")) {
        giraphConfiguration.setVertexInputFormatClass(
                (Class<? extends VertexInputFormat>) Class.forName(cmd.getOptionValue("vif")));
    } else {
        if (LOG.isInfoEnabled()) {
            LOG.info("No vertex input format specified. Ensure your " + "InputFormat does not require one.");
        }
    }
    if (cmd.hasOption("eif")) {
        giraphConfiguration.setEdgeInputFormatClass(
                (Class<? extends EdgeInputFormat>) Class.forName(cmd.getOptionValue("eif")));
    } else {
        if (LOG.isInfoEnabled()) {
            LOG.info("No edge input format specified. Ensure your " + "InputFormat does not require one.");
        }
    }
    if (cmd.hasOption("of")) {
        giraphConfiguration.setVertexOutputFormatClass(
                (Class<? extends VertexOutputFormat>) Class.forName(cmd.getOptionValue("of")));
    } else {
        if (LOG.isInfoEnabled()) {
            LOG.info("No output format specified. Ensure your OutputFormat " + "does not require one.");
        }
    }
    if (cmd.hasOption("pc")) {
        giraphConfiguration
                .setPartitionClass((Class<? extends Partition>) Class.forName(cmd.getOptionValue("pc")));
    }
    if (cmd.hasOption("vvf")) {
        giraphConfiguration.setVertexValueFactoryClass(
                (Class<? extends VertexValueFactory>) Class.forName(cmd.getOptionValue("vvf")));
    }
    if (cmd.hasOption("ca")) {
        for (String caOptionValue : cmd.getOptionValues("ca")) {
            for (String paramValue : Splitter.on(',').split(caOptionValue)) {
                String[] parts = Iterables.toArray(Splitter.on('=').split(paramValue), String.class);
                if (parts.length != 2) {
                    throw new IllegalArgumentException("Unable to parse custom argument: " + paramValue);
                }
                if (LOG.isInfoEnabled()) {
                    LOG.info("Setting custom argument [" + parts[0] + "] to [" + parts[1]
                            + "] in GiraphConfiguration");
                }
                giraphConfiguration.set(parts[0], parts[1]);
            }
        }
    }
    // Now, we parse options that are specific to Hadoop MR Job
    if (cmd.hasOption("vif")) {
        if (cmd.hasOption("vip")) {
            GiraphFileInputFormat.addVertexInputPath(giraphConfiguration, new Path(cmd.getOptionValue("vip")));
        } else {
            if (LOG.isInfoEnabled()) {
                LOG.info("No input path for vertex data was specified. Ensure your "
                        + "InputFormat does not require one.");
            }
        }
    }
    if (cmd.hasOption("eif")) {
        if (cmd.hasOption("eip")) {
            GiraphFileInputFormat.addEdgeInputPath(giraphConfiguration, new Path(cmd.getOptionValue("eip")));
        } else {
            if (LOG.isInfoEnabled()) {
                LOG.info("No input path for edge data was specified. Ensure your "
                        + "InputFormat does not require one.");
            }
        }
    }
    // YARN-ONLY OPTIONS
    if (cmd.hasOption("yj")) {
        giraphConfiguration.setYarnLibJars(cmd.getOptionValue("yj"));
    }
    if (cmd.hasOption("yh")) {
        giraphConfiguration.setYarnTaskHeapMb(Integer.parseInt(cmd.getOptionValue("yh")));
    }
    /*if[PURE_YARN]
    if (cmd.hasOption("of")) {
      if (cmd.hasOption("op")) {
        // For YARN conf to get the out dir we need w/o a Job obj
        Path outputDir = new Path(BASE_OUTPUT_PATH, cmd.getOptionValue("op"));
        outputDir = outputDir.getFileSystem(giraphConfiguration).makeQualified(outputDir);
        giraphConfiguration.set(
            org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.OUTDIR,
            outputDir.toString());
      } else {
        if (LOG.isInfoEnabled()) {
          LOG.info("No output path specified. Ensure your OutputFormat " +
              "does not require one.");
        }
      }
    }
    end[PURE_YARN]*/
    // END YARN-ONLY OPTIONS
}
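
The "ca" branch above splits each option value into comma-separated key=value pairs. A standalone sketch of that parsing (the helper name applyCustomArguments is ours, and a plain Map stands in for GiraphConfiguration to keep it self-contained):

import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import java.util.Map;

static void applyCustomArguments(String[] caValues, Map<String, String> target) {
    for (String caOptionValue : caValues) {
        // One -ca value may carry several pairs: "key1=v1,key2=v2"
        for (String paramValue : Splitter.on(',').split(caOptionValue)) {
            String[] parts = Iterables.toArray(Splitter.on('=').split(paramValue), String.class);
            if (parts.length != 2) {
                throw new IllegalArgumentException("Unable to parse custom argument: " + paramValue);
            }
            target.put(parts[0], parts[1]);
        }
    }
}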

From source file:org.apache.hadoop.fs.nfs.tools.Nfs3Console.java

private static void parseCommandLine(String args[]) throws Exception {
    final Options options = new Options();

    // Get NFS server details
    Option opt;

    options.addOption("h", "hostname", true, "NFS server hostname, e.g., nfs://server.com:2049/mountpath");
    options.addOption("p", "port", true, "NFS server port (optional)");
    options.addOption("m", "mount-dir", true, "NFS mount directory, e.g., /mnt/nfs");
    opt = new Option("e", "endpoint", true, "NFS additional endpoints");
    opt.setArgs(64);
    options.addOption(opt);

    // Parse the command line
    try {
        final CommandLineParser parser = new GnuParser();
        CommandLine commandLine = parser.parse(options, args);

        if (commandLine.hasOption('h')) {
            hostname = new URI(commandLine.getOptionValue('h'));
        }
        if (commandLine.hasOption('m')) {
            mountPath = commandLine.getOptionValue('m', "/");
        }
        if (commandLine.hasOption('e')) {
            String u[] = commandLine.getOptionValues('e');
            if (u != null) {
                for (String s : u) {
                    endpoints.add(s);
                }
            }
        }
    } catch (ParseException exception) {
        LOG.error("Could not parse command line options!");
        throw exception;
    }
}
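
The endpoint option above is the true multi-value case: setArgs(64) lets a single -e be followed by up to 64 values, all of which come back from getOptionValues('e'). A short sketch of just that pattern (the URIs are made up; Option.UNLIMITED_VALUES removes the cap entirely):

    Option opt = new Option("e", "endpoint", true, "NFS additional endpoints");
    opt.setArgs(64);
    Options options = new Options();
    options.addOption(opt);

    CommandLine cl = new GnuParser().parse(options,
            new String[] { "-e", "nfs://a:2049/", "nfs://b:2049/" });
    String[] endpoints = cl.getOptionValues('e'); // both URIs, in order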

From source file:org.apache.hadoop.hbase.client.HBaseFsck.java

/**
 * Main program
 *
 * @param args
 * @throws ParseException
 */
public static void main(String[] args)
        throws IOException, MasterNotRunningException, InterruptedException, ParseException {

    Options opt = new Options();
    opt.addOption(OptionBuilder.withArgName("property=value").hasArg()
            .withDescription("Override HBase Configuration Settings").create("D"));
    opt.addOption(OptionBuilder.withArgName("timeInSeconds").hasArg()
            .withDescription("Ignore regions with metadata updates in the last {timeInSeconds}.")
            .withType(PatternOptionBuilder.NUMBER_VALUE).create("timelag"));
    opt.addOption(OptionBuilder.withArgName("timeInSeconds").hasArg()
            .withDescription("Stop scan jobs after a fixed time & analyze existing data.")
            .withType(PatternOptionBuilder.NUMBER_VALUE).create("timeout"));
    opt.addOption("fix", false, "Try to fix some of the errors.");
    opt.addOption("y", false, "Do not prompt for reconfirmation from users on fix.");
    opt.addOption("w", false, "Try to fix warnings as well as errors.");
    opt.addOption("summary", false, "Print only summary of the tables and status.");
    opt.addOption("detail", false, "Display full report of all regions.");
    opt.addOption("checkRegionInfo", false, "Check if .regioninfo is consistent with .META.");
    opt.addOption("h", false, "Display this help");
    CommandLine cmd = new GnuParser().parse(opt, args);

    // any unknown args or -h
    if (!cmd.getArgList().isEmpty() || cmd.hasOption("h")) {
        new HelpFormatter().printHelp("hbck", opt);
        return;
    }

    Configuration conf = HBaseConfiguration.create();
    conf.set("fs.defaultFS", conf.get("hbase.rootdir"));

    if (cmd.hasOption("D")) {
        for (String confOpt : cmd.getOptionValues("D")) {
            String[] kv = confOpt.split("=", 2);
            if (kv.length == 2) {
                conf.set(kv[0], kv[1]);
                LOG.debug("-D configuration override: " + kv[0] + "=" + kv[1]);
            } else {
                throw new ParseException("-D option format invalid: " + confOpt);
            }
        }
    }
    if (cmd.hasOption("timeout")) {
        Object timeout = cmd.getParsedOptionValue("timeout");
        if (timeout instanceof Long) {
            conf.setLong(HConstants.HBASE_RPC_TIMEOUT_KEY, ((Long) timeout).longValue() * 1000);
        } else {
            throw new ParseException("-timeout needs a long value.");
        }
    }

    // create a fsck object
    HBaseFsck fsck = new HBaseFsck(conf);
    fsck.setTimeLag(HBaseFsckRepair.getEstimatedFixTime(conf));

    if (cmd.hasOption("detail")) {
        fsck.displayFullReport();
    }
    if (cmd.hasOption("timelag")) {
        Object timelag = cmd.getParsedOptionValue("timelag");
        if (timelag instanceof Long) {
            fsck.setTimeLag(((Long) timelag).longValue() * 1000);
        } else {
            throw new ParseException("-timelag needs a long value.");
        }
    }
    if (cmd.hasOption("fix")) {
        fsck.setFixState(FixState.ERROR);
    }
    if (cmd.hasOption("w")) {
        fsck.setFixState(FixState.ALL);
    }
    if (cmd.hasOption("y")) {
        fsck.setPromptResponse(true);
    }
    if (cmd.hasOption("summary")) {
        fsck.setSummary();
    }
    if (cmd.hasOption("checkRegionInfo")) {
        checkRegionInfo = true;
    }

    int code = -1;
    try {
        // do the real work of fsck
        code = fsck.doWork();
        // If we have tried to fix the HBase state, run fsck again
        // to see if we have fixed our problems
        if (fsck.shouldRerun()) {
            fsck.setFixState(FixState.NONE);
            long fixTime = HBaseFsckRepair.getEstimatedFixTime(conf);
            if (fixTime > 0) {
                LOG.info("Waiting " + StringUtils.formatTime(fixTime)
                        + " before checking to see if fixes worked...");
                Thread.sleep(fixTime);
            }
            code = fsck.doWork();
        }
    } catch (InterruptedException ie) {
        LOG.info("HBCK was interrupted by user. Exiting...");
        code = -1;
    }

    Runtime.getRuntime().exit(code);
}
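
The -timelag/-timeout handling above relies on typed options: declaring the option with withType(PatternOptionBuilder.NUMBER_VALUE) makes getParsedOptionValue return a Number (a Long for integral input) instead of a String. A sketch of just that pattern (OptionBuilder uses static access, hence the @SuppressWarnings seen in some callers):

    Options opt = new Options();
    opt.addOption(OptionBuilder.withArgName("timeInSeconds").hasArg()
            .withType(PatternOptionBuilder.NUMBER_VALUE).create("timelag"));

    CommandLine cmd = new GnuParser().parse(opt, new String[] { "-timelag", "30" });
    Object timelag = cmd.getParsedOptionValue("timelag"); // a Long here
    long millis = ((Long) timelag).longValue() * 1000;

Note also that even though -D is declared with a single hasArg() above, repeating -D on the command line accumulates values, so getOptionValues("D") returns them all.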

From source file:org.apache.hadoop.hbase.util.ChaosMonkey.java

@Override
protected void processOptions(CommandLine cmd) {
    String[] policies = cmd.getOptionValues("policy");
    if (policies != null) {
        setPoliciesByName(policies);
    }
}
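
This snippet shows the defensive idiom compactly: when -policy was not given, getOptionValues returns null and the policies are simply left at their defaults.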

From source file:org.apache.hadoop.hbase.util.RegionSplitter.java

/**
 * The main function for the RegionSplitter application. Common uses:
 * <p>
 * <ul>
 * <li>create a table named 'myTable' with 60 pre-split regions containing 2
 * column families 'test' & 'rs', assuming the keys are hex-encoded ASCII:
 * <ul>
 * <li>bin/hbase org.apache.hadoop.hbase.util.RegionSplitter -c 60 -f test:rs
 * myTable HexStringSplit
 * </ul>
 * <li>perform a rolling split of 'myTable' (i.e. 60 => 120 regions), with 2
 * outstanding splits at a time, assuming keys are uniformly distributed
 * bytes:
 * <ul>
 * <li>bin/hbase org.apache.hadoop.hbase.util.RegionSplitter -r -o 2 myTable
 * UniformSplit
 * </ul>
 * </ul>
 *
 * There are two SplitAlgorithms built into RegionSplitter, HexStringSplit
 * and UniformSplit. These are different strategies for choosing region
 * boundaries. See their source code for details.
 *
 * @param args
 *          Usage: RegionSplitter &lt;TABLE&gt; &lt;SPLITALGORITHM&gt;
 *          &lt;-c &lt;# regions&gt; -f &lt;family:family:...&gt; | -r
 *          [-o &lt;# outstanding splits&gt;]&gt;
 *          [-D &lt;conf.param=value&gt;]
 * @throws IOException
 *           HBase IO problem
 * @throws InterruptedException
 *           user requested exit
 * @throws ParseException
 *           problem parsing user input
 */
@SuppressWarnings("static-access")
public static void main(String[] args) throws IOException, InterruptedException, ParseException {
    Configuration conf = HBaseConfiguration.create();

    // parse user input
    Options opt = new Options();
    opt.addOption(OptionBuilder.withArgName("property=value").hasArg()
            .withDescription("Override HBase Configuration Settings").create("D"));
    opt.addOption(OptionBuilder.withArgName("region count").hasArg()
            .withDescription("Create a new table with a pre-split number of regions").create("c"));
    opt.addOption(OptionBuilder.withArgName("family:family:...").hasArg()
            .withDescription("Column Families to create with new table.  Required with -c").create("f"));
    opt.addOption("h", false, "Print this usage help");
    opt.addOption("r", false, "Perform a rolling split of an existing region");
    opt.addOption(OptionBuilder.withArgName("count").hasArg()
            .withDescription("Max outstanding splits that have unfinished major compactions").create("o"));
    opt.addOption(null, "firstrow", true, "First Row in Table for Split Algorithm");
    opt.addOption(null, "lastrow", true, "Last Row in Table for Split Algorithm");
    opt.addOption(null, "risky", false,
            "Skip verification steps to complete quickly. " + "STRONGLY DISCOURAGED for production systems.");
    CommandLine cmd = new GnuParser().parse(opt, args);

    if (cmd.hasOption("D")) {
        for (String confOpt : cmd.getOptionValues("D")) {
            String[] kv = confOpt.split("=", 2);
            if (kv.length == 2) {
                conf.set(kv[0], kv[1]);
                LOG.debug("-D configuration override: " + kv[0] + "=" + kv[1]);
            } else {
                throw new ParseException("-D option format invalid: " + confOpt);
            }
        }
    }

    if (cmd.hasOption("risky")) {
        conf.setBoolean("split.verify", false);
    }

    boolean createTable = cmd.hasOption("c") && cmd.hasOption("f");
    boolean rollingSplit = cmd.hasOption("r");
    boolean oneOperOnly = createTable ^ rollingSplit;

    if (2 != cmd.getArgList().size() || !oneOperOnly || cmd.hasOption("h")) {
        new HelpFormatter().printHelp("RegionSplitter <TABLE> <SPLITALGORITHM>\n"
                + "SPLITALGORITHM is a java class name of a class implementing "
                + "SplitAlgorithm, or one of the special strings HexStringSplit "
                + "or UniformSplit, which are built-in split algorithms. "
                + "HexStringSplit treats keys as hexadecimal ASCII, and "
                + "UniformSplit treats keys as arbitrary bytes.", opt);
        return;
    }
    String tableName = cmd.getArgs()[0];
    String splitClass = cmd.getArgs()[1];
    SplitAlgorithm splitAlgo = newSplitAlgoInstance(conf, splitClass);

    if (cmd.hasOption("firstrow")) {
        splitAlgo.setFirstRow(cmd.getOptionValue("firstrow"));
    }
    if (cmd.hasOption("lastrow")) {
        splitAlgo.setLastRow(cmd.getOptionValue("lastrow"));
    }

    if (createTable) {
        conf.set("split.count", cmd.getOptionValue("c"));
        createPresplitTable(tableName, splitAlgo, cmd.getOptionValue("f").split(":"), conf);
    }

    if (rollingSplit) {
        if (cmd.hasOption("o")) {
            conf.set("split.outstanding", cmd.getOptionValue("o"));
        }
        rollingSplit(tableName, splitAlgo, conf);
    }
}
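
Note the distinction this example draws: cmd.getArgs() returns the leftover positional arguments (here TABLE and SPLITALGORITHM), while getOptionValue/getOptionValues return only the values attached to declared options such as -D.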

From source file:org.apache.hadoop.hdfs.AvatarShellCommand.java

boolean validate(CommandLine cmd) {
    int commands = 0;
    commands += isWaitTxIdCommand ? 1 : 0;
    commands += isFailoverCommand ? 1 : 0;
    commands += isPrepfailoverCommand ? 1 : 0;
    commands += isMetasaveCommand ? 1 : 0;
    commands += isSafemodeCommand ? 1 : 0;
    commands += isSetAvatarCommand ? 1 : 0;
    commands += isIsInitializedCommand ? 1 : 0;
    commands += isShutdownAvatarCommand ? 1 : 0;
    commands += isShowAvatarCommand ? 1 : 0;
    commands += isSaveNamespaceCommand ? 1 : 0;

    if (commands > 1) {
        throwException("More than one command specified");
    }

    if (isZeroCommand && isOneCommand) {
        throwException("Both zero and one specified");
    }

    if ((isServiceCommand || isZeroCommand || isOneCommand) && isAddressCommand) {
        throwException("Both service|zero|one and address specified");
    }

    // /////////////////////// complex commands

    // -waittxid
    if (isWaitTxIdCommand) {
        noZeroOrOneOrAddress(waitTxIdCommand);
        assertArgsSize(waitTxIdCommand, cmd.getOptionValues(waitTxIdCommand), 0, 0);
    }

    // -failover
    if (isFailoverCommand) {
        noZeroOrOneOrAddress(failoverCommand);
        assertArgsSize(failoverCommand, cmd.getOptionValues(failoverCommand), 0, 0);
    }

    // -prepfailover
    if (isPrepfailoverCommand) {
        noZeroOrOneOrAddress(prepfailoverCommand);
        assertArgsSize(prepfailoverCommand, cmd.getOptionValues(prepfailoverCommand), 0, 0);
    }

    // /////////////// per-avatar node commands

    // -showAvatar
    if (isShowAvatarCommand) {
        eitherZeroOrOneOrAddress(showAvatarCommand);
        assertArgsSize(showAvatarCommand, cmd.getOptionValues(showAvatarCommand), 0, 0);
    }

    // -shutDownAvatar
    if (isShutdownAvatarCommand) {
        eitherZeroOrOneOrAddress(shutdownAvatarCommand);
        assertArgsSize(shutdownAvatarCommand, cmd.getOptionValues(shutdownAvatarCommand), 0, 0);
    }

    // -isInitialized
    if (isIsInitializedCommand) {
        eitherZeroOrOneOrAddress(isInitializedCommand);
        assertArgsSize(isInitializedCommand, cmd.getOptionValues(isInitializedCommand), 0, 0);
    }

    // -setAvatar
    if (isSetAvatarCommand) {
        eitherZeroOrOneOrAddress(setAvatarCommand);
        setAvatarArgs = cmd.getOptionValues(setAvatarCommand);
        assertArgsSize(setAvatarCommand, setAvatarArgs, 2, 1);
        if (setAvatarArgs != null) {
            boolean hasPrimary = false;
            for (String arg : setAvatarArgs) {
                if (!(arg.equals("primary") || arg.equals("force")))
                    throwException(CMD + setAvatarCommand + WRONG + Arrays.toString(setAvatarArgs));
                if (arg.equals("primary"))
                    hasPrimary = true;
            }
            if (!hasPrimary) {
                throwException(CMD + setAvatarCommand + WRONG + Arrays.toString(setAvatarArgs));
            }
        }
    }

    // -metasave
    if (isMetasaveCommand) {
        eitherZeroOrOneOrAddress(metasaveCommand);
        metasageArgs = cmd.getOptionValues(metasaveCommand);
        assertArgsSize(metasaveCommand, metasageArgs, 1, 1);
    }

    // -safemode
    if (isSafemodeCommand) {
        eitherZeroOrOneOrAddress(safemodeCommand);
        safemodeArgs = cmd.getOptionValues(safemodeCommand);
        assertArgsSize(safemodeCommand, safemodeArgs, 1, 1);
        for (String arg : safemodeArgs) {
            if (!safeModeActions.contains(arg))
                throwException(CMD + safemodeCommand + " - safemode action: " + arg + " unknown");
        }
    }

    // -saveNamespace
    if (isSaveNamespaceCommand) {
        eitherZeroOrOneOrAddress(saveNamespaceCommand);
        saveNamespaceArgs = cmd.getOptionValues(saveNamespaceCommand);
        assertArgsSize(saveNamespaceCommand, saveNamespaceArgs, 2, 0);
        if (saveNamespaceArgs != null) {
            for (String arg : saveNamespaceArgs) {
                if (!(arg.equals("uncompressed") || arg.equals("force")))
                    throwException(CMD + saveNamespaceCommand + WRONG + Arrays.toString(saveNamespaceArgs));
            }
        }
    }

    // -service
    if (isServiceCommand) {
        serviceArgs = cmd.getOptionValues(serviceCommand);
        assertArgsSize(serviceCommand, serviceArgs, 1, 1);
    }

    // -address
    if (isAddressCommand) {
        addressArgs = cmd.getOptionValues(addressCommand);
        assertArgsSize(addressCommand, addressArgs, 1, 1);
        int i = addressArgs[0].indexOf(":");
        if (i < 1)
            throwException(addressCommand + ": wrong host:port pair");
        try {
            Integer.valueOf(addressArgs[0].substring(i + 1));
        } catch (Exception e) {
            throwException(addressCommand + ": wrong host:port pair");
        }
    }

    // -zero -one
    assertArgsSize(zeroCommand, cmd.getOptionValues(zeroCommand), 0, 0);
    assertArgsSize(oneCommand, cmd.getOptionValues(oneCommand), 0, 0);

    return true;
}
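
The helpers used above (throwException, assertArgsSize, and the command-name fields) are defined elsewhere in AvatarShellCommand. A plausible sketch of assertArgsSize matching the (command, args, max, min) call sites above — our guess, not the actual Hadoop implementation:

    private void assertArgsSize(String command, String[] args, int max, int min) {
        // getOptionValues returns null when the option carries no values
        int size = (args == null) ? 0 : args.length;
        if (size < min || size > max) {
            throwException("-" + command + " expects between " + min + " and "
                    + max + " argument(s), got " + size);
        }
    }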

From source file:org.apache.hadoop.hive.metastore.hbase.HBaseImport.java

private int init(String... args) throws ParseException {
    Options options = new Options();

    doAll = doKerberos = false;
    parallel = 1;
    batchSize = 1000;

    options.addOption(
            OptionBuilder.withLongOpt("all").withDescription("Import the full metastore").create('a'));

    options.addOption(OptionBuilder.withLongOpt("batchsize")
            .withDescription("Number of partitions to read and write in a batch, defaults to 1000").hasArg()
            .create('b'));

    options.addOption(OptionBuilder.withLongOpt("database").withDescription("Import a single database")
            .hasArgs().create('d'));

    options.addOption(OptionBuilder.withLongOpt("help").withDescription("You're looking at it").create('h'));

    options.addOption(OptionBuilder.withLongOpt("function").withDescription("Import a single function")
            .hasArgs().create('f'));

    options.addOption(OptionBuilder.withLongOpt("kerberos")
            .withDescription("Import all kerberos related objects (master key, tokens)").create('k'));

    options.addOption(OptionBuilder.withLongOpt("parallel")
            .withDescription(
                    "Parallel factor for loading (only applied to tables and partitions), " + "defaults to 1")
            .hasArg().create('p'));

    options.addOption(
            OptionBuilder.withLongOpt("role").withDescription("Import a single role").hasArgs().create('r'));

    options.addOption(OptionBuilder.withLongOpt("tables").withDescription("Import a single table").hasArgs()
            .create('t'));

    CommandLine cli = new GnuParser().parse(options, args);

    // Process help if it was asked for; this must be done first
    if (cli.hasOption('h')) {
        printHelp(options);
        return 1;
    }

    boolean hasCmd = false;
    // Now process the other command line args
    if (cli.hasOption('a')) {
        hasCmd = true;
        doAll = true;
    }
    if (cli.hasOption('b')) {
        batchSize = Integer.parseInt(cli.getOptionValue('b'));
    }
    if (cli.hasOption('d')) {
        hasCmd = true;
        dbsToImport = Arrays.asList(cli.getOptionValues('d'));
    }
    if (cli.hasOption('f')) {
        hasCmd = true;
        functionsToImport = Arrays.asList(cli.getOptionValues('f'));
    }
    if (cli.hasOption('p')) {
        parallel = Integer.parseInt(cli.getOptionValue('p'));
    }
    if (cli.hasOption('r')) {
        hasCmd = true;
        rolesToImport = Arrays.asList(cli.getOptionValues('r'));
    }
    if (cli.hasOption('k')) {
        doKerberos = true;
    }
    if (cli.hasOption('t')) {
        hasCmd = true;
        tablesToImport = Arrays.asList(cli.getOptionValues('t'));
    }
    if (!hasCmd) {
        printHelp(options);
        return 1;
    }

    dbs = new ArrayList<>();
    // We don't want to bound the size of the table queue because we keep it all in memory
    partitionedTables = new LinkedBlockingQueue<>();
    tableNameQueue = new LinkedBlockingQueue<>();
    indexNameQueue = new LinkedBlockingQueue<>();

    // Bound the size of this queue so we don't get too much in memory.
    partQueue = new ArrayBlockingQueue<>(parallel * 2);
    return 0;
}
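
Options such as --database above are declared with hasArgs() (unlimited values), so one -d can name several databases and getOptionValues('d') returns them all. A short usage sketch (the database names are made up):

    Options options = new Options();
    options.addOption(OptionBuilder.withLongOpt("database")
            .withDescription("Import a single database").hasArgs().create('d'));

    CommandLine cli = new GnuParser().parse(options, new String[] { "-d", "sales", "marketing" });
    java.util.List<String> dbs = java.util.Arrays.asList(cli.getOptionValues('d')); // [sales, marketing]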

From source file:org.apache.hadoop.hive.metastore.tools.HiveMetaTool.java

public static void main(String[] args) {
    HiveMetaTool metaTool = new HiveMetaTool();
    metaTool.init();
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;

    try {
        try {
            line = parser.parse(metaTool.cmdLineOptions, args);
        } catch (ParseException e) {
            System.err.println("HiveMetaTool:Parsing failed.  Reason: " + e.getLocalizedMessage());
            printAndExit(metaTool);
        }

        if (line.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("metatool", metaTool.cmdLineOptions);
        } else if (line.hasOption("listFSRoot")) {
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not valid with listFSRoot");
                printAndExit(metaTool);
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not valid with listFSRoot");
                printAndExit(metaTool);
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not valid with listFSRoot");
                printAndExit(metaTool);
            }
            metaTool.listFSRoot();
        } else if (line.hasOption("executeJDOQL")) {
            String query = line.getOptionValue("executeJDOQL");
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not valid with executeJDOQL");
                printAndExit(metaTool);
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not valid with executeJDOQL");
                printAndExit(metaTool);
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not valid with executeJDOQL");
                printAndExit(metaTool);
            }
            if (query.toLowerCase().trim().startsWith("select")) {
                metaTool.executeJDOQLSelect(query);
            } else if (query.toLowerCase().trim().startsWith("update")) {
                metaTool.executeJDOQLUpdate(query);
            } else {
                System.err.println("HiveMetaTool:Unsupported statement type");
                printAndExit(metaTool);
            }
        } else if (line.hasOption("updateLocation")) {
            String[] loc = line.getOptionValues("updateLocation");
            boolean isDryRun = false;
            String serdepropKey = null;
            String tablePropKey = null;

            if (loc.length != 2 && loc.length != 3) {
                System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 "
                        + "optional argument but was passed " + loc.length + " arguments");
                printAndExit(metaTool);
            }

            Path newPath = new Path(loc[0]);
            Path oldPath = new Path(loc[1]);

            URI oldURI = oldPath.toUri();
            URI newURI = newPath.toUri();

            if (line.hasOption("dryRun")) {
                isDryRun = true;
            }

            if (line.hasOption("serdePropKey")) {
                serdepropKey = line.getOptionValue("serdePropKey");
            }

            if (line.hasOption("tablePropKey")) {
                tablePropKey = line.getOptionValue("tablePropKey");
            }

            /*
             * validate input - Both new and old URI should contain valid host names and valid schemes.
             * port is optional in both the URIs since HDFS HA NN URI doesn't have a port.
             */
            if (oldURI.getHost() == null || newURI.getHost() == null) {
                System.err.println("HiveMetaTool:A valid host is required in both old-loc and new-loc");
            } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
                System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
            } else {
                metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, tablePropKey, isDryRun);
            }
        } else {
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not a valid standalone option");
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not a valid standalone option");
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not a valid standalone option");
                printAndExit(metaTool);
            } else {
                System.err.print("HiveMetaTool:Parsing failed.  Reason: Invalid arguments: ");
                for (String s : line.getArgs()) {
                    System.err.print(s + " ");
                }
                System.err.println();
            }
            printAndExit(metaTool);
        }
    } finally {
        metaTool.shutdownObjectStore();
    }
}
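
The updateLocation option above receives two required and one optional value through a single option, so getOptionValues("updateLocation") yields a 2- or 3-element array. One way to declare such an option is hasArgs(n), which sets an upper bound on the values consumed without forcing all n to be present; this is a sketch of a plausible declaration, not the actual HiveMetaTool init code:

    Options options = new Options();
    Option updateLoc = OptionBuilder.withArgName("new-loc old-loc [key]")
            .hasArgs(3) // consumes up to three values after -updateLocation
            .withDescription("Update FS root location")
            .create("updateLocation");
    options.addOption(updateLoc);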