Example usage for org.apache.commons.cli CommandLine getArgList

List of usage examples for org.apache.commons.cli CommandLine getArgList

Introduction

On this page you can find example usage for org.apache.commons.cli CommandLine getArgList.

Prototype

public List getArgList() 

Document

Retrieve any left-over non-recognized options and arguments
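
Before the full examples below, here is a minimal sketch of the common pattern (the GetArgListDemo class name and the -v option are illustrative only; the parser style matches the PosixParser used by several examples on this page): parse the arguments, then read getArgList() for whatever the parser did not recognize as an option.

import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class GetArgListDemo {

    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("v", "verbose", false, "Enable verbose output");

        CommandLineParser parser = new PosixParser();
        CommandLine cmd = parser.parse(options, args);

        // getArgList() holds everything the parser did not consume as an option,
        // e.g. "-v input.txt output.txt" leaves [input.txt, output.txt].
        List<?> leftOver = cmd.getArgList();
        if (leftOver.isEmpty()) {
            System.err.println("Expected at least one file argument.");
            System.exit(1);
        }
        for (Object arg : leftOver) {
            System.out.println("Left-over argument: " + arg);
        }
    }
}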

Usage

From source file: org.apache.cassandra.tools.Shuffle.java

/**
 * Execute.
 *
 * @param args arguments passed on the command line
 * @throws Exception when face meets palm
 */
public static void main(String[] args) throws Exception {
    CommandLine cmd = null;
    try {
        cmd = processArguments(args);
    } catch (MissingArgumentException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    }

    // Sub command argument.
    if (cmd.getArgList().size() < 1) {
        System.err.println("Missing sub-command argument.");
        printShuffleHelp();
        System.exit(1);
    }
    String subCommand = (String) (cmd.getArgList()).get(0);

    String hostName = (cmd.getOptionValue("host") != null) ? cmd.getOptionValue("host") : DEFAULT_HOST;
    String port = (cmd.getOptionValue("port") != null) ? cmd.getOptionValue("port")
            : Integer.toString(DEFAULT_JMX_PORT);
    String username = cmd.getOptionValue("username");
    String password = cmd.getOptionValue("password");
    String thriftHost = (cmd.getOptionValue("thrift-host") != null) ? cmd.getOptionValue("thrift-host")
            : hostName;
    String thriftPort = (cmd.getOptionValue("thrift-port") != null) ? cmd.getOptionValue("thrift-port")
            : "9160";
    String onlyDc = cmd.getOptionValue("only-dc");
    boolean thriftFramed = cmd.hasOption("thrift-framed");
    boolean andEnable = cmd.hasOption("and-enable");
    int portNum = -1, thriftPortNum = -1;

    // Parse JMX port number
    if (port != null) {
        try {
            portNum = Integer.parseInt(port);
        } catch (NumberFormatException ferr) {
            System.err.printf("%s is not a valid JMX port number.%n", port);
            System.exit(1);
        }
    } else
        portNum = DEFAULT_JMX_PORT;

    // Parse Thrift port number
    if (thriftPort != null) {
        try {
            thriftPortNum = Integer.parseInt(thriftPort);
        } catch (NumberFormatException ferr) {
            System.err.printf("%s is not a valid port number.%n", thriftPort);
            System.exit(1);
        }
    } else
        thriftPortNum = 9160;

    Shuffle shuffler = new Shuffle(hostName, portNum, thriftHost, thriftPortNum, thriftFramed, username,
            password);

    try {
        if (subCommand.equals("create"))
            shuffler.shuffle(andEnable, onlyDc);
        else if (subCommand.equals("ls"))
            shuffler.ls();
        else if (subCommand.startsWith("en"))
            shuffler.enable();
        else if (subCommand.startsWith("dis"))
            shuffler.disable();
        else if (subCommand.equals("clear"))
            shuffler.clear();
        else {
            System.err.println("Unknown subcommand: " + subCommand);
            printShuffleHelp();
            System.exit(1);
        }
    } catch (ShuffleError err) {
        shuffler.writeln(err);
        System.exit(1);
    } finally {
        shuffler.close();
    }

    System.exit(0);
}

From source file: org.apache.cocoon.Main.java

/**
 * The <code>main</code> method.
 *
 * @param args a <code>String[]</code> of arguments
 * @exception Exception if an error occurs
 */
public static void main(String[] args) throws Exception {

    Main.setOptions();
    CommandLine line = new PosixParser().parse(options, args);
    listener = new OutputStreamListener(System.out);
    CocoonBean cocoon = new CocoonBean();
    cocoon.addListener(listener);

    if (line.hasOption(HELP_OPT)) {
        printUsage();
    } else if (line.hasOption(VERSION_OPT)) {
        printVersion();
    } else {
        String uriGroup = null;
        if (line.hasOption(URI_GROUP_NAME_OPT)) {
            uriGroup = line.getOptionValue(URI_GROUP_NAME_OPT);
        }

        String destDir = null;
        if (line.hasOption(XCONF_OPT)) {
            // destDir from command line overrides one in xconf file
            destDir = Main.processXConf(cocoon, line.getOptionValue(XCONF_OPT), destDir, uriGroup);
        }
        if (line.hasOption(DEST_DIR_OPT)) {
            destDir = line.getOptionValue(DEST_DIR_OPT);
        }

        if (line.hasOption(VERBOSE_OPT)) {
            cocoon.setVerbose(true);
        }
        if (line.hasOption(PRECOMPILE_ONLY_OPT)) {
            cocoon.setPrecompileOnly(true);
        }

        if (line.hasOption(WORK_DIR_OPT)) {
            String workDir = line.getOptionValue(WORK_DIR_OPT);
            if (workDir.length() == 0) {
                listener.messageGenerated(
                        "Careful, you must specify a work dir when using the -w/--workDir argument");
                System.exit(1);
            } else {
                cocoon.setWorkDir(workDir);
            }
        }
        if (line.hasOption(CONTEXT_DIR_OPT)) {
            String contextDir = line.getOptionValue(CONTEXT_DIR_OPT);
            if (contextDir.length() == 0) {
                listener.messageGenerated(
                        "Careful, you must specify a configuration file when using the -c/--contextDir argument");
                System.exit(1);
            } else {
                cocoon.setContextDir(contextDir);
            }
        }
        if (line.hasOption(CONFIG_FILE_OPT)) {
            cocoon.setConfigFile(line.getOptionValue(CONFIG_FILE_OPT));
        }
        if (line.hasOption(LOG_KIT_OPT)) {
            cocoon.setLogKit(line.getOptionValue(LOG_KIT_OPT));
        }
        if (line.hasOption(LOGGER_OPT)) {
            cocoon.setLogger(line.getOptionValue(LOGGER_OPT));
        }
        if (line.hasOption(LOG_LEVEL_OPT)) {
            cocoon.setLogLevel(line.getOptionValue(LOG_LEVEL_OPT));
        }
        if (line.hasOption(AGENT_OPT)) {
            cocoon.setAgentOptions(line.getOptionValue(AGENT_OPT));
        }
        if (line.hasOption(ACCEPT_OPT)) {
            cocoon.setAcceptOptions(line.getOptionValue(ACCEPT_OPT));
        }
        if (line.hasOption(DEFAULT_FILENAME_OPT)) {
            cocoon.setDefaultFilename(line.getOptionValue(DEFAULT_FILENAME_OPT));
        }
        if (line.hasOption(BROKEN_LINK_FILE_OPT)) {
            listener.setReportFile(line.getOptionValue(BROKEN_LINK_FILE_OPT));
        }
        if (line.hasOption(FOLLOW_LINKS_OPT)) {
            cocoon.setFollowLinks(BooleanUtils.toBoolean(line.getOptionValue(FOLLOW_LINKS_OPT)));
        }
        if (line.hasOption(CONFIRM_EXTENSIONS_OPT)) {
            cocoon.setConfirmExtensions(
                    BooleanUtils.toBoolean(line.getOptionValue(CONFIRM_EXTENSIONS_OPT, "yes")));
        }
        if (line.hasOption(LOAD_CLASS_OPT)) {
            cocoon.addLoadedClasses(Arrays.asList(line.getOptionValues(LOAD_CLASS_OPT)));
        }
        if (line.hasOption(URI_FILE_OPT)) {
            cocoon.addTargets(BeanConfigurator.processURIFile(line.getOptionValue(URI_FILE_OPT)), destDir);
        }

        cocoon.addTargets(line.getArgList(), destDir);

        listener.messageGenerated(CocoonBean.getProlog());

        if (cocoon.getTargetCount() == 0 && cocoon.isPrecompileOnly()) {
            listener.messageGenerated("Please, specify at least one starting URI.");
            System.exit(1);
        }

        cocoon.initialize();
        cocoon.process();
        cocoon.dispose();

        listener.complete();

        int exitCode = (listener.isSuccessful() ? 0 : 1);
        System.exit(exitCode);
    }
}

From source file: org.apache.easyant.core.EasyAntMain.java

/**
 * Process command line arguments. When ant is started from Launcher, launcher-only arguments do not get passed
 * through to this routine.
 *
 * @since Ant 1.6
 */
private void processArgs(CommandLine line) {
    String searchForThis;
    PrintStream logTo = null;

    if (line.hasOption("help")) {
        printUsage();
        return;
    }
    if (easyAntConfiguration.getMsgOutputLevel() >= Project.MSG_VERBOSE || line.hasOption("version")) {
        printVersion();
        if (line.hasOption("version")) {
            return;
        }
    }
    if (line.hasOption("showMemoryDetails")) {
        easyAntConfiguration.setShowMemoryDetails(true);
    }
    if (line.hasOption("diagnostics")) {
        Diagnostics.doReport(System.out, easyAntConfiguration.getMsgOutputLevel());
        return;
    }
    if (line.hasOption("quiet")) {
        easyAntConfiguration.setMsgOutputLevel(Project.MSG_WARN);
    }
    if (line.hasOption("verbose")) {
        easyAntConfiguration.setMsgOutputLevel(Project.MSG_VERBOSE);
    }
    if (line.hasOption("debug")) {
        easyAntConfiguration.setMsgOutputLevel(Project.MSG_DEBUG);
    }
    if (line.hasOption("noinput")) {
        easyAntConfiguration.setAllowInput(false);
    }
    if (line.hasOption("logfile")) {
        try {
            File logFile = new File(line.getOptionValue("logfile"));
            logTo = new PrintStream(new FileOutputStream(logFile));
            isLogFileUsed = true;
        } catch (IOException ioe) {
            String msg = "Cannot write on the specified log file. "
                    + "Make sure the path exists and you have write " + "permissions.";
            throw new BuildException(msg);
        } catch (ArrayIndexOutOfBoundsException aioobe) {
            String msg = "You must specify a log file when " + "using the -log argument";
            throw new BuildException(msg);
        }
    }
    if (line.hasOption("buildmodule")) {
        File buildModule = new File(line.getOptionValue("buildmodule").replace('/', File.separatorChar));
        easyAntConfiguration.setBuildModule(buildModule);
    }
    if (line.hasOption("buildfile")) {
        File buildFile = new File(line.getOptionValue("buildfile").replace('/', File.separatorChar));
        easyAntConfiguration.setBuildFile(buildFile);
    }
    if (line.hasOption("buildconf")) {
        easyAntConfiguration.getActiveBuildConfigurations().add(line.getOptionValue("buildconf"));
    }

    File easyantConfFile = null;

    if (line.hasOption("configfile")) {
        easyantConfFile = new File(line.getOptionValue("configfile").replace('/', File.separatorChar));
    } else {
        // if no command line switch is specified check the default location

        File easyantHome = new File(
                System.getProperty(EasyAntMagicNames.EASYANT_HOME).replace('/', File.separatorChar));
        File defaultGlobalEasyantConfFile = new File(easyantHome,
                EasyAntConstants.DEFAULT_GLOBAL_EASYANT_CONF_FILE);

        if (defaultGlobalEasyantConfFile.exists()) {
            easyantConfFile = defaultGlobalEasyantConfFile;
        }
    }

    if (easyantConfFile != null) {
        try {
            easyAntConfiguration = EasyantConfigurationFactory.getInstance()
                    .createConfigurationFromFile(easyAntConfiguration, easyantConfFile.toURI().toURL());
        } catch (Exception e) {
            throw new BuildException(e);
        }
    }

    if (line.hasOption("listener")) {
        easyAntConfiguration.getListeners().add(line.getOptionValue("listener"));
    }
    if (line.hasOption("D")) {
        easyAntConfiguration.getDefinedProps().putAll(line.getOptionProperties("D"));
    }
    if (line.hasOption("logger")) {
        if (easyAntConfiguration.getLoggerClassname() != null) {
            throw new BuildException("Only one logger class may be specified.");
        }
        easyAntConfiguration.setLoggerClassname(line.getOptionValue("logger"));
    }
    if (line.hasOption("inputhandler")) {
        if (easyAntConfiguration.getInputHandlerClassname() != null) {
            throw new BuildException("Only one input handler class may " + "be specified.");
        }
        easyAntConfiguration.setInputHandlerClassname(line.getOptionValue("inputhandler"));
    }
    if (line.hasOption("emacs")) {
        easyAntConfiguration.setEmacsMode(true);
    }
    if (line.hasOption("projecthelp")) {
        // set the flag to display the targets and quit
        projectHelp = true;
    }
    if (line.hasOption("find")) {
        // eat up next arg if present, default to module.ivy
        if (line.getOptionValues("find").length > 0) {
            searchForThis = line.getOptionValue("find");

        } else {
            searchForThis = EasyAntConstants.DEFAULT_BUILD_MODULE;
        }
        easyAntConfiguration.setBuildModule(new File(searchForThis));
        easyAntConfiguration.setBuildModuleLookupEnabled(true);
    }
    if (line.hasOption("propertyfile")) {
        propertyFiles.add(line.getOptionValue("propertyfile"));
    }
    if (line.hasOption("keep-going")) {
        easyAntConfiguration.setKeepGoingMode(true);
    }
    if (line.hasOption("offline")) {
        easyAntConfiguration.setOffline(true);
    }
    if (line.hasOption("nice")) {
        easyAntConfiguration.setThreadPriority(Integer.decode(line.getOptionValue("nice")));

        if (easyAntConfiguration.getThreadPriority() < Thread.MIN_PRIORITY
                || easyAntConfiguration.getThreadPriority() > Thread.MAX_PRIORITY) {
            throw new BuildException("Niceness value is out of the range 1-10");
        }
    }
    if (line.hasOption("autoproxy")) {
        easyAntConfiguration.setProxy(true);
    }
    if (!line.getArgList().isEmpty()) {
        for (Object o : line.getArgList()) {
            String target = (String) o;
            easyAntConfiguration.getTargets().add(target);
        }
    }

    // Load the property files specified by -propertyfile
    loadPropertyFiles();

    if (logTo != null) {
        easyAntConfiguration.setOut(logTo);
        easyAntConfiguration.setErr(logTo);
        System.setOut(easyAntConfiguration.getOut());
        System.setErr(easyAntConfiguration.getErr());
    }
    readyToRun = true;
}

From source file: org.apache.hadoop.hbase.client.HBaseFsck.java

/**
 * Main program
 *
 * @param args
 * @throws ParseException
 */
public static void main(String[] args)
        throws IOException, MasterNotRunningException, InterruptedException, ParseException {

    Options opt = new Options();
    opt.addOption(OptionBuilder.withArgName("property=value").hasArg()
            .withDescription("Override HBase Configuration Settings").create("D"));
    opt.addOption(OptionBuilder.withArgName("timeInSeconds").hasArg()
            .withDescription("Ignore regions with metadata updates in the last {timeInSeconds}.")
            .withType(PatternOptionBuilder.NUMBER_VALUE).create("timelag"));
    opt.addOption(OptionBuilder.withArgName("timeInSeconds").hasArg()
            .withDescription("Stop scan jobs after a fixed time & analyze existing data.")
            .withType(PatternOptionBuilder.NUMBER_VALUE).create("timeout"));
    opt.addOption("fix", false, "Try to fix some of the errors.");
    opt.addOption("y", false, "Do not prompt for reconfirmation from users on fix.");
    opt.addOption("w", false, "Try to fix warnings as well as errors.");
    opt.addOption("summary", false, "Print only summary of the tables and status.");
    opt.addOption("detail", false, "Display full report of all regions.");
    opt.addOption("checkRegionInfo", false, "Check if .regioninfo is consistent with .META.");
    opt.addOption("h", false, "Display this help");
    CommandLine cmd = new GnuParser().parse(opt, args);

    // any unknown args or -h
    if (!cmd.getArgList().isEmpty() || cmd.hasOption("h")) {
        new HelpFormatter().printHelp("hbck", opt);
        return;
    }

    Configuration conf = HBaseConfiguration.create();
    conf.set("fs.defaultFS", conf.get("hbase.rootdir"));

    if (cmd.hasOption("D")) {
        for (String confOpt : cmd.getOptionValues("D")) {
            String[] kv = confOpt.split("=", 2);
            if (kv.length == 2) {
                conf.set(kv[0], kv[1]);
                LOG.debug("-D configuration override: " + kv[0] + "=" + kv[1]);
            } else {
                throw new ParseException("-D option format invalid: " + confOpt);
            }
        }
    }
    if (cmd.hasOption("timeout")) {
        Object timeout = cmd.getParsedOptionValue("timeout");
        if (timeout instanceof Long) {
            conf.setLong(HConstants.HBASE_RPC_TIMEOUT_KEY, ((Long) timeout).longValue() * 1000);
        } else {
            throw new ParseException("-timeout needs a long value.");
        }
    }

    // create a fsck object
    HBaseFsck fsck = new HBaseFsck(conf);
    fsck.setTimeLag(HBaseFsckRepair.getEstimatedFixTime(conf));

    if (cmd.hasOption("details")) {
        fsck.displayFullReport();
    }
    if (cmd.hasOption("timelag")) {
        Object timelag = cmd.getParsedOptionValue("timelag");
        if (timelag instanceof Long) {
            fsck.setTimeLag(((Long) timelag).longValue() * 1000);
        } else {
            throw new ParseException("-timelag needs a long value.");
        }
    }
    if (cmd.hasOption("fix")) {
        fsck.setFixState(FixState.ERROR);
    }
    if (cmd.hasOption("w")) {
        fsck.setFixState(FixState.ALL);
    }
    if (cmd.hasOption("y")) {
        fsck.setPromptResponse(true);
    }
    if (cmd.hasOption("summary")) {
        fsck.setSummary();
    }
    if (cmd.hasOption("checkRegionInfo")) {
        checkRegionInfo = true;
    }

    int code = -1;
    try {
        // do the real work of fsck
        code = fsck.doWork();
        // If we have tried to fix the HBase state, run fsck again
        // to see if we have fixed our problems
        if (fsck.shouldRerun()) {
            fsck.setFixState(FixState.NONE);
            long fixTime = HBaseFsckRepair.getEstimatedFixTime(conf);
            if (fixTime > 0) {
                LOG.info("Waiting " + StringUtils.formatTime(fixTime)
                        + " before checking to see if fixes worked...");
                Thread.sleep(fixTime);
            }
            code = fsck.doWork();
        }
    } catch (InterruptedException ie) {
        LOG.info("HBCK was interrupted by user. Exiting...");
        code = -1;
    }

    Runtime.getRuntime().exit(code);
}

From source file: org.apache.hadoop.hbase.master.HMasterCommandLine.java

public int run(String args[]) throws Exception {
    Options opt = new Options();
    opt.addOption("localRegionServers", true,
            "RegionServers to start in master process when running standalone");
    opt.addOption("masters", true, "Masters to start in this process");
    opt.addOption("minRegionServers", true, "Minimum RegionServers needed to host user tables");
    opt.addOption("backup", false, "Do not try to become HMaster until the primary fails");

    CommandLine cmd;
    try {
        cmd = new GnuParser().parse(opt, args);
    } catch (ParseException e) {
        LOG.error("Could not parse: ", e);
        usage(null);
        return 1;
    }

    if (cmd.hasOption("minRegionServers")) {
        String val = cmd.getOptionValue("minRegionServers");
        getConf().setInt("hbase.regions.server.count.min", Integer.valueOf(val));
        LOG.debug("minRegionServers set to " + val);
    }

    // minRegionServers used to be minServers.  Support it too.
    if (cmd.hasOption("minServers")) {
        String val = cmd.getOptionValue("minServers");
        getConf().setInt("hbase.regions.server.count.min", Integer.valueOf(val));
        LOG.debug("minServers set to " + val);
    }

    // check if we are the backup master - override the conf if so
    if (cmd.hasOption("backup")) {
        getConf().setBoolean(HConstants.MASTER_TYPE_BACKUP, true);
    }

    // How many regionservers to startup in this process (we run regionservers in same process as
    // master when we are in local/standalone mode. Useful testing)
    if (cmd.hasOption("localRegionServers")) {
        String val = cmd.getOptionValue("localRegionServers");
        getConf().setInt("hbase.regionservers", Integer.valueOf(val));
        LOG.debug("localRegionServers set to " + val);
    }
    // How many masters to startup inside this process; useful testing
    if (cmd.hasOption("masters")) {
        String val = cmd.getOptionValue("masters");
        getConf().setInt("hbase.masters", Integer.valueOf(val));
        LOG.debug("masters set to " + val);
    }

    @SuppressWarnings("unchecked")
    List<String> remainingArgs = cmd.getArgList();
    if (remainingArgs.size() != 1) {
        usage(null);
        return 1;
    }

    String command = remainingArgs.get(0);

    if ("start".equals(command)) {
        return startMaster();
    } else if ("stop".equals(command)) {
        return stopMaster();
    } else if ("clear".equals(command)) {
        return (ZNodeClearer.clear(getConf()) ? 0 : 1);
    } else {
        usage("Invalid command: " + command);
        return 1;
    }
}

From source file: org.apache.hadoop.hbase.regionserver.HFileReadWriteTest.java

@SuppressWarnings("unchecked")
public boolean parseOptions(String args[]) {

    Options options = new Options();
    options.addOption(OUTPUT_DIR_OPTION, true, "Output directory" + Workload.MERGE.onlyUsedFor());
    options.addOption(COMPRESSION_OPTION, true, " Compression type, one of "
            + Arrays.toString(Compression.Algorithm.values()) + Workload.MERGE.onlyUsedFor());
    options.addOption(BLOOM_FILTER_OPTION, true,
            "Bloom filter type, one of " + Arrays.toString(BloomType.values()) + Workload.MERGE.onlyUsedFor());
    options.addOption(BLOCK_SIZE_OPTION, true, "HFile block size" + Workload.MERGE.onlyUsedFor());
    options.addOption(DURATION_OPTION, true, "The amount of time to run the " + "random read workload for"
            + Workload.RANDOM_READS.onlyUsedFor());
    options.addOption(NUM_THREADS_OPTION, true,
            "The number of random " + "reader threads" + Workload.RANDOM_READS.onlyUsedFor());
    options.addOption(LoadTestTool.OPT_DATA_BLOCK_ENCODING, true, LoadTestTool.OPT_DATA_BLOCK_ENCODING_USAGE);
    options.addOptionGroup(Workload.getOptionGroup());

    if (args.length == 0) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(HFileReadWriteTest.class.getSimpleName(), options, true);
        return false;
    }

    CommandLineParser parser = new PosixParser();
    CommandLine cmdLine;
    try {
        cmdLine = parser.parse(options, args);
    } catch (ParseException ex) {
        LOG.error(ex);
        return false;
    }

    workload = Workload.fromCmdLine(cmdLine);
    if (workload == null)
        return false;

    inputFileNames = (List<String>) cmdLine.getArgList();

    if (inputFileNames.size() == 0) {
        LOG.error("No input file names specified");
        return false;
    }

    if (inputFileNames.size() < workload.minNumInputFiles) {
        LOG.error("Too few input files: at least " + workload.minNumInputFiles + " required");
        return false;
    }

    if (inputFileNames.size() > workload.maxNumInputFiles) {
        LOG.error("Too many input files: at most " + workload.minNumInputFiles + " allowed");
        return false;
    }

    if (cmdLine.hasOption(COMPRESSION_OPTION)) {
        compression = Compression.Algorithm.valueOf(cmdLine.getOptionValue(COMPRESSION_OPTION));
    }

    if (cmdLine.hasOption(BLOOM_FILTER_OPTION)) {
        bloomType = BloomType.valueOf(cmdLine.getOptionValue(BLOOM_FILTER_OPTION));
    }

    if (cmdLine.hasOption(LoadTestTool.OPT_DATA_BLOCK_ENCODING)) {
        dataBlockEncoding = DataBlockEncoding
                .valueOf(cmdLine.getOptionValue(LoadTestTool.OPT_DATA_BLOCK_ENCODING));
    }

    blockSize = conf.getInt("hfile.min.blocksize.size", 65536);
    if (cmdLine.hasOption(BLOCK_SIZE_OPTION))
        blockSize = Integer.valueOf(cmdLine.getOptionValue(BLOCK_SIZE_OPTION));

    if (workload == Workload.MERGE) {
        String outputDirStr = cmdLine.getOptionValue(OUTPUT_DIR_OPTION);
        if (outputDirStr == null) {
            LOG.error("Output directory is not specified");
            return false;
        }
        outputDir = new Path(outputDirStr);
        // Will be checked for existence in validateConfiguration.
    }

    if (workload == Workload.RANDOM_READS) {
        if (!requireOptions(cmdLine, new String[] { DURATION_OPTION, NUM_THREADS_OPTION })) {
            return false;
        }

        durationSec = Integer.parseInt(cmdLine.getOptionValue(DURATION_OPTION));
        numReadThreads = Integer.parseInt(cmdLine.getOptionValue(NUM_THREADS_OPTION));
    }

    Collections.sort(inputFileNames);

    return true;
}

From source file: org.apache.hadoop.hbase.regionserver.wal.HLogPrettyPrinter.java

/**
 * Pass one or more log file names and formatting options and it will dump out
 * a text version of the contents on <code>stdout</code>.
 *
 * @param args
 *          Command line arguments
 * @throws IOException
 *           Thrown upon file system errors etc.
 * @throws ParseException
 *           Thrown if command-line parsing fails.
 */
public static void run(String[] args) throws IOException {
    // create options
    Options options = new Options();
    options.addOption("h", "help", false, "Output help message");
    options.addOption("j", "json", false, "Output JSON");
    options.addOption("p", "printvals", false, "Print values");
    options.addOption("r", "region", true, "Region to filter by. Pass region name; e.g. 'hbase:meta,,1'");
    options.addOption("s", "sequence", true, "Sequence to filter by. Pass sequence number.");
    options.addOption("w", "row", true, "Row to filter by. Pass row name.");

    HLogPrettyPrinter printer = new HLogPrettyPrinter();
    CommandLineParser parser = new PosixParser();
    List files = null;
    try {
        CommandLine cmd = parser.parse(options, args);
        files = cmd.getArgList();
        if (files.size() == 0 || cmd.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("HLog <filename...>", options, true);
            System.exit(-1);
        }
        // configure the pretty printer using command line options
        if (cmd.hasOption("p"))
            printer.enableValues();
        if (cmd.hasOption("j"))
            printer.enableJSON();
        if (cmd.hasOption("r"))
            printer.setRegionFilter(cmd.getOptionValue("r"));
        if (cmd.hasOption("s"))
            printer.setSequenceFilter(Long.parseLong(cmd.getOptionValue("s")));
        if (cmd.hasOption("w"))
            printer.setRowFilter(cmd.getOptionValue("w"));
    } catch (ParseException e) {
        e.printStackTrace();
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("HFile filename(s) ", options, true);
        System.exit(-1);
    }
    // get configuration, file system, and process the given files
    Configuration conf = HBaseConfiguration.create();
    FSUtils.setFsDefault(conf, FSUtils.getRootDir(conf));

    // begin output
    printer.beginPersistentOutput();
    for (Object f : files) {
        Path file = new Path((String) f);
        FileSystem fs = file.getFileSystem(conf);
        if (!fs.exists(file)) {
            System.err.println("ERROR, file doesnt exist: " + file);
            return;
        }
        printer.processFile(conf, file);
    }
    printer.endPersistentOutput();
}

From source file: org.apache.hadoop.hbase.rest.Main.java

/**
 * The main method for the HBase rest server.
 * @param args command-line arguments
 * @throws Exception exception
 */
public static void main(String[] args) throws Exception {
    Log LOG = LogFactory.getLog("RESTServer");

    VersionInfo.logVersion();
    Configuration conf = HBaseConfiguration.create();
    RESTServlet servlet = RESTServlet.getInstance(conf);

    Options options = new Options();
    options.addOption("p", "port", true, "Port to bind to [default: 8080]");
    options.addOption("ro", "readonly", false,
            "Respond only to GET HTTP " + "method requests [default: false]");

    CommandLine commandLine = null;
    try {
        commandLine = new PosixParser().parse(options, args);
    } catch (ParseException e) {
        LOG.error("Could not parse: ", e);
        printUsageAndExit(options, -1);
    }

    // check for user-defined port setting, if so override the conf
    if (commandLine != null && commandLine.hasOption("port")) {
        String val = commandLine.getOptionValue("port");
        servlet.getConfiguration().setInt("hbase.rest.port", Integer.valueOf(val));
        LOG.debug("port set to " + val);
    }

    // check if server should only process GET requests, if so override the conf
    if (commandLine != null && commandLine.hasOption("readonly")) {
        servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
        LOG.debug("readonly set to true");
    }

    @SuppressWarnings("unchecked")
    List<String> remainingArgs = commandLine != null ? commandLine.getArgList() : new ArrayList<String>();
    if (remainingArgs.size() != 1) {
        printUsageAndExit(options, 1);
    }

    String command = remainingArgs.get(0);
    if ("start".equals(command)) {
        // continue and start container
    } else if ("stop".equals(command)) {
        System.exit(1);
    } else {
        printUsageAndExit(options, 1);
    }

    // set up the Jersey servlet container for Jetty
    ServletHolder sh = new ServletHolder(ServletContainer.class);
    sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
            ResourceConfig.class.getCanonicalName());
    sh.setInitParameter("com.sun.jersey.config.property.packages", "jetty");

    // set up Jetty and run the embedded server

    Server server = new Server();

    Connector connector = new SelectChannelConnector();
    connector.setPort(servlet.getConfiguration().getInt("hbase.rest.port", 8080));
    connector.setHost(servlet.getConfiguration().get("hbase.rest.host", "0.0.0.0"));

    server.addConnector(connector);

    // Set the default max thread number to 100 to limit
    // the number of concurrent requests so that REST server doesn't OOM easily.
    // Jetty sets the default max thread number to 250 if we don't set it.
    //
    // Our default min thread number 2 is the same as that used by Jetty.
    int maxThreads = servlet.getConfiguration().getInt("hbase.rest.threads.max", 100);
    int minThreads = servlet.getConfiguration().getInt("hbase.rest.threads.min", 2);
    QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads);
    threadPool.setMinThreads(minThreads);
    server.setThreadPool(threadPool);

    server.setSendServerVersion(false);
    server.setSendDateHeader(false);
    server.setStopAtShutdown(true);
    // set up context
    Context context = new Context(server, "/", Context.SESSIONS);
    context.addServlet(sh, "/*");
    context.addFilter(GzipFilter.class, "/*", 0);

    // login the server principal (if using secure Hadoop)   
    if (User.isSecurityEnabled() && User.isHBaseSecurityEnabled(conf)) {
        String machineName = Strings
                .domainNamePointerToHostName(DNS.getDefaultHost(conf.get("hbase.rest.dns.interface", "default"),
                        conf.get("hbase.rest.dns.nameserver", "default")));
        User.login(conf, "hbase.rest.keytab.file", "hbase.rest.kerberos.principal", machineName);
    }

    // start server
    server.start();
    server.join();
}

From source file: org.apache.hadoop.hbase.rest.RESTServer.java

/**
 * The main method for the HBase rest server.
 * @param args command-line arguments
 * @throws Exception exception
 */
public static void main(String[] args) throws Exception {
    Log LOG = LogFactory.getLog("RESTServer");

    VersionInfo.logVersion();
    FilterHolder authFilter = null;
    Configuration conf = HBaseConfiguration.create();
    Class<? extends ServletContainer> containerClass = ServletContainer.class;
    UserProvider userProvider = UserProvider.instantiate(conf);
    // login the server principal (if using secure Hadoop)
    if (userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled()) {
        String machineName = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
                conf.get(REST_DNS_INTERFACE, "default"), conf.get(REST_DNS_NAMESERVER, "default")));
        String keytabFilename = conf.get(REST_KEYTAB_FILE);
        Preconditions.checkArgument(keytabFilename != null && !keytabFilename.isEmpty(),
                REST_KEYTAB_FILE + " should be set if security is enabled");
        String principalConfig = conf.get(REST_KERBEROS_PRINCIPAL);
        Preconditions.checkArgument(principalConfig != null && !principalConfig.isEmpty(),
                REST_KERBEROS_PRINCIPAL + " should be set if security is enabled");
        userProvider.login(REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName);
        if (conf.get(REST_AUTHENTICATION_TYPE) != null) {
            containerClass = RESTServletContainer.class;
            authFilter = new FilterHolder();
            authFilter.setClassName(AuthFilter.class.getName());
            authFilter.setName("AuthenticationFilter");
        }
    }

    UserGroupInformation realUser = userProvider.getCurrent().getUGI();
    RESTServlet servlet = RESTServlet.getInstance(conf, realUser);

    Options options = new Options();
    options.addOption("p", "port", true, "Port to bind to [default: 8080]");
    options.addOption("ro", "readonly", false,
            "Respond only to GET HTTP " + "method requests [default: false]");
    options.addOption(null, "infoport", true, "Port for web UI");

    CommandLine commandLine = null;
    try {
        commandLine = new PosixParser().parse(options, args);
    } catch (ParseException e) {
        LOG.error("Could not parse: ", e);
        printUsageAndExit(options, -1);
    }

    // check for user-defined port setting, if so override the conf
    if (commandLine != null && commandLine.hasOption("port")) {
        String val = commandLine.getOptionValue("port");
        servlet.getConfiguration().setInt("hbase.rest.port", Integer.valueOf(val));
        LOG.debug("port set to " + val);
    }

    // check if server should only process GET requests, if so override the conf
    if (commandLine != null && commandLine.hasOption("readonly")) {
        servlet.getConfiguration().setBoolean("hbase.rest.readonly", true);
        LOG.debug("readonly set to true");
    }

    // check for user-defined info server port setting, if so override the conf
    if (commandLine != null && commandLine.hasOption("infoport")) {
        String val = commandLine.getOptionValue("infoport");
        servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.valueOf(val));
        LOG.debug("Web UI port set to " + val);
    }

    @SuppressWarnings("unchecked")
    List<String> remainingArgs = commandLine != null ? commandLine.getArgList() : new ArrayList<String>();
    if (remainingArgs.size() != 1) {
        printUsageAndExit(options, 1);
    }

    String command = remainingArgs.get(0);
    if ("start".equals(command)) {
        // continue and start container
    } else if ("stop".equals(command)) {
        System.exit(1);
    } else {
        printUsageAndExit(options, 1);
    }

    // set up the Jersey servlet container for Jetty
    ServletHolder sh = new ServletHolder(containerClass);
    sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
            ResourceConfig.class.getCanonicalName());
    sh.setInitParameter("com.sun.jersey.config.property.packages", "jetty");
    // The servlet holder below is instantiated to only handle the case
    // of the /status/cluster returning arrays of nodes (live/dead). Without
    // this servlet holder, the problem is that the node arrays in the response
    // are collapsed to single nodes. We want to be able to treat the
    // node lists as POJO in the response to /status/cluster servlet call,
    // but not change the behavior for any of the other servlets
    // Hence we don't use the servlet holder for all servlets / paths
    ServletHolder shPojoMap = new ServletHolder(containerClass);
    @SuppressWarnings("unchecked")
    Map<String, String> shInitMap = sh.getInitParameters();
    for (Entry<String, String> e : shInitMap.entrySet()) {
        shPojoMap.setInitParameter(e.getKey(), e.getValue());
    }
    shPojoMap.setInitParameter(JSONConfiguration.FEATURE_POJO_MAPPING, "true");

    // set up Jetty and run the embedded server

    Server server = new Server();

    Connector connector = new SelectChannelConnector();
    if (conf.getBoolean(REST_SSL_ENABLED, false)) {
        SslSelectChannelConnector sslConnector = new SslSelectChannelConnector();
        String keystore = conf.get(REST_SSL_KEYSTORE_STORE);
        String password = conf.get(REST_SSL_KEYSTORE_PASSWORD);
        String keyPassword = conf.get(REST_SSL_KEYSTORE_KEYPASSWORD, password);
        sslConnector.setKeystore(keystore);
        sslConnector.setPassword(password);
        sslConnector.setKeyPassword(keyPassword);
        connector = sslConnector;
    }
    connector.setPort(servlet.getConfiguration().getInt("hbase.rest.port", 8080));
    connector.setHost(servlet.getConfiguration().get("hbase.rest.host", "0.0.0.0"));

    server.addConnector(connector);

    // Set the default max thread number to 100 to limit
    // the number of concurrent requests so that REST server doesn't OOM easily.
    // Jetty sets the default max thread number to 250 if we don't set it.
    //
    // Our default min thread number 2 is the same as that used by Jetty.
    int maxThreads = servlet.getConfiguration().getInt("hbase.rest.threads.max", 100);
    int minThreads = servlet.getConfiguration().getInt("hbase.rest.threads.min", 2);
    QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads);
    threadPool.setMinThreads(minThreads);
    server.setThreadPool(threadPool);

    server.setSendServerVersion(false);
    server.setSendDateHeader(false);
    server.setStopAtShutdown(true);
    // set up context
    Context context = new Context(server, "/", Context.SESSIONS);
    context.addServlet(shPojoMap, "/status/cluster");
    context.addServlet(sh, "/*");
    if (authFilter != null) {
        context.addFilter(authFilter, "/*", 1);
    }

    // Load filters from configuration.
    String[] filterClasses = servlet.getConfiguration().getStrings(FILTER_CLASSES,
            ArrayUtils.EMPTY_STRING_ARRAY);
    for (String filter : filterClasses) {
        filter = filter.trim();
        context.addFilter(Class.forName(filter), "/*", 0);
    }
    HttpServerUtil.constrainHttpMethods(context);

    // Put up info server.
    int port = conf.getInt("hbase.rest.info.port", 8085);
    if (port >= 0) {
        conf.setLong("startcode", System.currentTimeMillis());
        String a = conf.get("hbase.rest.info.bindAddress", "0.0.0.0");
        InfoServer infoServer = new InfoServer("rest", a, port, false, conf);
        infoServer.setAttribute("hbase.conf", conf);
        infoServer.start();
    }

    // start server
    server.start();
    server.join();
}

From source file: org.apache.hadoop.hbase.test.IntegrationTestWithCellVisibilityLoadAndVerify.java

@Override
protected void processOptions(CommandLine cmd) {
    List args = cmd.getArgList();
    if (args.size() > 0) {
        usage();
        throw new RuntimeException("No args expected.");
    }
    // We always want loadAndVerify action
    args.add("loadAndVerify");
    super.processOptions(cmd);
}
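
Note that this last example appends "loadAndVerify" to the list returned by getArgList(), which only works if the parser hands back its live internal argument list rather than a defensive copy; treat that as an implementation detail of the Commons CLI version in use and verify it before relying on the mutation being visible to later processing (here, super.processOptions(cmd)).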