Example usage for org.apache.commons.cli OptionBuilder withDescription

Introduction

This page collects example usages of org.apache.commons.cli OptionBuilder.withDescription, drawn from real open-source projects.

Prototype

public static OptionBuilder withDescription(String newDescription) 

Document

The next Option created will have the specified description.
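
Before the project examples, here is a minimal, self-contained sketch of how withDescription fits into a typical build/parse/help cycle. It is not taken from the projects below; it assumes Commons CLI 1.x, and the class and option names are hypothetical.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;

public class WithDescriptionDemo {
    @SuppressWarnings("static-access")
    public static void main(String[] args) throws Exception {
        // withDescription() stores the description in OptionBuilder's static
        // state; the next create(...) call attaches it to the returned Option.
        Option verbose = OptionBuilder.withDescription("Print more information while working")
                .withLongOpt("verbose").create("v");
        Option output = OptionBuilder.withArgName("dir").hasArg()
                .withDescription("Write output to <dir>").withLongOpt("output").create("o");

        Options options = new Options();
        options.addOption(verbose);
        options.addOption(output);

        CommandLine cmd = new GnuParser().parse(options, args);
        if (cmd.hasOption("v")) {
            System.out.println("verbose mode enabled");
        }

        // The descriptions appear in the generated help text.
        new HelpFormatter().printHelp("demo", options);
    }
}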

Usage

From source file:org.apache.sqoop.shell.UpdateJobFunction.java

@SuppressWarnings("static-access")
public UpdateJobFunction() {
    this.addOption(OptionBuilder.withDescription(resourceString(Constants.RES_PROMPT_JOB_ID))
            .withLongOpt(Constants.OPT_JID).isRequired().hasArg().create(Constants.OPT_JID_CHAR));
}
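
The @SuppressWarnings("static-access") annotation recurs throughout these examples because OptionBuilder's builder methods are static yet return an OptionBuilder instance for chaining; calling a static method through that instance reference is what triggers the compiler's static-access warning.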

From source file:org.apache.sqoop.shell.UpdateLinkFunction.java

@SuppressWarnings("static-access")
public UpdateLinkFunction() {
    this.addOption(OptionBuilder.withDescription(resourceString(Constants.RES_PROMPT_LINK_ID))
            .withLongOpt(Constants.OPT_LID).isRequired().hasArg().create(Constants.OPT_LID_CHAR));
}

From source file:org.apache.sqoop.tool.BaseSqoopTool.java

/**
 * @return RelatedOptions used by most/all Sqoop tools.
 */
protected RelatedOptions getCommonOptions() {
    // Connection args (common)
    RelatedOptions commonOpts = new RelatedOptions("Common arguments");
    commonOpts.addOption(OptionBuilder.withArgName("jdbc-uri").hasArg()
            .withDescription("Specify JDBC connect string").withLongOpt(CONNECT_STRING_ARG).create());
    commonOpts.addOption(OptionBuilder.withArgName("class-name").hasArg()
            .withDescription("Specify connection manager class name").withLongOpt(CONN_MANAGER_CLASS_NAME)
            .create());
    commonOpts.addOption(OptionBuilder.withArgName("properties-file").hasArg()
            .withDescription("Specify connection parameters file").withLongOpt(CONNECT_PARAM_FILE).create());
    commonOpts.addOption(OptionBuilder.withArgName("class-name").hasArg()
            .withDescription("Manually specify JDBC driver class to use").withLongOpt(DRIVER_ARG).create());
    commonOpts.addOption(OptionBuilder.withArgName("username").hasArg()
            .withDescription("Set authentication username").withLongOpt(USERNAME_ARG).create());
    commonOpts.addOption(OptionBuilder.withArgName("password").hasArg()
            .withDescription("Set authentication password").withLongOpt(PASSWORD_ARG).create());
    commonOpts
            .addOption(OptionBuilder.withDescription("Read password from console").create(PASSWORD_PROMPT_ARG));

    commonOpts.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("Override $HADOOP_HOME")
            .withLongOpt(HADOOP_HOME_ARG).create());

    // misc (common)
    commonOpts.addOption(OptionBuilder.withDescription("Print more information while working")
            .withLongOpt(VERBOSE_ARG).create());
    commonOpts.addOption(
            OptionBuilder.withDescription("Print usage instructions").withLongOpt(HELP_ARG).create());

    return commonOpts;
}

From source file:org.apache.sqoop.tool.BaseSqoopTool.java

/**
 * @param explicitHiveImport true if the user has an explicit --hive-import
 * available, or false if this is implied by the tool.
 * @return options governing interaction with Hive
 */
protected RelatedOptions getHiveOptions(boolean explicitHiveImport) {
    RelatedOptions hiveOpts = new RelatedOptions("Hive arguments");
    if (explicitHiveImport) {
        hiveOpts.addOption(OptionBuilder
                .withDescription(
                        "Import tables into Hive " + "(Uses Hive's default delimiters if none are set.)")
                .withLongOpt(HIVE_IMPORT_ARG).create());
    }

    hiveOpts.addOption(OptionBuilder.withArgName("dir").hasArg().withDescription("Override $HIVE_HOME")
            .withLongOpt(HIVE_HOME_ARG).create());
    hiveOpts.addOption(OptionBuilder.withDescription("Overwrite existing data in the Hive table")
            .withLongOpt(HIVE_OVERWRITE_ARG).create());
    hiveOpts.addOption(OptionBuilder.withDescription("Fail if the target hive table exists")
            .withLongOpt(CREATE_HIVE_TABLE_ARG).create());
    hiveOpts.addOption(OptionBuilder.withArgName("table-name").hasArg()
            .withDescription("Sets the table name to use when importing to hive").withLongOpt(HIVE_TABLE_ARG)
            .create());
    hiveOpts.addOption(OptionBuilder
            .withDescription(
                    "Drop Hive record \\0x01 and row delimiters " + "(\\n\\r) from imported string fields")
            .withLongOpt(HIVE_DROP_DELIMS_ARG).create());
    hiveOpts.addOption(OptionBuilder.hasArg()
            .withDescription("Replace Hive record \\0x01 and row delimiters "
                    + "(\\n\\r) from imported string fields with user-defined string")
            .withLongOpt(HIVE_DELIMS_REPLACEMENT_ARG).create());
    hiveOpts.addOption(OptionBuilder.withArgName("partition-key").hasArg()
            .withDescription("Sets the partition key to use when importing to hive")
            .withLongOpt(HIVE_PARTITION_KEY_ARG).create());
    hiveOpts.addOption(OptionBuilder.withArgName("partition-value").hasArg()
            .withDescription("Sets the partition value to use when importing " + "to hive")
            .withLongOpt(HIVE_PARTITION_VALUE_ARG).create());
    hiveOpts.addOption(
            OptionBuilder.hasArg().withDescription("Override mapping for specific column to hive" + " types.")
                    .withLongOpt(MAP_COLUMN_HIVE).create());

    return hiveOpts;
}

From source file:org.apache.sqoop.tool.ExportTool.java

/**
 * Construct the set of options that control exports.
 * @return the RelatedOptions that can be used to parse the export
 * arguments.
 */
protected RelatedOptions getExportOptions() {
    RelatedOptions exportOpts = new RelatedOptions("Export control arguments");

    exportOpts.addOption(
            OptionBuilder.withDescription("Use direct export fast path").withLongOpt(DIRECT_ARG).create());
    exportOpts.addOption(OptionBuilder.withArgName("table-name").hasArg().withDescription("Table to populate")
            .withLongOpt(TABLE_ARG).create());
    exportOpts.addOption(OptionBuilder.withArgName("col,col,col...").hasArg()
            .withDescription("Columns to export to table").withLongOpt(COLUMNS_ARG).create());
    exportOpts.addOption(
            OptionBuilder.withArgName("n").hasArg().withDescription("Use 'n' map tasks to export in parallel")
                    .withLongOpt(NUM_MAPPERS_ARG).create(NUM_MAPPERS_SHORT_ARG));
    exportOpts.addOption(OptionBuilder.withArgName("dir").hasArg()
            .withDescription("HDFS source path for the export").withLongOpt(EXPORT_PATH_ARG).create());
    exportOpts.addOption(OptionBuilder.withArgName("key").hasArg()
            .withDescription("Update records by specified key column").withLongOpt(UPDATE_KEY_ARG).create());
    exportOpts.addOption(OptionBuilder.withArgName("table-name").hasArg()
            .withDescription("Intermediate staging table").withLongOpt(STAGING_TABLE_ARG).create());
    exportOpts.addOption(
            OptionBuilder.withDescription("Indicates that any data in " + "staging table can be deleted")
                    .withLongOpt(CLEAR_STAGING_TABLE_ARG).create());
    exportOpts.addOption(
            OptionBuilder.withDescription("Indicates underlying statements " + "to be executed in batch mode")
                    .withLongOpt(BATCH_ARG).create());
    exportOpts.addOption(OptionBuilder.withArgName("mode").hasArg()
            .withDescription("Specifies how updates are performed when "
                    + "new rows are found with non-matching keys in database")
            .withLongOpt(UPDATE_MODE_ARG).create());

    return exportOpts;
}

From source file:org.apache.sqoop.tool.ImportTool.java

/**
 * Construct the set of options that control imports, either of one
 * table or a batch of tables.
 * @return the RelatedOptions that can be used to parse the import
 * arguments.
 */
protected RelatedOptions getImportOptions() {
    // Imports
    RelatedOptions importOpts = new RelatedOptions("Import control arguments");

    importOpts.addOption(
            OptionBuilder.withDescription("Use direct import fast path").withLongOpt(DIRECT_ARG).create());

    if (!allTables) {
        importOpts.addOption(OptionBuilder.withArgName("table-name").hasArg().withDescription("Table to read")
                .withLongOpt(TABLE_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("col,col,col...").hasArg()
                .withDescription("Columns to import from table").withLongOpt(COLUMNS_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("column-name").hasArg()
                .withDescription("Column of the table used to split work units").withLongOpt(SPLIT_BY_ARG)
                .create());
        importOpts.addOption(OptionBuilder.withArgName("where clause").hasArg()
                .withDescription("WHERE clause to use during import").withLongOpt(WHERE_ARG).create());
        importOpts.addOption(
                OptionBuilder.withDescription("Imports data in append mode").withLongOpt(APPEND_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("dir").hasArg()
                .withDescription("HDFS plain table destination").withLongOpt(TARGET_DIR_ARG).create());
        importOpts.addOption(OptionBuilder.withArgName("statement").hasArg()
                .withDescription("Import results of SQL 'statement'").withLongOpt(SQL_QUERY_ARG)
                .create(SQL_QUERY_SHORT_ARG));
        importOpts.addOption(OptionBuilder.withArgName("statement").hasArg()
                .withDescription("Set boundary query for retrieving max and min" + " value of the primary key")
                .withLongOpt(SQL_QUERY_BOUNDARY).create());
    }

    importOpts.addOption(OptionBuilder.withArgName("dir").hasArg()
            .withDescription("HDFS parent for table destination").withLongOpt(WAREHOUSE_DIR_ARG).create());
    importOpts.addOption(OptionBuilder.withDescription("Imports data to SequenceFiles")
            .withLongOpt(FMT_SEQUENCEFILE_ARG).create());
    importOpts.addOption(OptionBuilder.withDescription("Imports data as plain text (default)")
            .withLongOpt(FMT_TEXTFILE_ARG).create());
    importOpts.addOption(OptionBuilder.withDescription("Imports data to Avro data files")
            .withLongOpt(FMT_AVRODATAFILE_ARG).create());
    importOpts.addOption(
            OptionBuilder.withArgName("n").hasArg().withDescription("Use 'n' map tasks to import in parallel")
                    .withLongOpt(NUM_MAPPERS_ARG).create(NUM_MAPPERS_SHORT_ARG));
    importOpts.addOption(OptionBuilder.withDescription("Enable compression").withLongOpt(COMPRESS_ARG)
            .create(COMPRESS_SHORT_ARG));
    importOpts.addOption(
            OptionBuilder.withArgName("codec").hasArg().withDescription("Compression codec to use for import")
                    .withLongOpt(COMPRESSION_CODEC_ARG).create());
    importOpts.addOption(OptionBuilder.withArgName("n").hasArg()
            .withDescription("Split the input stream every 'n' bytes " + "when importing in direct mode")
            .withLongOpt(DIRECT_SPLIT_SIZE_ARG).create());
    importOpts.addOption(
            OptionBuilder.withArgName("n").hasArg().withDescription("Set the maximum size for an inline LOB")
                    .withLongOpt(INLINE_LOB_LIMIT_ARG).create());
    importOpts.addOption(OptionBuilder.withArgName("n").hasArg()
            .withDescription("Set number 'n' of rows to fetch from the " + "database when more rows are needed")
            .withLongOpt(FETCH_SIZE_ARG).create());

    return importOpts;
}

From source file:org.apache.sqoop.tool.MainframeImportTool.java

@Override
@SuppressWarnings("static-access")
protected RelatedOptions getImportOptions() {
    // Imports
    RelatedOptions importOpts = new RelatedOptions("Import mainframe control arguments");
    importOpts.addOption(OptionBuilder.withArgName("Dataset name").hasArg()
            .withDescription("Datasets to import").withLongOpt(DS_ARG).create());
    importOpts.addOption(
            OptionBuilder.withDescription("Imports data in delete mode").withLongOpt(DELETE_ARG).create());
    importOpts.addOption(OptionBuilder.withArgName("dir").hasArg()
            .withDescription("HDFS plain file destination").withLongOpt(TARGET_DIR_ARG).create());
    importOpts.addOption(OptionBuilder.withArgName("Dataset type").hasArg()
            .withDescription("Dataset type (p=partitioned data set|s=sequential data set|g=GDG)")
            .withLongOpt(DS_TYPE_ARG).create());
    importOpts.addOption(OptionBuilder.withArgName("Dataset is on tape").hasArg()
            .withDescription("Dataset is on tape (true|false)").withLongOpt(DS_TAPE_ARG).create());

    addValidationOpts(importOpts);

    importOpts.addOption(OptionBuilder.withArgName("dir").hasArg()
            .withDescription("HDFS parent for file destination").withLongOpt(WAREHOUSE_DIR_ARG).create());
    importOpts.addOption(OptionBuilder.withDescription("Imports data as plain text (default)")
            .withLongOpt(FMT_TEXTFILE_ARG).create());
    importOpts.addOption(
            OptionBuilder.withArgName("n").hasArg().withDescription("Use 'n' map tasks to import in parallel")
                    .withLongOpt(NUM_MAPPERS_ARG).create(NUM_MAPPERS_SHORT_ARG));
    importOpts.addOption(OptionBuilder.withArgName("name").hasArg()
            .withDescription("Set name for generated mapreduce job").withLongOpt(MAPREDUCE_JOB_NAME).create());
    importOpts.addOption(OptionBuilder.withDescription("Enable compression").withLongOpt(COMPRESS_ARG)
            .create(COMPRESS_SHORT_ARG));
    importOpts.addOption(
            OptionBuilder.withArgName("codec").hasArg().withDescription("Compression codec to use for import")
                    .withLongOpt(COMPRESSION_CODEC_ARG).create());

    return importOpts;
}

From source file:org.apache.sysml.conf.DMLOptions.java

@SuppressWarnings("static-access")
private static Options createCLIOptions() {
    Options options = new Options();
    Option nvargsOpt = OptionBuilder.withArgName("key=value").withDescription(
            "parameterizes DML script with named parameters of the form <key=value>; <key> should be a valid identifier in DML/PyDML")
            .hasArgs().create("nvargs");
    Option argsOpt = OptionBuilder.withArgName("argN").withDescription(
            "specifies positional parameters; first value will replace $1 in DML program; $2 will replace 2nd and so on")
            .hasArgs().create("args");
    Option configOpt = OptionBuilder.withArgName("filename").withDescription(
            "uses a given configuration file (can be on local/hdfs/gpfs; default values in SystemML-config.xml")
            .hasArg().create("config");
    Option cleanOpt = OptionBuilder.withDescription(
            "cleans up all SystemML working directories (FS, DFS); all other flags are ignored in this mode. \n")
            .create("clean");
    Option statsOpt = OptionBuilder.withArgName("count").withDescription(
            "monitors and reports summary execution statistics; heavy hitter <count> is 10 unless overridden; default off")
            .hasOptionalArg().create("stats");
    Option memOpt = OptionBuilder
            .withDescription("monitors and reports max memory consumption in CP; default off").create("mem");
    Option explainOpt = OptionBuilder.withArgName("level").withDescription(
            "explains plan levels; can be 'hops' / 'runtime'[default] / 'recompile_hops' / 'recompile_runtime'")
            .hasOptionalArg().create("explain");
    Option execOpt = OptionBuilder.withArgName("mode").withDescription(
            "sets execution mode; can be 'hadoop' / 'singlenode' / 'hybrid'[default] / 'hybrid_spark' / 'spark'")
            .hasArg().create("exec");
    Option gpuOpt = OptionBuilder.withArgName("force").withDescription(
            "uses CUDA instructions when reasonable; set <force> option to skip conservative memory estimates and use GPU wherever possible; default off")
            .hasOptionalArg().create("gpu");
    Option debugOpt = OptionBuilder.withDescription("runs in debug mode; default off").create("debug");
    Option pythonOpt = OptionBuilder.withDescription("parses Python-like DML").create("python");
    Option fileOpt = OptionBuilder.withArgName("filename").withDescription(
            "specifies dml/pydml file to execute; path can be local/hdfs/gpfs (prefixed with appropriate URI)")
            .isRequired().hasArg().create("f");
    Option scriptOpt = OptionBuilder.withArgName("script_contents")
            .withDescription("specified script string to execute directly").isRequired().hasArg().create("s");
    Option helpOpt = OptionBuilder.withDescription("shows usage message").create("help");

    options.addOption(configOpt);
    options.addOption(cleanOpt);
    options.addOption(statsOpt);
    options.addOption(memOpt);
    options.addOption(explainOpt);
    options.addOption(execOpt);
    options.addOption(gpuOpt);
    options.addOption(debugOpt);
    options.addOption(pythonOpt);

    // Either a clean(-clean), a file(-f), a script(-s) or help(-help) needs to be specified
    OptionGroup fileOrScriptOpt = new OptionGroup().addOption(scriptOpt).addOption(fileOpt).addOption(cleanOpt)
            .addOption(helpOpt);
    fileOrScriptOpt.setRequired(true);
    options.addOptionGroup(fileOrScriptOpt);

    // Either -args or -nvargs
    options.addOptionGroup(new OptionGroup().addOption(nvargsOpt).addOption(argsOpt));
    options.addOption(helpOpt);

    return options;
}
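
This example also shows the argument-count variants used alongside withDescription: hasArg() expects exactly one value, hasArgs() accepts an unbounded list (used here for -nvargs and -args), and hasOptionalArg() accepts zero or one value (used for -stats, -explain, and -gpu).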

From source file:org.apache.usergrid.launcher.Server.java

static Options createOptions() {
    // the nogui option is required because the graphical launcher is
    // combined with this standalone CLI-based server
    Options options = new Options();
    OptionBuilder.withDescription("Start launcher without UI");
    OptionBuilder.isRequired(true);
    Option noguiOption = OptionBuilder.create("nogui");

    OptionBuilder.isRequired(false);
    OptionBuilder.withDescription("Initialize database");
    Option initOption = OptionBuilder.create("init");

    OptionBuilder.withDescription("Start database");
    Option dbOption = OptionBuilder.create("db");

    OptionBuilder.withDescription("Http port (without UI)");
    OptionBuilder.hasArg();
    OptionBuilder.withArgName("PORT");
    OptionBuilder.withLongOpt("port");
    OptionBuilder.withType(Number.class);
    Option portOption = OptionBuilder.create('p');

    options.addOption(initOption);
    options.addOption(dbOption);
    options.addOption(portOption);
    options.addOption(noguiOption);

    return options;
}
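
Unlike the chained examples above, this launcher configures OptionBuilder one statement at a time. Both styles work because the builder methods accumulate state in static fields, which create(...) consumes and then resets; the flip side of that design is that OptionBuilder is not safe to use from multiple threads at once.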

From source file:org.apache.usergrid.tools.Cli.java

@Override
@SuppressWarnings("static-access")
public Options createOptions() {

    Option hostOption = OptionBuilder.withArgName("host").hasArg().withDescription("Cassandra host")
            .create("host");

    Option remoteOption = OptionBuilder.withDescription("Use remote Cassandra instance").create("remote");

    Options options = new Options();
    options.addOption(hostOption);
    options.addOption(remoteOption);

    return options;
}