Example usage for org.apache.commons.cli HelpFormatter setWidth

Introduction

This page collects example usages of org.apache.commons.cli HelpFormatter setWidth, drawn from the open-source projects listed below.

Prototype

public void setWidth(int width) 

Document

Sets the 'width'.
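
Before the project examples, here is a minimal, self-contained sketch of the common pattern (the class name SetWidthDemo and its options are illustrative only, not taken from any of the projects below): setWidth is called before printHelp so that the generated help text wraps at the given column rather than at the formatter's default width.

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

public class SetWidthDemo {
    public static void main(String[] args) {
        Options options = new Options();
        options.addOption("i", "input", true, "input path");
        options.addOption("o", "output", true, "output path");

        HelpFormatter formatter = new HelpFormatter();
        // Wrap the generated usage text at 120 columns instead of the default.
        formatter.setWidth(120);
        formatter.printHelp("SetWidthDemo [options]", options);
    }
}

If the width is only needed for a single call, printHelp also has overloads that take the width directly as their first argument.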

Usage

From source file:tl.lin.data.map.FrontCodedString2IntBidiMapBuilder.java

@SuppressWarnings({ "static-access" })
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();

    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
        System.out.println("args: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp(FrontCodedString2IntBidiMapBuilder.class.getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        System.exit(-1);
    }

    String input = cmdline.getOptionValue(INPUT);
    String output = cmdline.getOptionValue(OUTPUT);

    List<String> stringList = Lists.newArrayList();
    IntArrayList intList = new IntArrayList();

    // First read lines into sorted map to sort input.
    Object2IntAVLTreeMap<String> tree = new Object2IntAVLTreeMap<String>();
    BufferedReader br = new BufferedReader(new FileReader(input));
    String line;
    while ((line = br.readLine()) != null) {
        String[] arr = line.split("\\t");
        if (arr.length < 2 || arr[0] == null || arr[0].length() == 0) {
            LOG.info("Skipping invalid line: " + line);
            continue;
        }
        tree.put(arr[0], Integer.parseInt(arr[1]));
    }
    br.close();

    // Extract the sorted strings and ints.
    for (Object2IntMap.Entry<String> map : tree.object2IntEntrySet()) {
        stringList.add(map.getKey());
        intList.add(map.getIntValue());
    }

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    FSDataOutputStream os = fs.create(new Path(output), true);

    ByteArrayOutputStream bytesOut;
    ObjectOutputStream objOut;
    byte[] bytes;

    // Serialize the front-coded dictionary
    FrontCodedStringList frontcodedList = new FrontCodedStringList(stringList, 8, true);

    bytesOut = new ByteArrayOutputStream();
    objOut = new ObjectOutputStream(bytesOut);
    objOut.writeObject(frontcodedList);
    objOut.close();

    bytes = bytesOut.toByteArray();
    os.writeInt(bytes.length);
    os.write(bytes);

    // Serialize the hash function
    ShiftAddXorSignedStringMap dict = new ShiftAddXorSignedStringMap(stringList.iterator(),
            new TwoStepsLcpMonotoneMinimalPerfectHashFunction<CharSequence>(stringList,
                    TransformationStrategies.prefixFreeUtf16()));

    bytesOut = new ByteArrayOutputStream();
    objOut = new ObjectOutputStream(bytesOut);
    objOut.writeObject(dict);
    objOut.close();

    bytes = bytesOut.toByteArray();
    os.writeInt(bytes.length);
    os.write(bytes);

    // Serialize the ints.
    os.writeInt(intList.size());
    for (int i = 0; i < intList.size(); i++) {
        os.writeInt(intList.getInt(i));
    }

    os.close();
}

From source file:ubic.gemma.core.util.AbstractCLI.java

protected void printHelp() {
    HelpFormatter h = new HelpFormatter();
    h.setWidth(150);
    h.printHelp(this.getCommandName() + " [options]", this.getShortDesc() + "\n" + AbstractCLI.HEADER, options,
            AbstractCLI.FOOTER);
}

From source file:ubic.gemma.util.AbstractCLI.java

/**
 * @param command The name of the command as used at the command line.
 */
protected void printHelp(String command) {
    HelpFormatter h = new HelpFormatter();
    h.setWidth(120);
    h.printHelp(command + " [options]", HEADER, options, FOOTER);
}

From source file:uk.bl.wa.hadoop.datasets.WARCDatasetGenerator.java

private void printHelp(String message, Options options) {
    HelpFormatter helpFormatter = new HelpFormatter();
    helpFormatter.setWidth(80);
    helpFormatter.printHelp(CLI_USAGE, CLI_HEADER, options, message);
    System.out.println("\n");
    ToolRunner.printGenericCommandUsage(System.out);
    System.exit(1);
}

From source file:uk.bl.wa.hadoop.indexer.mdx.MDXSeqSampleGenerator.java

private void setup(String[] args, JobConf conf) throws ParseException {
    // Process Hadoop args first:
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

    // Process the remaining args:
    Options options = new Options();
    options.addOption("i", true, "input file list");
    options.addOption("o", true, "output directory");
    options.addOption("w", false, "wait for job to finish");
    options.addOption("r", true, "number of reducers");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = parser.parse(options, otherArgs);
    if (!cmd.hasOption("i") || !cmd.hasOption("o")) {
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.setWidth(80);
        helpFormatter.printHelp(CLI_USAGE, CLI_HEADER, options, "");
        System.exit(1);
    }
    this.inputPath = cmd.getOptionValue("i");
    this.outputPath = cmd.getOptionValue("o");
    this.wait = cmd.hasOption("w");
    if (cmd.hasOption("r")) {
        this.numReducers = Integer.parseInt(cmd.getOptionValue("r"));
    }
}

From source file:uk.bl.wa.hadoop.mapreduce.hash.HdfsFileHasher.java

@Override
public int run(String[] args) throws Exception {
    // Options:
    String[] otherArgs = new GenericOptionsParser(args).getRemainingArgs();

    // Process the remaining args:
    Options options = new Options();
    options.addOption("i", true, "a local file containing a list of HDFS paths to process");
    options.addOption("o", true, "output directory");
    options.addOption("m", false, "use MD5 rather than SHA-512");
    options.addOption("r", true, "number of reducers (defaults to 1)");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = parser.parse(options, otherArgs);
    if (!cmd.hasOption("i") || !cmd.hasOption("o")) {
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.setWidth(80);
        helpFormatter.printHelp(CLI_USAGE, CLI_HEADER, options, "");
        System.exit(1);
    }
    String input_file = cmd.getOptionValue("i");
    String output_path = cmd.getOptionValue("o");
    String algorithm = null;
    int numReducers = 1;
    if (cmd.hasOption("m")) {
        algorithm = "MD5";
    }
    if (cmd.hasOption("r")) {
        numReducers = Integer.parseInt(cmd.getOptionValue("r"));
    }

    // When implementing tool, choose algorithm:
    Configuration conf = this.getConf();
    if (algorithm != null)
        conf.set(MessageDigestMapper.CONFIG_DIGEST_ALGORITHM, algorithm);

    // Create job
    Job job = new Job(conf, "HDFS File Checksummer");
    job.setJarByClass(HdfsFileHasher.class);

    // Set up the MapReduce job (the number of reducers is set below):
    job.setMapperClass(MessageDigestMapper.class);
    job.setReducerClass(Reducer.class);

    // Use the requested number of reducers (one by default, so a single output file):
    job.setNumReduceTasks(numReducers);

    // Specify key / value
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Input
    log.info("Reading input files...");
    String line = null;
    long line_count = 0;
    BufferedReader br = new BufferedReader(new FileReader(input_file));
    while ((line = br.readLine()) != null) {
        if (StringUtils.isEmpty(line))
            continue;
        line_count++;
        Path path = new Path(line);
        FileSystem fs = path.getFileSystem(conf);
        if (fs.isFile(path)) {
            FileInputFormat.addInputPath(job, path);
        } else if (fs.isDirectory(path)) {
            FileStatus[] listing = fs.listStatus(path);
            int list_count = 0;
            for (FileStatus fstat : listing) {
                list_count++;
                log.info("Checking " + list_count + "/" + listing.length + " " + fstat.getPath());
                if (!fstat.isDir()) {
                    FileInputFormat.addInputPath(job, fstat.getPath());
                }
            }
        }
    }
    br.close();
    log.info("Read " + FileInputFormat.getInputPaths(job).length + " input files from " + line_count
            + " paths.");
    job.setInputFormatClass(UnsplittableInputFileFormat.class);

    // Output
    FileOutputFormat.setOutputPath(job, new Path(output_path));
    job.setOutputFormatClass(TextOutputFormat.class);

    // Execute job and return status
    return job.waitForCompletion(true) ? 0 : 1;
}

From source file:uk.bl.wa.hadoop.mapreduce.warcstats.WARCRawStatsMDXGenerator.java

private void setup(String[] args, JobConf conf) throws ParseException {
    // Process Hadoop args first:
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

    // Process the remaining args:
    Options options = new Options();
    options.addOption("i", true, "input file list");
    options.addOption("o", true, "output location");
    options.addOption("r", true, "number of reducers");
    options.addOption("w", false, "wait for job to finish");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = parser.parse(options, otherArgs);
    if (!cmd.hasOption("i") || !cmd.hasOption("o")) {
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.setWidth(80);
        helpFormatter.printHelp(CLI_USAGE, CLI_HEADER, options, "");
        System.exit(1);
    }
    this.inputPath = cmd.getOptionValue("i");
    this.outputPath = cmd.getOptionValue("o");
    this.wait = cmd.hasOption("w");
    this.numReducers = Integer.parseInt(cmd.getOptionValue("r", "1"));
}

From source file:uk.bl.wa.indexer.WARCIndexerCommand.java

/**
 * @param options
 */
private static void printUsage(Options options) {
    HelpFormatter helpFormatter = new HelpFormatter();
    helpFormatter.setWidth(80);
    helpFormatter.printHelp(CLI_USAGE, CLI_HEADER, options, CLI_FOOTER);
}

From source file:uk.bl.wa.util.ValidateWARCNameMatchers.java

private static void printUsage(Options options) {
    HelpFormatter helpFormatter = new HelpFormatter();
    helpFormatter.setWidth(80);
    helpFormatter.printHelp(CLI_USAGE, CLI_HEADER, options, CLI_FOOTER);
}

From source file:uk.gov.nationalarchives.discovery.taxonomy.cli.CLIRunner.java

public void run(String... args) throws IOException, ParseException, org.apache.commons.cli.ParseException {

    logger.info("Start cat CLI Runner.");
    logger.debug("mongo host: {}", host);
    logger.debug("mongo Index path: {}", iaviewCollectionPath);

    final String[] cliArgs = filterInputToGetOnlyCliArguments(args);

    Options options = registerAvailableActionsAndOptions();

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, cliArgs);

    if (cliArgs.length > 0) {
        logger.debug("args: {} ", Arrays.asList(cliArgs).toString());
    } else {
        logger.warn("no valid argument provided");
        logger.info("Stop cat CLI Runner.");
        return;
    }

    /**
     * Management of training Set
     */

    if (cmd.hasOption(ACTION_UPDATE_CATEGORIES_SCORES)) {
        logger.info("update categories scores ");
        String minNumber = cmd.getOptionValue(OPTION_MIN_ELEMENTS_PER_CAT);
        String maxNumber = cmd.getOptionValue(OPTION_MAX_ELEMENTS_PER_CAT);
        trainingSetService.updateCategoriesScores(Integer.parseInt(minNumber), Integer.parseInt(maxNumber));
    }

    if (cmd.hasOption(ACTION_UPDATE)) {
        logger.info("update (create if not existing) training set");
        String categoryCiaid = null;
        if (cmd.hasOption(OPTION_CIAID)) {
            categoryCiaid = cmd.getOptionValue(OPTION_CIAID);
        }
        Integer fixedLimitSize = null;
        if (cmd.hasOption(OPTION_FIXED_SIZE)) {
            fixedLimitSize = Integer.valueOf(cmd.getOptionValue(OPTION_FIXED_SIZE));
        }

        updateTrainingSet(categoryCiaid, fixedLimitSize);
    }

    if (cmd.hasOption(ACTION_INDEX)) {
        trainingSetService.indexTrainingSet();
    }

    /**
     * Run Categorisation
     */

    if (cmd.hasOption(ACTION_TEST_CATEGORISE_SINGLE)) {
        String docRef = cmd.getOptionValue(OPTION_DOC_REF);
        if (StringUtils.isEmpty(docRef)) {
            docRef = "C1253";
        }
        categoriser.testCategoriseSingle(docRef);
    }
    if (cmd.hasOption(ACTION_CATEGORISE_SINGLE)) {
        String docRef = cmd.getOptionValue(OPTION_DOC_REF);
        if (StringUtils.isEmpty(docRef)) {
            docRef = "C1253";
        }
        categoriser.categoriseSingle(docRef);
    }

    // if (cmd.hasOption(ACTION_TEST_CATEGORISE_ALL)) {
    // categoriser.testCategoriseIAViewSolrIndex();
    // }

    /**
     * Evaluate Categorisation System
     */

    if (cmd.hasOption(ACTION_CREATE_EVALUATION_DATA_SET)) {
        Integer minimumSizePerCat = 10;
        if (cmd.hasOption(OPTION_MINIMUM_SIZE_PER_CATEGORY)) {
            minimumSizePerCat = Integer.valueOf(cmd.getOptionValue(OPTION_MINIMUM_SIZE_PER_CATEGORY));
        }
        evaluationService.createEvaluationTestDataset(minimumSizePerCat);
    }

    if (cmd.hasOption(ACTION_CATEGORISE_EVALUATION_DATA_SET)) {
        Boolean matchNbOfReturnedCategories = false;
        if (cmd.hasOption(OPTION_MATCH_NB_OF_RETURNED_CATEGORIES)) {
            matchNbOfReturnedCategories = Boolean
                    .valueOf(cmd.getOptionValue(OPTION_MATCH_NB_OF_RETURNED_CATEGORIES));
        }
        evaluationService.runCategorisationOnTestDataSet(matchNbOfReturnedCategories);
    }

    if (cmd.hasOption(ACTION_GET_EVALUATION_REPORT)) {
        String userComments = null;
        if (cmd.hasOption(OPTION_EVALUATION_REPORT_COMMENTS)) {
            userComments = cmd.getOptionValue(OPTION_EVALUATION_REPORT_COMMENTS);
        }

        String aggregatedComments = aggregateCommentsWithArguments(userComments, args);

        getEvaluationReport(aggregatedComments);

    }

    /**
     * get Help
     */

    if (cmd.hasOption(ACTION_HELP)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(150);
        formatter.printHelp("help", options);
    }

    logger.info("Stop cat CLI Runner.");
}