Example usage for org.apache.commons.cli OptionBuilder hasArg

List of usage examples for org.apache.commons.cli OptionBuilder hasArg

Introduction

On this page you can find example usages of org.apache.commons.cli OptionBuilder hasArg.

Prototype

public static OptionBuilder hasArg(boolean hasArg) 

Source Link

Document

The next Option created will require an argument value if hasArg is true.

Usage

From source file:com.milaboratory.mitcr.cli.Main.java

/**
 * Command-line entry point for MiTCR.
 *
 * Builds the commons-cli option set, parses the command line, applies any
 * overrides to the selected parameters preset, runs the full analysis
 * pipeline over the input FASTQ file and exports the resulting clone set.
 *
 * @param args command-line options followed by an input file name and an
 *             output file name
 */
public static void main(String[] args) {
    // Sequential index used only to fix the order in which options are listed
    // in the generated help output.
    int o = 0;

    BuildInformation buildInformation = BuildInformationProvider.get();

    // Experimental options are hidden on the production ("default") branch.
    final boolean isProduction = "default".equals(buildInformation.scmBranch); // buildInformation.version != null && buildInformation.version.lastIndexOf("SNAPSHOT") < 0;

    orderingMap.put(PARAMETERS_SET_OPTION, o++);
    orderingMap.put(SPECIES_OPTION, o++);
    orderingMap.put(GENE_OPTION, o++);
    orderingMap.put(ERROR_CORECTION_LEVEL_OPTION, o++);
    orderingMap.put(QUALITY_THRESHOLD_OPTION, o++);
    orderingMap.put(AVERAGE_QUALITY_OPTION, o++);
    orderingMap.put(LQ_OPTION, o++);
    orderingMap.put(CLUSTERIZATION_OPTION, o++);
    orderingMap.put(INCLUDE_CYS_PHE_OPTION, o++);
    orderingMap.put(LIMIT_OPTION, o++);
    orderingMap.put(EXPORT_OPTION, o++);
    orderingMap.put(REPORT_OPTION, o++);
    orderingMap.put(REPORTING_LEVEL_OPTION, o++);
    orderingMap.put(PHRED33_OPTION, o++);
    orderingMap.put(PHRED64_OPTION, o++);
    orderingMap.put(THREADS_OPTION, o++);
    orderingMap.put(COMPRESSED_OPTION, o++);
    orderingMap.put(PRINT_HELP_OPTION, o++);
    orderingMap.put(PRINT_VERSION_OPTION, o++);
    orderingMap.put(PRINT_DEBUG_OPTION, o++);

    options.addOption(OptionBuilder.withArgName("preset name").hasArg()
            .withDescription("preset of pipeline parameters to use").create(PARAMETERS_SET_OPTION));

    options.addOption(OptionBuilder.withArgName("species").hasArg()
            .withDescription("overrides species ['hs' for Homo sapiens, 'mm' for Mus musculus] "
                    + "(default for built-in presets is 'hs')")
            .create(SPECIES_OPTION));

    options.addOption(OptionBuilder.withArgName("gene").hasArg()
            .withDescription("overrides gene: TRB or TRA (default value for built-in parameter sets is TRB)")
            .create(GENE_OPTION));

    options.addOption(OptionBuilder.withArgName("0|1|2").hasArg()
            .withDescription(
                    "overrides error correction level (0 = don't correct errors, 1 = correct sequencing "
                            + "errors only (see -" + QUALITY_THRESHOLD_OPTION + " and -" + LQ_OPTION
                            + " options for details), " + "2 = also correct PCR errors (see -"
                            + CLUSTERIZATION_OPTION + " option)")
            .create(ERROR_CORECTION_LEVEL_OPTION));

    options.addOption(OptionBuilder.withArgName("value").hasArg().withDescription(
            "overrides quality threshold value for segment alignment and bad quality sequences "
                    + "correction algorithms. 0 tells the program not to process quality information. (default is 25)")
            .create(QUALITY_THRESHOLD_OPTION));

    if (!isProduction)
        options.addOption(OptionBuilder.hasArg(false)
                .withDescription("use this option to output average instead of "
                        + "maximal, quality for CDR3 nucleotide sequences. (Experimental option, use with caution.)")
                .create(AVERAGE_QUALITY_OPTION));

    options.addOption(OptionBuilder.withArgName("map | drop").hasArg()
            .withDescription("overrides low quality CDR3s processing strategy (drop = filter off, "
                    + "map = map onto clonotypes created from the high quality CDR3s). This option makes no difference if "
                    + "quality threshold (-" + QUALITY_THRESHOLD_OPTION
                    + " option) is set to 0, or error correction " + "level (-" + ERROR_CORECTION_LEVEL_OPTION
                    + ") is 0.")
            .create(LQ_OPTION));

    options.addOption(OptionBuilder.withArgName("smd | ete").hasArg()
            .withDescription("overrides the PCR error correction algorithm: smd = \"save my diversity\", "
                    + "ete = \"eliminate these errors\". Default value for built-in parameters is ete.")
            .create(CLUSTERIZATION_OPTION));

    options.addOption(OptionBuilder.withArgName("0|1").hasArg()
            .withDescription("overrides whether to include bounding Cys & Phe into CDR3 sequence")
            .create(INCLUDE_CYS_PHE_OPTION));

    options.addOption(
            OptionBuilder.withArgName("# of reads").hasArg()
                    .withDescription("limits the number of input sequencing reads, use this parameter to "
                            + "normalize several datasets or to have a glance at the data")
                    .create(LIMIT_OPTION));

    options.addOption(OptionBuilder.withArgName("new name").hasArg()
            .withDescription("use this option to export presets to a local xml file").create(EXPORT_OPTION));

    options.addOption(OptionBuilder.withArgName("file name").hasArg()
            .withDescription("use this option to write analysis report (summary) to file")
            .create(REPORT_OPTION));

    options.addOption(OptionBuilder.withArgName("1|2|3").hasArg(true)
            .withDescription("output detalization level (1 = simple, 2 = medium, 3 = full, this format "
                    + "could be deserialized using mitcr API). Affects only tab-delimited output. Default value is 3.")
            .create(REPORTING_LEVEL_OPTION));

    options.addOption(OptionBuilder.hasArg(false).withDescription(
            "add this option if input file is in old illumina format with 64 byte offset for quality "
                    + "string (MiTCR will try to automatically detect file format if one of the \"-phredXX\" options is not provided)")
            .create(PHRED64_OPTION));

    options.addOption(OptionBuilder.hasArg(false)
            .withDescription("add this option if input file is in Phred+33 format for quality values "
                    + "(MiTCR will try to automatically detect file format if one of the \"-phredXX\" options is not provided)")
            .create(PHRED33_OPTION));

    options.addOption(OptionBuilder.withArgName("threads").hasArg()
            .withDescription(
                    "specifies the number of CDR3 extraction threads (default = number of available CPU cores)")
            .create(THREADS_OPTION));

    if (!isProduction)
        options.addOption(OptionBuilder.hasArg(false)
                .withDescription("use compressed data structures for storing individual "
                        + "clone segments statistics (from which arises the clone segment information). This option reduces required "
                        + "amount of memory, but introduces small stochastic errors into the algorithm which determines clone "
                        + "segments. (Experimental option, use with caution.)")
                .create(COMPRESSED_OPTION));

    options.addOption(
            OptionBuilder.hasArg(false).withDescription("print this message").create(PRINT_HELP_OPTION));

    options.addOption(OptionBuilder.hasArg(false).withDescription("print version information")
            .create(PRINT_VERSION_OPTION));

    options.addOption(OptionBuilder.hasArg(false)
            .withDescription("print additional information about analysis process").create(PRINT_DEBUG_OPTION));

    PosixParser parser = new PosixParser();

    try {
        // Defaults, possibly overridden by command-line options below.
        long input_limit = -1;
        int threads = Runtime.getRuntime().availableProcessors();
        int reporting_level = 3;
        int ec_level = 2;

        CommandLine cl = parser.parse(options, args, true);
        if (cl.hasOption(PRINT_HELP_OPTION)) {
            printHelp();
            return;
        }

        boolean averageQuality = cl.hasOption(AVERAGE_QUALITY_OPTION),
                compressedAggregators = cl.hasOption(COMPRESSED_OPTION);

        if (cl.hasOption(PRINT_VERSION_OPTION)) {
            System.out.println("MiTCR by MiLaboratory, version: " + buildInformation.version);
            System.out.println("Branch: " + buildInformation.scmBranch);
            System.out.println("Built: " + buildInformation.buildDate + ", " + buildInformation.jdk + " JDK, "
                    + "build machine: " + buildInformation.builtBy);
            System.out.println("SCM changeset: " + buildInformation.scmChangeset + " ("
                    + buildInformation.scmDate.replace("\"", "") + ")");
            return;
        }

        //Normal execution

        String paramName = cl.getOptionValue(PARAMETERS_SET_OPTION);

        if (paramName == null) {
            err.println("No parameters set is specified.");
            return;
        }

        Parameters params = ParametersIO.getParameters(paramName);

        if (params == null) {
            err.println("No parameters set found with name '" + paramName + "'.");
            return;
        }

        String value;

        if ((value = cl.getOptionValue(THREADS_OPTION)) != null)
            threads = Integer.decode(value);

        if ((value = cl.getOptionValue(REPORTING_LEVEL_OPTION)) != null)
            reporting_level = Integer.decode(value);

        if ((value = cl.getOptionValue(LIMIT_OPTION)) != null)
            input_limit = Long.decode(value);

        if ((value = cl.getOptionValue(GENE_OPTION)) != null)
            params.setGene(Gene.fromXML(value));

        if ((value = cl.getOptionValue(SPECIES_OPTION)) != null)
            params.setSpecies(Species.getFromShortName(value));

        if ((value = cl.getOptionValue(INCLUDE_CYS_PHE_OPTION)) != null) {
            if (value.equals("1"))
                params.getCDR3ExtractorParameters().setIncludeCysPhe(true);
            else if (value.equals("0"))
                params.getCDR3ExtractorParameters().setIncludeCysPhe(false);
            else {
                err.println("Illegal value for -" + INCLUDE_CYS_PHE_OPTION + " parameter.");
                return;
            }
        }

        // Error correction level selects both the clone generator and the
        // clusterization strategy; remembered in ec_level because the -lq and
        // clusterization overrides below only apply at certain levels.
        if ((value = cl.getOptionValue(ERROR_CORECTION_LEVEL_OPTION)) != null) {
            int v = Integer.decode(value);
            ec_level = v;
            if (v == 0) {
                params.setCloneGeneratorParameters(new BasicCloneGeneratorParameters());
                params.setClusterizationType(CloneClusterizationType.None);
            } else if (v == 1) {
                params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters());
                params.setClusterizationType(CloneClusterizationType.None);
            } else if (v == 2) {
                params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters());
                params.setClusterizationType(CloneClusterizationType.OneMismatch, .1f);
            } else
                throw new RuntimeException("This (" + v + ") error correction level is not supported.");
        }

        if ((value = cl.getOptionValue(QUALITY_THRESHOLD_OPTION)) != null) {
            int v = Integer.decode(value);
            if (v == 0)
                params.setQualityInterpretationStrategy(new DummyQualityInterpretationStrategy());
            else
                params.setQualityInterpretationStrategy(new IlluminaQualityInterpretationStrategy((byte) v));
        }

        if ((value = cl.getOptionValue(LQ_OPTION)) != null)
            if (ec_level > 0)
                switch (value) {
                case "map":
                    params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters(
                            ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters())
                                    .getSegmentInformationAggregationFactor(),
                            3, true));
                    break;
                case "drop":
                    params.setCloneGeneratorParameters(new LQFilteringOffCloneGeneratorParameters(
                            ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters())
                                    .getSegmentInformationAggregationFactor()));
                    break;
                default:
                    throw new RuntimeException("Wrong value for -" + LQ_OPTION + " option.");
                }

        if ((value = cl.getOptionValue(CLUSTERIZATION_OPTION)) != null)
            if (ec_level > 1) // == 2
                switch (value) {
                case "smd":
                    params.setClusterizationType(CloneClusterizationType.V2D1J2T3Explicit);
                    break;
                case "ete":
                    params.setClusterizationType(CloneClusterizationType.OneMismatch);
                    break;
                default:
                    throw new RuntimeException("Wrong value for -" + CLUSTERIZATION_OPTION + " option.");
                }

        ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters())
                .setAccumulatorType(AccumulatorType.get(compressedAggregators, averageQuality));

        if ((value = cl.getOptionValue(EXPORT_OPTION)) != null) {
            //Exporting parameters
            ParametersIO.exportParameters(params, value);
            return;
        }

        // Exactly two positional arguments are expected: input and output file.
        String[] offArgs = cl.getArgs();

        if (offArgs.length == 0) {
            err.println("Input file not specified.");
            return;
        } else if (offArgs.length == 1) {
            err.println("Output file not specified.");
            return;
        } else if (offArgs.length > 2) {
            err.println("Unrecognized argument.");
            return;
        }

        String inputFileName = offArgs[0];
        String outputFileName = offArgs[1];

        File input = new File(inputFileName);

        if (!input.exists()) {
            err.println("Input file not found.");
            return;
        }

        //TODO This also done inside SFastqReader constructor
        CompressionType compressionType = CompressionType.None;
        if (inputFileName.endsWith(".gz"))
            compressionType = CompressionType.GZIP;

        QualityFormat format = null; // If variable remains null file format will be detected automatically
        if (cl.hasOption(PHRED33_OPTION))
            format = QualityFormat.Phred33;
        if (cl.hasOption(PHRED64_OPTION))
            if (format == null)
                format = QualityFormat.Phred64;
            else {
                err.println(
                        "Options: -" + PHRED33_OPTION + " and -" + PHRED64_OPTION + " are mutually exclusive");
                return;
            }

        SFastqReader reads = format == null ? new SFastqReader(input, compressionType)
                : new SFastqReader(input, format, compressionType);

        // Optionally cap the number of reads fed into the pipeline.
        OutputPort<SSequencingRead> inputToPipeline = reads;
        if (input_limit >= 0)
            inputToPipeline = new CountLimitingOutputPort<>(inputToPipeline, input_limit);

        SegmentLibrary library = DefaultSegmentLibrary.load();

        AnalysisStatisticsAggregator statisticsAggregator = new AnalysisStatisticsAggregator();

        FullPipeline pipeline = new FullPipeline(inputToPipeline, params, false, library);
        pipeline.setThreads(threads);
        pipeline.setAnalysisListener(statisticsAggregator);

        new Thread(new SmartProgressReporter(pipeline, err)).start(); // Printing status to the standard error stream

        pipeline.run();

        if (cl.hasOption(PRINT_DEBUG_OPTION)) {
            err.println("Memory = " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()));
            err.println("Clusterization: " + pipeline.getQC().getReadsClusterized() + "% of reads, "
                    + pipeline.getQC().getClonesClusterized() + " % clones");
        }

        CloneSetClustered cloneSet = pipeline.getResult();

        if ((value = cl.getOptionValue(REPORT_OPTION)) != null) {
            File file = new File(value);
            TablePrintStreamAdapter table;
            // Append to an existing report; otherwise create it and write the header.
            if (file.exists())
                table = new TablePrintStreamAdapter(new FileOutputStream(file, true));
            else {
                table = new TablePrintStreamAdapter(file);
                ReportExporter.printHeader(table);
            }
            ReportExporter.printRow(table, inputFileName, outputFileName, pipeline.getQC(),
                    statisticsAggregator);
            table.close();
        }

        if (outputFileName.endsWith(".cls"))
            ClsExporter.export(pipeline, outputFileName.replace(".cls", "") + " " + new Date().toString(),
                    input.getName(), outputFileName);
        else {
            //Dry run
            if (outputFileName.startsWith("-"))
                return;

            ExportDetalizationLevel detalization = ExportDetalizationLevel.fromLevel(reporting_level);

            CompressionType compressionType1 = CompressionType.None;
            if (outputFileName.endsWith(".gz"))
                compressionType1 = CompressionType.GZIP;
            CloneSetIO.exportCloneSet(outputFileName, cloneSet, detalization, params, input.getAbsolutePath(),
                    compressionType1);
        }
    } catch (ParseException | RuntimeException | IOException e) {
        err.println("Error occurred in the analysis pipeline.");
        err.println();
        e.printStackTrace();
    }
}

From source file:edu.vt.cs.cnd2xsd.Cnd2XsdConverter.java

/**
 * Usage: Cnd2Xsd [path to source cnd] [path to write the xsd]
 * @param args/*from  w  ww  .j  ava  2  s.  c o  m*/
 * @throws LoginException
 * @throws RepositoryException
 * @throws IOException
 * @throws JAXBException
 */
@SuppressWarnings("static-access")
public static void main(String[] args) throws LoginException, RepositoryException, IOException, JAXBException,
        org.apache.commons.cli.ParseException {

    Session session = null;
    Cnd2XsdConverter converter = new Cnd2XsdConverter();

    try {
        Options opt = new Options();

        opt.addOption(OptionBuilder.hasArg(true).isRequired(false)
                .withDescription("Path for the input cnd file").create("fc"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false).withDescription("Path for properties map.")
                .create("fp"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false)
                .withDescription("Path for generating XML schema.").create("fx"));
        opt.addOption(OptionBuilder.hasArg(false).isRequired(false).withDescription("Prints this list.")
                .create("help"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false).withDescription("The namespace for the XSD.")
                .create("ns"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false).withDescription("The namespace prefix.")
                .create("nsp"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false)
                .withDescription("The root element in the XSD.").create("r"));
        opt.addOption(OptionBuilder.hasArg(true).isRequired(false).withDescription("The root element type.")
                .create("rtype"));

        //create the basic parser
        BasicParser parser = new BasicParser();
        CommandLine cl = parser.parse(opt, args);
        HelpFormatter f = new HelpFormatter();
        //check if we have any leftover args
        if (cl.getArgs().length != 0 || args.length == 0) {
            f.printHelp(MAINCLI, opt);
            return;
        }

        if (cl.hasOption("help")) {
            f.printHelp(MAINCLI, opt);
            return;
        }

        String cndFilePath = cl.getOptionValue("fc");
        String xsdFilePath = cl.getOptionValue("fx");
        String propmapPath = cl.getOptionValue("fp");
        String ns = cl.getOptionValue("ns");
        String nsPrefix = cl.getOptionValue("nsp");
        String rt = cl.getOptionValue("r");
        String rtype = cl.getOptionValue("rtype");

        converter.init(cndFilePath, propmapPath, ns, nsPrefix, rt, rtype);
        FileOutputStream fout = new FileOutputStream(xsdFilePath);
        converter.convert(fout);

    } finally {
        if (session != null) {
            session.save();
            session.logout();
        }
    }
}

From source file:ctrus.pa.bow.en.EnBOWOptions.java

@Override
@SuppressWarnings("static-access")
public void defineOptions() {
    // Register the shared default options first.
    super.defineDefaultOptions();

    // Boolean flag (takes no argument): split camel-cased terms while tokenizing.
    addOption(OptionBuilder.hasArg(false).withDescription("Split camel cased terms").create(SPLIT_CAMELCASE));
}

From source file:gov.nasa.jpl.mudrod.ontology.process.LocalOntology.java

/**
 * Demonstration entry point: loads all .owl files from a directory (or from
 * the bundled "ontology" classpath resource when no directory is given),
 * prints the ontology class hierarchy, and logs subclasses and synonyms for
 * a sample search term.
 *
 * @param args command-line arguments; supports the ONT_DIR argument option
 * @throws Exception if ontology loading or engine setup fails
 */
public static void main(String[] args) throws Exception {

    // boolean options
    Option helpOpt = new Option("h", "help", false, "show this help message");
    // argument options
    // NOTE(review): create() is called without an option name here —
    // withArgName only sets the display name. Confirm this option is actually
    // matchable by line.hasOption(ONT_DIR) below.
    Option ontDirOpt = OptionBuilder.hasArg(true).withArgName(ONT_DIR)
            .withDescription("A directory containing .owl files.").isRequired(false).create();

    // create the options
    Options options = new Options();
    options.addOption(helpOpt);
    options.addOption(ontDirOpt);

    String ontDir;
    CommandLineParser parser = new GnuParser();
    try {
        CommandLine line = parser.parse(options, args);

        if (line.hasOption(ONT_DIR)) {
            // Normalize Windows-style path separators.
            ontDir = line.getOptionValue(ONT_DIR).replace("\\", "/");
        } else {
            // Fall back to the ontology resources bundled on the classpath.
            ontDir = LocalOntology.class.getClassLoader().getResource("ontology").getFile();
        }
        if (!ontDir.endsWith("/")) {
            ontDir += "/";
        }
    } catch (Exception e) {
        LOG.error("Error whilst processing main method of LocalOntology.", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("LocalOntology: 'ontDir' argument is mandatory. ", options, true);
        return;
    }
    File fileDir = new File(ontDir);
    //Fail if the input is not a directory.
    if (fileDir.isDirectory()) {
        List<String> owlFiles = new ArrayList<>();
        for (File owlFile : fileDir.listFiles()) {
            owlFiles.add(owlFile.toString());
        }
        MudrodEngine mEngine = new MudrodEngine();
        Properties props = mEngine.loadConfig();
        Ontology ontology = new OntologyFactory(props).getOntology();
        //convert to correct input for ontology loading.
        String[] owlArray = new String[owlFiles.size()];
        owlArray = owlFiles.toArray(owlArray);
        ontology.load(owlArray);

        String[] terms = new String[] { "Glacier ice" };
        //Demonstrate that we can do basic ontology hierarchy navigation and log output.
        for (Iterator<OntClass> i = getParser().rootClasses(getModel()); i.hasNext();) {

            //print Ontology Class Hierarchy
            OntClass c = i.next();
            renderHierarchy(System.out, c, new LinkedList<>(), 0);

            for (Iterator<OntClass> subClass = c.listSubClasses(true); subClass.hasNext();) {
                OntClass sub = subClass.next();
                //This means that the search term is present as an OntClass
                if (terms[0].equalsIgnoreCase(sub.getLabel(null))) {
                    //Add the search term(s) above to the term cache.
                    for (int j = 0; j < terms.length; j++) {
                        addSearchTerm(terms[j], sub);
                    }

                    //Query the ontology and return subclasses of the search term(s)
                    for (int k = 0; k < terms.length; k++) {
                        Iterator<String> iter = ontology.subclasses(terms[k]);
                        while (iter.hasNext()) {
                            LOG.info("Subclasses >> " + iter.next());
                        }
                    }

                    //print any synonymic relationships to demonstrate that we can
                    //undertake synonym-based query expansion
                    for (int l = 0; l < terms.length; l++) {
                        Iterator<String> iter = ontology.synonyms(terms[l]);
                        while (iter.hasNext()) {
                            LOG.info("Synonym >> " + iter.next());
                        }
                    }
                }
            }
        }

        mEngine.end();
    }

}

From source file:gov.nasa.jpl.mudrod.main.MudrodEngine.java

/**
 * Main program invocation. Accepts one argument denoting location (on disk)
 * to a log file which is to be ingested. Help will be provided if invoked
 * with incorrect parameters.
 *
 * @param args
 *          {@link java.lang.String} array containing correct parameters.
 */
public static void main(String[] args) {
    // boolean options
    Option helpOpt = new Option("h", "help", false, "show this help message");

    // log ingest (preprocessing + processing)
    Option logIngestOpt = new Option("l", LOG_INGEST, false, "begin log ingest");
    // metadata ingest (preprocessing + processing)
    Option metaIngestOpt = new Option("m", META_INGEST, false, "begin metadata ingest");
    // ingest both log and metadata
    Option fullIngestOpt = new Option("f", FULL_INGEST, false, "begin full ingest Mudrod workflow");
    // processing only, assuming that preprocessing results is in dataDir
    Option processingOpt = new Option("p", PROCESSING, false, "begin processing with preprocessing results");

    // argument options (dataDir is the only mandatory one)
    Option dataDirOpt = OptionBuilder.hasArg(true).withArgName("/path/to/data/directory").hasArgs(1)
            .withDescription("the data directory to be processed by Mudrod").withLongOpt("dataDirectory")
            .isRequired().create(DATA_DIR);

    Option esHostOpt = OptionBuilder.hasArg(true).withArgName("host_name").hasArgs(1)
            .withDescription("elasticsearch cluster unicast host").withLongOpt("elasticSearchHost")
            .isRequired(false).create(ES_HOST);

    Option esTCPPortOpt = OptionBuilder.hasArg(true).withArgName("port_num").hasArgs(1)
            .withDescription("elasticsearch transport TCP port").withLongOpt("elasticSearchTransportTCPPort")
            .isRequired(false).create(ES_TCP_PORT);

    Option esPortOpt = OptionBuilder.hasArg(true).withArgName("port_num").hasArgs(1)
            .withDescription("elasticsearch HTTP/REST port").withLongOpt("elasticSearchHTTPPort")
            .isRequired(false).create(ES_HTTP_PORT);

    // create the options
    Options options = new Options();
    options.addOption(helpOpt);
    options.addOption(logIngestOpt);
    options.addOption(metaIngestOpt);
    options.addOption(fullIngestOpt);
    options.addOption(processingOpt);
    options.addOption(dataDirOpt);
    options.addOption(esHostOpt);
    options.addOption(esTCPPortOpt);
    options.addOption(esPortOpt);

    CommandLineParser parser = new GnuParser();
    try {
        CommandLine line = parser.parse(options, args);
        String processingType = null;

        // First matching ingest flag wins; later flags are ignored.
        if (line.hasOption(LOG_INGEST)) {
            processingType = LOG_INGEST;
        } else if (line.hasOption(PROCESSING)) {
            processingType = PROCESSING;
        } else if (line.hasOption(META_INGEST)) {
            processingType = META_INGEST;
        } else if (line.hasOption(FULL_INGEST)) {
            processingType = FULL_INGEST;
        }

        // Normalize the data directory path to forward slashes with a trailing "/".
        String dataDir = line.getOptionValue(DATA_DIR).replace("\\", "/");
        if (!dataDir.endsWith("/")) {
            dataDir += "/";
        }

        MudrodEngine me = new MudrodEngine();
        me.loadConfig();
        me.props.put(DATA_DIR, dataDir);

        // Optional Elasticsearch connection overrides.
        if (line.hasOption(ES_HOST)) {
            String esHost = line.getOptionValue(ES_HOST);
            me.props.put(MudrodConstants.ES_UNICAST_HOSTS, esHost);
        }

        if (line.hasOption(ES_TCP_PORT)) {
            String esTcpPort = line.getOptionValue(ES_TCP_PORT);
            me.props.put(MudrodConstants.ES_TRANSPORT_TCP_PORT, esTcpPort);
        }

        if (line.hasOption(ES_HTTP_PORT)) {
            String esHttpPort = line.getOptionValue(ES_HTTP_PORT);
            me.props.put(MudrodConstants.ES_HTTP_PORT, esHttpPort);
        }

        me.es = new ESDriver(me.getConfig());
        me.spark = new SparkDriver(me.getConfig());
        loadFullConfig(me, dataDir);
        // Dispatch to the requested ingest/processing workflow, if any.
        if (processingType != null) {
            switch (processingType) {
            case PROCESSING:
                me.startProcessing();
                break;
            case LOG_INGEST:
                me.startLogIngest();
                break;
            case META_INGEST:
                me.startMetaIngest();
                break;
            case FULL_INGEST:
                me.startFullIngest();
                break;
            default:
                break;
            }
        }
        me.end();
    } catch (Exception e) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(
                "MudrodEngine: 'dataDir' argument is mandatory. " + "User must also provide an ingest method.",
                options, true);
        LOG.error("Error whilst parsing command line.", e);
    }
}

From source file:ctrus.pa.bow.java.JavaBOWOptions.java

@SuppressWarnings("static-access")
@Override
public void defineOptions() {

    // Register the shared default options first.
    super.defineDefaultOptions();

    // All options below are boolean flags (no argument value).
    addOption(OptionBuilder.hasArg(false).withDescription("Split camel cased terms").create(SPLIT_CAMELCASE));

    addOption(OptionBuilder.hasArg(false)
            .withDescription("Consider terms from copyright notice in source files").create(CONSIDER_COPYRIGHT));

    addOption(OptionBuilder.hasArg(false).withDescription("Ignore comments in source files")
            .create(IGNORE_COMMENTS));

    addOption(OptionBuilder.hasArg(false).withDescription("Create BOW model per method, default is per class")
            .create(METHOD_CHUNKING));

    addOption(OptionBuilder.hasArg(false).withDescription("Create BOW model for state analysis")
            .create(STATE_ANALYSIS));

    addOption(OptionBuilder.hasArg(false)
            .withDescription("Create multiple Bag of Words based on Java code structure")
            .create(STRUCTURE_MULTI_BOW));
}

From source file:net.nicholaswilliams.java.licensing.licensor.interfaces.cli.spi.CliOptionsBuilder.java

/**
 * Fluent wrapper around {@link OptionBuilder#hasArg(boolean)}.
 *
 * @param hasArg whether the next Option created will require an argument value
 * @return this builder, for call chaining
 */
public CliOptionsBuilder hasArg(boolean hasArg) {
    // OptionBuilder holds static state; the returned static builder is
    // intentionally discarded so callers chain on this instance instead.
    OptionBuilder.hasArg(hasArg);
    return this;
}

From source file:edu.uga.cs.fluxbuster.FluxbusterCLI.java

@SuppressWarnings("static-access")
private static Options initializeOptions() {
    // Assemble the Fluxbuster command-line option set. The analysis-phase
    // flags are optional; the start/end dates are mandatory.
    Options opts = new Options();

    opts.addOption(OptionBuilder.isRequired(false).withDescription("Print help message.").withLongOpt("help")
            .create("?"));
    opts.addOption(OptionBuilder.hasArg(false).isRequired(false)
            .withDescription("Generate clusters. (Optional)").withLongOpt("generate-clusters").create("g"));
    opts.addOption(OptionBuilder.hasArg(false).isRequired(false)
            .withDescription("Calculate cluster features. (Optional)").withLongOpt("calc-features").create("f"));
    opts.addOption(OptionBuilder.hasArg(false).isRequired(false)
            .withDescription("Calculate cluster similarities. (Optional)").withLongOpt("calc-similarity")
            .create("s"));
    opts.addOption(OptionBuilder.hasArg(false).isRequired(false)
            .withDescription("Classify clusters. (Optional)").withLongOpt("classify-clusters").create("c"));
    opts.addOption(OptionBuilder.hasArg().isRequired(true)
            .withDescription("The start date of the input data.  Should be in yyyyMMdd format.")
            .withLongOpt("start-date").create("d"));
    opts.addOption(OptionBuilder.hasArg().isRequired(true)
            .withDescription("The end date of the input data.  Should be in yyyyMMdd format.")
            .withLongOpt("end-date").create("e"));

    return opts;
}

From source file:carmen.utils.Utils.java

/**
 * Builds a single command-line option and appends it to the given list.
 * {@code OptionBuilder} accumulates static state; {@code create()} consumes
 * that state and resets the builder, so each call here produces one option.
 *
 * @param options     list the newly created option is added to
 * @param option_name short name used to create the option
 * @param arg_name    display name for the option's argument
 * @param has_arg     whether the option takes an argument value
 * @param description help text shown for the option
 */
public static void registerOption(List<Option> options, String option_name, String arg_name, boolean has_arg,
        String description) {
    // The three setters below are independent; only create() must come last.
    OptionBuilder.withDescription(description);
    OptionBuilder.hasArg(has_arg);
    OptionBuilder.withArgName(arg_name);
    options.add(OptionBuilder.create(option_name));
}

From source file:ctrus.pa.bow.DefaultOptions.java

@SuppressWarnings("static-access")
public void defineDefaultOptions() {
    // Boolean flags: present on the command line => enabled (hasArg(false)).
    Option useWeight = OptionBuilder.hasArg(false)
            .withDescription("Use weights for words").create(USE_WEIGHT);
    addOption(useWeight);

    Option useStemming = OptionBuilder.hasArg(false)
            .withDescription("Stem the words extracted").create(USE_STEMMING);
    addOption(useStemming);

    Option caseSensitive = OptionBuilder.hasArg(false)
            .withDescription("Retain the capital characters in the term").create(CASE_SENSITIVE);
    addOption(caseSensitive);

    Option retainNumeral = OptionBuilder.hasArg(false)
            .withDescription("Retain the numerals").create(RETAIN_NUMERAL);
    addOption(retainNumeral);

    // Valued options: each requires an argument (hasArg(true)).
    Option minWordLength = OptionBuilder.hasArg(true)
            .withDescription("Minimum word length to consider, default is 3").create(MIN_WORD_LENGTH);
    addOption(minWordLength);

    Option stemmingAlgo = OptionBuilder.hasArg(true)
            .withDescription("Stemming algorithm to use").create(STEMMING_ALGO);
    addOption(stemmingAlgo);

    // The only two mandatory options: where to read from and where to write.
    Option sourceDir = OptionBuilder.hasArg(true)
            .withDescription("Directory containing input documents").isRequired().create(SOURCE_DIR);
    addOption(sourceDir);

    Option outputSingleFile = OptionBuilder.hasArg(true)
            .withDescription("Output single file with each line corresponding to an input source file")
            .create(OUTPUT_SINGLE_FILE);
    addOption(outputSingleFile);

    Option stopWordsFile = OptionBuilder.hasArg(true)
            .withDescription("Stop words file").create(STOP_WORDS_FILE);
    addOption(stopWordsFile);

    Option outputDir = OptionBuilder.hasArg(true)
            .withDescription("Directory to write output").isRequired().create(OUTPUT_DIR);
    addOption(outputDir);

    Option replaceJargons = OptionBuilder.hasArg(true)
            .withDescription("Replace jargon words with full words").create(REPLACE_JARGONS);
    addOption(replaceJargons);

    Option termChunkChars = OptionBuilder.hasArg(true)
            .withDescription("Chunk characters eg. ;,_:").create(TERM_CHUNK_CHARS);
    addOption(termChunkChars);

    Option debugLog = OptionBuilder.hasArg(false)
            .withDescription("Output debug log").create(DEBUG_LOG);
    addOption(debugLog);

    Option printHelp = OptionBuilder.hasArg(false)
            .withDescription("Print this help").create(PRINT_HELP);
    addOption(printHelp);

    Option printVocabulary = OptionBuilder.hasArg(false)
            .withDescription("Print the vocabulary to a file 'voc.txt'").create(PRINT_VOCABULARY);
    addOption(printVocabulary);

    Option hashTerms = OptionBuilder.hasArg(true)
            .withDescription("Hash the term if its length exceeds <arg>").create(HASH_TERMS);
    addOption(hashTerms);

    Option ignoreSpecialChars = OptionBuilder.hasArg(true)
            .withDescription("Special characters to ignore eg. _$").create(IGNORE_SPECIAL_CHARS);
    addOption(ignoreSpecialChars);

    Option documentPerLine = OptionBuilder.hasArg(false)
            .withDescription("Each line is a document in the input file(s)").create(DOCUMENT_PER_LINE);
    addOption(documentPerLine);

    Option noStemming = OptionBuilder.hasArg(false)
            .withDescription("Do not stem the terms").create(NO_STEMMING);
    addOption(noStemming);

    Option documentIdDelimiter = OptionBuilder.hasArg(true)
            .withDescription("Term left to delimiter is considered document Id, default is space")
            .create(DOCUMENT_ID_DELIMITER);
    addOption(documentIdDelimiter);

    Option preserveDocId = OptionBuilder.hasArg(false)
            .withDescription("Retain the document ids from input").create(PRESERVE_DOC_ID);
    addOption(preserveDocId);

}