Example usage for org.apache.commons.cli HelpFormatter printHelp

List of usage examples for org.apache.commons.cli HelpFormatter printHelp

Introduction

In this page you can find the example usage for org.apache.commons.cli HelpFormatter printHelp.

Prototype

public void printHelp(String cmdLineSyntax, Options options, boolean autoUsage) 

Source Link

Document

Print the help for options with the specified command line syntax.

Usage

From source file:com.cloudera.recordbreaker.schemadict.SchemaSuggest.java

/**
 * SchemaSuggest takes an avro file where schema elements may be anonymous.  It then attempts to 
 * compute good labels for the anonymous elts.  By default, this tool simply prints out the
 * suggested labels, if any.  The user may include a flag to rewrite the input data using
 * the new labels.//from w w  w. j a v  a  2 s .c o  m
 *
 * schemaSuggest avroFile 
 *
 */
/**
 * SchemaSuggest takes an avro file where schema elements may be anonymous.  It then attempts to
 * compute good labels for the anonymous elts.  By default, this tool simply prints out the
 * suggested labels, if any.  The user may include a flag to rewrite the input data using
 * the new labels.
 *
 * Usage: schemaSuggest [-?] [-d] [-k <count>] [-f <outFile>] &lt;schemadictionary&gt; &lt;anonymousAvro&gt;
 *
 * @param argv command-line arguments; two positional args are required.
 * @throws IOException if the dictionary or input file cannot be read.
 */
public static void main(String argv[]) throws IOException {
    CommandLine cmd = null;
    boolean debug = false;
    Options options = new Options();
    options.addOption("?", false, "Help for command-line");
    options.addOption("f", true, "Accept suggestions and rewrite input to a new Avro file");
    options.addOption("d", false, "Debug mode");
    options.addOption("k", true, "How many matches to emit.");

    try {
        CommandLineParser parser = new PosixParser();
        cmd = parser.parse(options, argv);
    } catch (ParseException e) {
        printUsageAndExit(options, -1);
    }

    // Explicit help request: a zero exit status is appropriate here.
    if (cmd.hasOption("?")) {
        printUsageAndExit(options, 0);
    }

    if (cmd.hasOption("d")) {
        debug = true;
    }

    int k = 1;
    if (cmd.hasOption("k")) {
        try {
            k = Integer.parseInt(cmd.getOptionValue("k"));
        } catch (NumberFormatException nfe) {
            // Previously swallowed silently; warn the user and keep the default.
            System.err.println("Warning: value for -k is not a number ('" + cmd.getOptionValue("k")
                    + "'); using default k=" + k);
        }
    }

    String[] argArray = cmd.getArgs();
    if (argArray.length < 2) {
        // Missing required positional arguments is an error: exit non-zero
        // (the original exited 0, signalling success to callers/scripts).
        printUsageAndExit(options, 1);
    }

    File dataDir = new File(argArray[0]).getCanonicalFile();
    File inputData = new File(argArray[1]).getCanonicalFile();
    SchemaSuggest ss = new SchemaSuggest(dataDir);
    List<DictionaryMapping> mappings = ss.inferSchemaMapping(inputData, k);

    if (!cmd.hasOption("f")) {
        // NOTE(review): output is deliberately split between stdout (rankings)
        // and stderr (decorations/details) — confirm downstream consumers rely
        // on this before changing it.
        System.out.println("Ranking of closest known data types, with match-distance (smaller is better):");
        int counter = 1;
        for (DictionaryMapping mapping : mappings) {
            SchemaMapping sm = mapping.getMapping();
            List<SchemaMappingOp> bestOps = sm.getMapping();

            System.err.println();
            System.err.println();
            System.err.println("-------------------------------------------------------------");
            System.out.println(
                    counter + ".  '" + mapping.getDictEntry().getInfo() + "', with distance: " + sm.getDist());

            // Partition the mapping ops into three human-readable buckets.
            List<SchemaMappingOp> renames = new ArrayList<SchemaMappingOp>();
            List<SchemaMappingOp> extraInTarget = new ArrayList<SchemaMappingOp>();
            List<SchemaMappingOp> extraInSource = new ArrayList<SchemaMappingOp>();

            for (SchemaMappingOp op : bestOps) {
                if (op.opcode == SchemaMappingOp.CREATE_OP) {
                    extraInTarget.add(op);
                } else if (op.opcode == SchemaMappingOp.DELETE_OP) {
                    // A delete on the "input" dataset means the source had an
                    // extra field; otherwise the target did.
                    if (op.getS1DatasetLabel().compareTo("input") == 0) {
                        extraInSource.add(op);
                    } else {
                        extraInTarget.add(op);
                    }
                } else if (op.opcode == SchemaMappingOp.TRANSFORM_OP) {
                    renames.add(op);
                }
            }

            System.err.println();
            System.err.println(" DISCOVERED LABELS");
            int counterIn = 1;
            if (renames.size() == 0) {
                System.err.println("  (None)");
            } else {
                for (SchemaMappingOp op : renames) {
                    System.err.println("  " + counterIn + ".  " + "In '" + op.getS1DatasetLabel() + "', label '"
                            + op.getS1FieldLabel() + "' AS " + op.getS2FieldLabel());
                    if (debug) {
                        // In debug mode also show doc-string evidence for the match.
                        if (op.getS1DocStr() != null && op.getS1DocStr().length() > 0) {
                            System.err.println(
                                    "         '" + op.getS1DocStr() + "'  ==> '" + op.getS2DocStr() + "'");
                        }
                    }
                    counterIn++;
                }
            }

            System.err.println();
            System.err.println(" UNMATCHED ITEMS IN TARGET DATA TYPE");
            counterIn = 1;
            if (extraInTarget.size() == 0) {
                System.err.println("  (None)");
            } else {
                for (SchemaMappingOp op : extraInTarget) {
                    System.err.println("  " + counterIn + ".  " + op.getS1FieldLabel());
                    if (debug) {
                        if (op.getS1DocStr() != null && op.getS1DocStr().length() > 0) {
                            System.err.println("         " + op.getS1DocStr());
                        }
                    }
                    counterIn++;
                }
            }

            System.err.println();
            System.err.println(" UNMATCHED ITEMS IN SOURCE DATA");
            counterIn = 1;
            if (extraInSource.size() == 0) {
                System.err.println("  (None)");
            } else {
                for (SchemaMappingOp op : extraInSource) {
                    System.err.println("  " + counterIn + ".  " + op.getS1FieldLabel());
                    if (debug) {
                        if (op.getS1DocStr() != null && op.getS1DocStr().length() > 0) {
                            System.err.println("         " + op.getS1DocStr());
                        }
                    }
                    counterIn++;
                }
            }
            counter++;
        }
    }
}

/**
 * Print the command-line help plus the required positional arguments,
 * then terminate the JVM. Extracted because this stanza appeared three
 * times verbatim in main().
 *
 * @param options  the CLI options to describe
 * @param exitCode the process exit status to terminate with
 */
private static void printUsageAndExit(Options options, int exitCode) {
    HelpFormatter fmt = new HelpFormatter();
    fmt.printHelp("SchemaSuggest", options, true);
    System.err.println("Required inputs: <schemadictionary> <anonymousAvro>");
    System.exit(exitCode);
}

From source file:esiptestbed.mudrod.main.MudrodEngine.java

/**
 * Main program invocation. Accepts one argument denoting location (on disk)
 * to a log file which is to be ingested. Help will be provided if invoked
 * with incorrect parameters./* w  ww .j av a2s . co  m*/
 * 
 * @param args
 *          {@link java.lang.String} array contaning correct parameters.
 */
/**
 * Main program invocation. Accepts one argument denoting location (on disk)
 * to a log file which is to be ingested. Help will be provided if invoked
 * with incorrect parameters.
 *
 * @param args
 *          {@link java.lang.String} array containing correct parameters.
 */
public static void main(String[] args) {
    // boolean options
    Option helpOpt = new Option("h", "help", false, "show this help message");

    // preprocessing + processing
    Option fullIngestOpt = new Option("f", FULL_INGEST, false, "begin full ingest Mudrod workflow");
    // processing only, assuming that preprocessing results is in logDir
    Option processingOpt = new Option("p", PROCESSING, false, "begin processing with preprocessing results");

    // import raw web log into Elasticsearch
    Option logIngestOpt = new Option("l", LOG_INGEST, false, "begin log ingest without any processing only");
    // preprocessing web log, assuming web log has already been imported
    Option sessionReconOpt = new Option("s", SESSION_RECON, false, "begin session reconstruction");
    // calculate vocab similarity from session reconstrution results
    Option vocabSimFromOpt = new Option("v", VOCAB_SIM_FROM_LOG, false,
            "begin similarity calulation from web log Mudrod workflow");
    // add metadata and ontology preprocessing and processing results into web
    // log vocab similarity
    Option addMetaOntoOpt = new Option("a", ADD_META_ONTO, false, "begin adding metadata and ontology results");

    // argument options
    Option logDirOpt = Option.builder(LOG_DIR).required(true).numberOfArgs(1).hasArg(true)
            .desc("the log directory to be processed by Mudrod").argName(LOG_DIR).build();

    // create the options
    Options options = new Options();
    options.addOption(helpOpt);
    options.addOption(logIngestOpt);
    options.addOption(fullIngestOpt);
    options.addOption(processingOpt);
    options.addOption(sessionReconOpt);
    options.addOption(vocabSimFromOpt);
    options.addOption(addMetaOntoOpt);
    options.addOption(logDirOpt);

    // Handle -h/--help before parsing: logDirOpt is marked required, so
    // parser.parse() would reject a bare "-h" invocation with a
    // MissingOptionException and print the misleading error help below.
    // (The original registered helpOpt but never consulted it.)
    for (String arg : args) {
        if ("-h".equals(arg) || "--help".equals(arg)) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("MudrodEngine", options, true);
            return;
        }
    }

    CommandLineParser parser = new DefaultParser();
    try {
        CommandLine line = parser.parse(options, args);
        String processingType = null;

        // Exactly one workflow flag is honored, in this precedence order.
        if (line.hasOption(LOG_INGEST)) {
            processingType = LOG_INGEST;
        } else if (line.hasOption(FULL_INGEST)) {
            processingType = FULL_INGEST;
        } else if (line.hasOption(PROCESSING)) {
            processingType = PROCESSING;
        } else if (line.hasOption(SESSION_RECON)) {
            processingType = SESSION_RECON;
        } else if (line.hasOption(VOCAB_SIM_FROM_LOG)) {
            processingType = VOCAB_SIM_FROM_LOG;
        } else if (line.hasOption(ADD_META_ONTO)) {
            processingType = ADD_META_ONTO;
        }

        // Normalize the log directory path to forward slashes with a
        // trailing "/" so later concatenation works on all platforms.
        String dataDir = line.getOptionValue(LOG_DIR).replace("\\", "/");
        if (!dataDir.endsWith("/")) {
            dataDir += "/";
        }

        MudrodEngine me = new MudrodEngine();
        me.loadConfig();
        me.props.put(LOG_DIR, dataDir);
        me.es = new ESDriver(me.getConfig());
        me.spark = new SparkDriver();
        loadFullConfig(me, dataDir);
        if (processingType != null) {
            switch (processingType) {
            case LOG_INGEST:
                me.logIngest();
                break;
            case PROCESSING:
                me.startProcessing();
                break;
            case SESSION_RECON:
                me.sessionRestruction();
                break;
            case VOCAB_SIM_FROM_LOG:
                me.vocabSimFromLog();
                break;
            case ADD_META_ONTO:
                me.addMetaAndOntologySim();
                break;
            case FULL_INGEST:
                me.startFullIngest();
                break;
            default:
                break;
            }
        }
        me.end();
    } catch (Exception e) {
        // Any failure (bad CLI args, ES/Spark startup, workflow errors)
        // lands here; print usage and log the cause.
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(
                "MudrodEngine: 'logDir' argument is mandatory. " + "User must also provide an ingest method.",
                options, true);
        LOG.error("Error inputting command line!", e);
        return;
    }
}

From source file:com.speed.ob.Obfuscator.java

/**
 * Command-line entry point: registers the obfuscation options, parses the
 * arguments, and hands the parsed command line to {@code parse(...)}.
 * A missing option argument prints the message plus usage; any other
 * parse failure dumps a stack trace.
 */
public static void main(String[] args) {
    final Options opts = new Options();
    opts.addOption("a", "all", false, "enable all obfuscations");
    opts.addOption("s", "strings", false, "enable string obfuscation");
    opts.addOption("l", "lexical", false, "enable lexical obfuscation");
    opts.addOption("c", "control-flow", false, "enable control flow obfuscation");
    opts.addOption("C", "config", true, "use <arg> as a config file");
    opts.addOption("f", "file", true, "obfuscate file <arg>");
    opts.addOption("o", "out", true, "output obfuscated file(s) to directory <arg>");
    opts.addOption("h", "help", false, "shows this help message and then exits");

    final HelpFormatter help = new HelpFormatter();
    final CommandLineParser cliParser = new GnuParser();
    try {
        final CommandLine line = cliParser.parse(opts, args);
        parse(line, cliParser, opts, help);
    } catch (MissingArgumentException missing) {
        // An option was given without its required argument: tell the user
        // which one, then show full usage.
        System.out.println(missing.getMessage());
        help.printHelp("java com.speed.ob.Obfuscate", opts, true);
    } catch (ParseException pe) {
        pe.printStackTrace();
    }
}

From source file:esiptestbed.mudrod.ontology.process.LocalOntology.java

/**
 * Demonstration driver: loads every .owl file from the given ontology
 * directory, prints the ontology class hierarchy, and logs subclass and
 * synonym relationships for a sample search term ("Glacier ice").
 *
 * @param args expects the ONT_DIR option naming a directory of .owl files.
 * @throws Exception if ontology loading or traversal fails.
 */
public static void main(String[] args) throws Exception {

    // boolean options
    Option helpOpt = new Option("h", "help", false, "show this help message");
    // argument options
    Option ontDirOpt = Option.builder(ONT_DIR).required(true).numberOfArgs(1).hasArg(true)
            .desc("A directory containing .owl files.").argName(ONT_DIR).build();

    // create the options
    // NOTE(review): helpOpt is registered but never checked via hasOption("h"),
    // and ONT_DIR is required(true), so a bare "-h" invocation fails parsing
    // and falls into the catch block below — confirm this is intended.
    Options options = new Options();
    options.addOption(helpOpt);
    options.addOption(ontDirOpt);

    String ontDir;
    CommandLineParser parser = new DefaultParser();
    try {
        CommandLine line = parser.parse(options, args);

        if (line.hasOption(ONT_DIR)) {
            // Normalize Windows separators to forward slashes.
            ontDir = line.getOptionValue(ONT_DIR).replace("\\", "/");
        } else {
            // NOTE(review): since ONT_DIR is required, this fallback to the
            // bundled "ontology" classpath resource appears unreachable after
            // a successful parse — verify before relying on it.
            ontDir = LocalOntology.class.getClassLoader().getResource("ontology").getFile();
        }
        // Ensure a trailing slash for later path concatenation.
        if (!ontDir.endsWith("/")) {
            ontDir += "/";
        }
    } catch (Exception e) {
        LOG.error("Error whilst processing main method of LocalOntology.", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("LocalOntology: 'ontDir' argument is mandatory. ", options, true);
        return;
    }
    File fileDir = new File(ontDir);
    // NOTE(review): the intent is to fail when the input is not a directory,
    // but a non-directory input is silently ignored (method just returns).
    if (fileDir.isDirectory()) {
        // Treat every file in the directory as an .owl candidate.
        List<String> owlFiles = new ArrayList<>();
        for (File owlFile : fileDir.listFiles()) {
            owlFiles.add(owlFile.toString());
        }
        MudrodEngine mEngine = new MudrodEngine();
        Properties props = mEngine.loadConfig();
        Ontology ontology = new OntologyFactory(props).getOntology();
        //convert to correct input for ontology loading.
        String[] owlArray = new String[owlFiles.size()];
        owlArray = owlFiles.toArray(owlArray);
        ontology.load(owlArray);

        String[] terms = new String[] { "Glacier ice" };
        //Demonstrate that we can do basic ontology hierarchy navigation and log output.
        for (Iterator<OntClass> i = getParser().rootClasses(getModel()); i.hasNext();) {

            //print Ontology Class Hierarchy
            OntClass c = i.next();
            renderHierarchy(System.out, c, new LinkedList<>(), 0);

            for (Iterator<OntClass> subClass = c.listSubClasses(true); subClass.hasNext();) {
                OntClass sub = subClass.next();
                //This means that the search term is present as an OntClass
                if (terms[0].equalsIgnoreCase(sub.getLabel(null))) {
                    //Add the search term(s) above to the term cache.
                    for (int j = 0; j < terms.length; j++) {
                        addSearchTerm(terms[j], sub);
                    }

                    //Query the ontology and return subclasses of the search term(s)
                    for (int k = 0; k < terms.length; k++) {
                        Iterator<String> iter = ontology.subclasses(terms[k]);
                        while (iter.hasNext()) {
                            LOG.info("Subclasses >> " + iter.next());
                        }
                    }

                    //print any synonymic relationships to demonstrate that we can
                    //undertake synonym-based query expansion
                    for (int l = 0; l < terms.length; l++) {
                        Iterator<String> iter = ontology.synonyms(terms[l]);
                        while (iter.hasNext()) {
                            LOG.info("Synonym >> " + iter.next());
                        }
                    }
                }
            }
        }

        mEngine.end();
    }

}

From source file:gov.nasa.jpl.mudrod.ontology.process.LocalOntology.java

/**
 * Demonstration driver: loads every .owl file from the given ontology
 * directory, prints the ontology class hierarchy, and logs subclass and
 * synonym relationships for a sample search term ("Glacier ice").
 *
 * @param args optionally supplies the ONT_DIR option naming a directory
 *          of .owl files; otherwise the bundled "ontology" resource is used.
 * @throws Exception if ontology loading or traversal fails.
 */
public static void main(String[] args) throws Exception {

    // boolean options
    // NOTE(review): helpOpt is registered but never checked via hasOption("h"),
    // so "-h" prints nothing — confirm intended behavior.
    Option helpOpt = new Option("h", "help", false, "show this help message");
    // argument options
    // NOTE(review): OptionBuilder.create() is called with neither a short opt
    // nor withLongOpt(...); in commons-cli 1.x that throws
    // IllegalArgumentException at runtime — verify this option is ever built.
    Option ontDirOpt = OptionBuilder.hasArg(true).withArgName(ONT_DIR)
            .withDescription("A directory containing .owl files.").isRequired(false).create();

    // create the options
    Options options = new Options();
    options.addOption(helpOpt);
    options.addOption(ontDirOpt);

    String ontDir;
    CommandLineParser parser = new GnuParser();
    try {
        CommandLine line = parser.parse(options, args);

        if (line.hasOption(ONT_DIR)) {
            // Normalize Windows separators to forward slashes.
            ontDir = line.getOptionValue(ONT_DIR).replace("\\", "/");
        } else {
            // Fall back to the "ontology" directory bundled on the classpath.
            ontDir = LocalOntology.class.getClassLoader().getResource("ontology").getFile();
        }
        // Ensure a trailing slash for later path concatenation.
        if (!ontDir.endsWith("/")) {
            ontDir += "/";
        }
    } catch (Exception e) {
        LOG.error("Error whilst processing main method of LocalOntology.", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("LocalOntology: 'ontDir' argument is mandatory. ", options, true);
        return;
    }
    File fileDir = new File(ontDir);
    // NOTE(review): the intent is to fail when the input is not a directory,
    // but a non-directory input is silently ignored (method just returns).
    if (fileDir.isDirectory()) {
        // Treat every file in the directory as an .owl candidate.
        List<String> owlFiles = new ArrayList<>();
        for (File owlFile : fileDir.listFiles()) {
            owlFiles.add(owlFile.toString());
        }
        MudrodEngine mEngine = new MudrodEngine();
        Properties props = mEngine.loadConfig();
        Ontology ontology = new OntologyFactory(props).getOntology();
        //convert to correct input for ontology loading.
        String[] owlArray = new String[owlFiles.size()];
        owlArray = owlFiles.toArray(owlArray);
        ontology.load(owlArray);

        String[] terms = new String[] { "Glacier ice" };
        //Demonstrate that we can do basic ontology hierarchy navigation and log output.
        for (Iterator<OntClass> i = getParser().rootClasses(getModel()); i.hasNext();) {

            //print Ontology Class Hierarchy
            OntClass c = i.next();
            renderHierarchy(System.out, c, new LinkedList<>(), 0);

            for (Iterator<OntClass> subClass = c.listSubClasses(true); subClass.hasNext();) {
                OntClass sub = subClass.next();
                //This means that the search term is present as an OntClass
                if (terms[0].equalsIgnoreCase(sub.getLabel(null))) {
                    //Add the search term(s) above to the term cache.
                    for (int j = 0; j < terms.length; j++) {
                        addSearchTerm(terms[j], sub);
                    }

                    //Query the ontology and return subclasses of the search term(s)
                    for (int k = 0; k < terms.length; k++) {
                        Iterator<String> iter = ontology.subclasses(terms[k]);
                        while (iter.hasNext()) {
                            LOG.info("Subclasses >> " + iter.next());
                        }
                    }

                    //print any synonymic relationships to demonstrate that we can
                    //undertake synonym-based query expansion
                    for (int l = 0; l < terms.length; l++) {
                        Iterator<String> iter = ontology.synonyms(terms[l]);
                        while (iter.hasNext()) {
                            LOG.info("Synonym >> " + iter.next());
                        }
                    }
                }
            }
        }

        mEngine.end();
    }

}

From source file:de.unileipzig.ub.scroller.Main.java

/**
 * Scrolls over an Elasticsearch index and streams every matching document
 * (its _source JSON) to stdout, with periodic progress/ETA reporting on
 * stderr.
 *
 * @param args CLI options; -i/--index is mandatory.
 * @throws IOException on output failures.
 */
public static void main(String[] args) throws IOException {

    Options options = new Options();
    options.addOption("h", "help", false, "display this help");

    // elasticsearch options
    options.addOption("t", "host", true, "elasticsearch hostname (default: 0.0.0.0)");
    options.addOption("p", "port", true, "transport port (that's NOT the http port, default: 9300)");
    options.addOption("c", "cluster", true, "cluster name (default: elasticsearch_mdma)");

    options.addOption("i", "index", true, "index to use");

    options.addOption("f", "filter", true, "filter(s) - e.g. meta.kind=title");
    options.addOption("j", "junctor", true, "values: and, or (default: and)");
    options.addOption("n", "notice-every", true, "show speed after every N items");

    options.addOption("v", "verbose", false, "be verbose");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException ex) {
        logger.error(ex);
        System.exit(1);
    }

    // process options
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("scroller", options, true);
        System.exit(0);
    }

    if (!cmd.hasOption("i")) {
        System.err.println("error: no index specified");
        System.exit(1);
    }

    long noticeEvery = 10000;
    if (cmd.hasOption("n")) {
        try {
            noticeEvery = Long.parseLong(cmd.getOptionValue("n"));
        } catch (NumberFormatException nfe) {
            // Previously an uncaught NFE crashed with a stack trace.
            System.err.println("error: -n expects a number, got: " + cmd.getOptionValue("n"));
            System.exit(1);
        }
    }

    // ES options
    String[] hosts = new String[] { "0.0.0.0" };
    int port = 9300;
    String clusterName = "elasticsearch_mdma";

    if (cmd.hasOption("host")) {
        hosts = cmd.getOptionValues("host");
    }
    if (cmd.hasOption("port")) {
        port = Integer.parseInt(cmd.getOptionValue("port"));
    }
    if (cmd.hasOption("cluster")) {
        clusterName = cmd.getOptionValue("cluster");
    }

    // Index
    String indexName = cmd.getOptionValue("index");

    // filterMap is kept only for the isEmpty() check below; filterList
    // carries the actual filter terms. Both come from the same -f values,
    // and getMapForKeys also validates them (exits on ParseException).
    Map<String, String> filterMap = new HashMap<String, String>();
    if (cmd.hasOption("filter")) {
        try {
            filterMap = getMapForKeys(cmd.getOptionValues("filter"));
        } catch (ParseException pe) {
            System.err.println(pe);
            System.exit(1);
        }
    }

    Collection<HashMap> filterList = new ArrayList<HashMap>();
    if (cmd.hasOption("filter")) {
        try {
            filterList = getFilterList(cmd.getOptionValues("filter"));
        } catch (ParseException pe) {
            System.err.println(pe);
            System.exit(1);
        }
    }

    // ES Client
    // BUGFIX: cluster.name was hardcoded to "elasticsearch_mdma", silently
    // ignoring the -c/--cluster option; use the parsed clusterName instead.
    final Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", clusterName)
            .put("client.transport.ping_timeout", "60s").build();
    final TransportClient client = new TransportClient(settings);
    for (String host : hosts) {
        client.addTransportAddress(new InetSocketTransportAddress(host, port));
    }

    // build the query: combine all term filters with the chosen junctor
    String junctor = "and";
    if (cmd.hasOption("j")) {
        junctor = cmd.getOptionValue("j");
    }

    ArrayList<TermFilterBuilder> filters = new ArrayList<TermFilterBuilder>();
    if (filterList.size() > 0) {
        for (HashMap map : filterList) {
            for (Object obj : map.entrySet()) {
                Map.Entry entry = (Map.Entry) obj;
                filters.add(new TermFilterBuilder(entry.getKey().toString(), entry.getValue().toString()));
            }
        }
    }

    FilterBuilder fb = null;
    if (junctor.equals("and")) {
        AndFilterBuilder afb = new AndFilterBuilder();
        for (TermFilterBuilder tfb : filters) {
            afb.add(tfb);
        }
        fb = afb;
    }

    if (junctor.equals("or")) {
        OrFilterBuilder ofb = new OrFilterBuilder();
        for (TermFilterBuilder tfb : filters) {
            ofb.add(tfb);
        }
        fb = ofb;
    }

    // No filters: match everything; otherwise wrap match_all in the filter.
    QueryBuilder qb0 = null;
    if (filterMap.isEmpty()) {
        qb0 = matchAllQuery();
    } else {
        qb0 = filteredQuery(matchAllQuery(), fb);
    }

    // Count first so progress reporting can compute an ETA.
    final CountResponse countResponse = client.prepareCount(indexName).setQuery(qb0).execute().actionGet();
    final long total = countResponse.getCount();

    SearchResponse scrollResp = client.prepareSearch(indexName).setSearchType(SearchType.SCAN)
            .setScroll(new TimeValue(60000)).setQuery(qb0)
            // sort has no effect on scroll type (see: https://github.com/CPAN-API/cpan-api/issues/172)
            .setSize(1000) //1000 hits per shard will be returned for each scroll
            .execute().actionGet();

    //Scroll until no hits are returned

    System.err.println("[Scroller] query: " + qb0.toString());
    System.err.println("[Scroller] took: " + scrollResp.getTookInMillis() + "ms");
    System.err.println("[Scroller] docs found: " + total);

    long counter = 0;
    long start = System.currentTimeMillis();

    while (true) {
        scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000))
                .execute().actionGet();

        if (scrollResp.getHits().hits().length == 0) {
            break;
        }

        for (SearchHit hit : scrollResp.getHits()) {

            System.out.println(hit.sourceAsString());
            counter += 1;
            if (counter % noticeEvery == 0) {
                // BUGFIX: "/ 1000" was long integer division, truncating to
                // whole seconds and yielding Infinity speed (division by 0.0)
                // within the first second; divide by 1000.0 instead.
                final double elapsed = (System.currentTimeMillis() - start) / 1000.0;
                final double speed = counter / elapsed;
                final long eta = (long) ((elapsed / counter) * (total - counter) * 1000);
                System.err.println(
                        counter + "/" + total + " records recvd @ speed " + String.format("%1$,.1f", speed)
                                + " r/s eta: " + DurationFormatUtils.formatDurationWords(eta, false, false));
            }
        }
    }
    System.out.close();
}

From source file:gov.nasa.jpl.mudrod.main.MudrodEngine.java

/**
 * Main program invocation. Accepts one argument denoting location (on disk)
 * to a log file which is to be ingested. Help will be provided if invoked
 * with incorrect parameters./* w  w  w  .j a  v a2 s . c  om*/
 *
 * @param args
 *          {@link java.lang.String} array contaning correct parameters.
 */
/**
 * Main program invocation. Accepts one argument denoting location (on disk)
 * to a log file which is to be ingested. Help will be provided if invoked
 * with incorrect parameters.
 *
 * @param args
 *          {@link java.lang.String} array containing correct parameters.
 */
public static void main(String[] args) {
    // boolean options
    Option helpOpt = new Option("h", "help", false, "show this help message");

    // log ingest (preprocessing + processing)
    Option logIngestOpt = new Option("l", LOG_INGEST, false, "begin log ingest");
    // metadata ingest (preprocessing + processing)
    Option metaIngestOpt = new Option("m", META_INGEST, false, "begin metadata ingest");
    // ingest both log and metadata
    Option fullIngestOpt = new Option("f", FULL_INGEST, false, "begin full ingest Mudrod workflow");
    // processing only, assuming that preprocessing results is in dataDir
    Option processingOpt = new Option("p", PROCESSING, false, "begin processing with preprocessing results");

    // argument options
    Option dataDirOpt = OptionBuilder.hasArg(true).withArgName("/path/to/data/directory").hasArgs(1)
            .withDescription("the data directory to be processed by Mudrod").withLongOpt("dataDirectory")
            .isRequired().create(DATA_DIR);

    Option esHostOpt = OptionBuilder.hasArg(true).withArgName("host_name").hasArgs(1)
            .withDescription("elasticsearch cluster unicast host").withLongOpt("elasticSearchHost")
            .isRequired(false).create(ES_HOST);

    Option esTCPPortOpt = OptionBuilder.hasArg(true).withArgName("port_num").hasArgs(1)
            .withDescription("elasticsearch transport TCP port").withLongOpt("elasticSearchTransportTCPPort")
            .isRequired(false).create(ES_TCP_PORT);

    Option esPortOpt = OptionBuilder.hasArg(true).withArgName("port_num").hasArgs(1)
            .withDescription("elasticsearch HTTP/REST port").withLongOpt("elasticSearchHTTPPort")
            .isRequired(false).create(ES_HTTP_PORT);

    // create the options
    Options options = new Options();
    options.addOption(helpOpt);
    options.addOption(logIngestOpt);
    options.addOption(metaIngestOpt);
    options.addOption(fullIngestOpt);
    options.addOption(processingOpt);
    options.addOption(dataDirOpt);
    options.addOption(esHostOpt);
    options.addOption(esTCPPortOpt);
    options.addOption(esPortOpt);

    // Handle -h/--help before parsing: dataDirOpt is marked required, so
    // parser.parse() would reject a bare "-h" invocation and print the
    // misleading error help in the catch block below. (The original
    // registered helpOpt but never consulted it.)
    for (String arg : args) {
        if ("-h".equals(arg) || "--help".equals(arg)) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("MudrodEngine", options, true);
            return;
        }
    }

    CommandLineParser parser = new GnuParser();
    try {
        CommandLine line = parser.parse(options, args);
        String processingType = null;

        // Exactly one workflow flag is honored, in this precedence order.
        if (line.hasOption(LOG_INGEST)) {
            processingType = LOG_INGEST;
        } else if (line.hasOption(PROCESSING)) {
            processingType = PROCESSING;
        } else if (line.hasOption(META_INGEST)) {
            processingType = META_INGEST;
        } else if (line.hasOption(FULL_INGEST)) {
            processingType = FULL_INGEST;
        }

        // Normalize the data directory path to forward slashes with a
        // trailing "/" so later concatenation works on all platforms.
        String dataDir = line.getOptionValue(DATA_DIR).replace("\\", "/");
        if (!dataDir.endsWith("/")) {
            dataDir += "/";
        }

        MudrodEngine me = new MudrodEngine();
        me.loadConfig();
        me.props.put(DATA_DIR, dataDir);

        // Optional Elasticsearch overrides from the command line.
        if (line.hasOption(ES_HOST)) {
            String esHost = line.getOptionValue(ES_HOST);
            me.props.put(MudrodConstants.ES_UNICAST_HOSTS, esHost);
        }

        if (line.hasOption(ES_TCP_PORT)) {
            String esTcpPort = line.getOptionValue(ES_TCP_PORT);
            me.props.put(MudrodConstants.ES_TRANSPORT_TCP_PORT, esTcpPort);
        }

        if (line.hasOption(ES_HTTP_PORT)) {
            String esHttpPort = line.getOptionValue(ES_HTTP_PORT);
            me.props.put(MudrodConstants.ES_HTTP_PORT, esHttpPort);
        }

        me.es = new ESDriver(me.getConfig());
        me.spark = new SparkDriver(me.getConfig());
        loadFullConfig(me, dataDir);
        if (processingType != null) {
            switch (processingType) {
            case PROCESSING:
                me.startProcessing();
                break;
            case LOG_INGEST:
                me.startLogIngest();
                break;
            case META_INGEST:
                me.startMetaIngest();
                break;
            case FULL_INGEST:
                me.startFullIngest();
                break;
            default:
                break;
            }
        }
        me.end();
    } catch (Exception e) {
        // Any failure (bad CLI args, ES/Spark startup, workflow errors)
        // lands here; print usage and log the cause.
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(
                "MudrodEngine: 'dataDir' argument is mandatory. " + "User must also provide an ingest method.",
                options, true);
        LOG.error("Error whilst parsing command line.", e);
    }
}

From source file:com.indeed.imhotep.builder.tsv.TsvConverter.java

/**
 * Command-line entry point for the TSV-to-Imhotep shard builder.
 *
 * <p>Parses the four mandatory location options (index, success, failure,
 * data) plus the optional local build directory, HDFS principal/keytab and
 * QA-mode flag, hands them to a {@link TsvConverter}, and runs the
 * conversion loop. On a parse failure the usage text is printed and the
 * process exits normally without running the converter.
 *
 * <p>{@code static-access} warnings are suppressed because the legacy
 * commons-cli 1.x {@code OptionBuilder} fluent API is accessed statically.
 */
@SuppressWarnings("static-access")
public static void main(String[] args) {
    final Options options;
    final CommandLineParser parser = new PosixParser();
    final CommandLine cmd;

    // Build the option set: -i/-s/-f/-d are required; -b/-p/-k/-q are optional.
    options = new Options();
    final Option idxLoc = OptionBuilder.withArgName("path").hasArg().withLongOpt("index-loc")
            .withDescription("").isRequired().create('i');
    options.addOption(idxLoc);
    final Option successLoc = OptionBuilder.withArgName("path").hasArg().withLongOpt("success-loc")
            .withDescription("").isRequired().create('s');
    options.addOption(successLoc);
    final Option failureLoc = OptionBuilder.withArgName("path").hasArg().withLongOpt("failure-loc")
            .withDescription("").isRequired().create('f');
    options.addOption(failureLoc);
    final Option dataLoc = OptionBuilder.withArgName("path").hasArg().withLongOpt("data-loc")
            .withDescription("Location to store the built indexes").isRequired().create('d');
    options.addOption(dataLoc);
    final Option buildLoc = OptionBuilder.withArgName("path").hasArg().withLongOpt("build-loc")
            .withDescription("Local directory where the indexes are built").create('b');
    options.addOption(buildLoc);
    final Option hdfsPrincipal = OptionBuilder.withArgName("name").hasArg().withLongOpt("hdfs-principal")
            .withDescription("HDFS principal (only when using HDFS)").create('p');
    options.addOption(hdfsPrincipal);
    final Option hdfsKeytab = OptionBuilder.withArgName("file").hasArg().withLongOpt("hdfs-keytab")
            .withDescription("HDFS keytab file location (only when using HDFS)").create('k');
    options.addOption(hdfsKeytab);
    final Option qaMode = OptionBuilder.withLongOpt("qa-mode").withDescription("Enable QA mode").create('q');
    options.addOption(qaMode);

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException exp) {
        // A parse failure is a user error: report it on stderr (the original
        // wrote to stdout, which pollutes any piped output) and show usage.
        System.err.println("Unexpected exception: " + exp.getMessage());
        System.err.println("\n");
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("shardBuilder", options, true);
        return;
    }

    /* Start up the shard builder. Optional options yield null (or false for
     * the -q flag) when absent; init() is expected to tolerate that. */
    final TsvConverter converter = new TsvConverter();
    converter.init(cmd.getOptionValue('i'), cmd.getOptionValue('s'), cmd.getOptionValue('f'),
            cmd.getOptionValue('d'), cmd.getOptionValue('b'), cmd.getOptionValue('p'), cmd.getOptionValue('k'),
            cmd.hasOption('q'));

    converter.run();
}

From source file:com.cloudera.flume.master.FlumeMaster.java

/**
 * Entry point invoked when {@code bin/flume master} is executed.
 *
 * <p>Sequence: log version/environment, verify the JVM is at least 1.6,
 * hard-load the Flume configuration, parse the command line, resolve the
 * master server id (from {@code -i} or by host-name inference), optionally
 * autoload a saved node configuration (suppressed by {@code -f}), and then
 * serve until shutdown.
 */
public static void main(String[] argv) {
    FlumeNode.logVersion(LOG);
    FlumeNode.logEnvironment(LOG);
    // Make sure the Java version is not older than 1.6.
    if (!CheckJavaVersion.isVersionOk()) {
        LOG.error("Exiting because of an old Java version or Java version in bad format");
        System.exit(-1);
    }
    FlumeConfiguration.hardExitLoadConfig(); // if config file is bad hardexit.

    CommandLine cmd = null;
    Options options = new Options();
    options.addOption("c", true, "Load config from file");
    options.addOption("f", false, "Use fresh (empty) flume configs");
    options.addOption("i", true, "Server id (an integer from 0 up)");

    try {
        CommandLineParser parser = new PosixParser();
        cmd = parser.parse(options, argv);
    } catch (ParseException e) {
        HelpFormatter fmt = new HelpFormatter();
        fmt.printHelp("FlumeNode", options, true);
        System.exit(1);
    }

    // Default savefile from configuration; -c below overrides it.
    String nodeconfig = FlumeConfiguration.get().getMasterSavefile();

    if (cmd != null && cmd.hasOption("c")) {
        nodeconfig = cmd.getOptionValue("c");
    }

    if (cmd != null && cmd.hasOption("i")) {
        // If manually overridden by command line, accept it, live with
        // consequences.
        String sid = cmd.getOptionValue("i");
        LOG.info("Setting serverid from command line to be " + sid);
        try {
            int serverid = Integer.parseInt(cmd.getOptionValue("i"));
            FlumeConfiguration.get().setInt(FlumeConfiguration.MASTER_SERVER_ID, serverid);
        } catch (NumberFormatException e) {
            LOG.error("Couldn't parse server id as integer: " + sid);
            System.exit(1);
        }
    } else {
        // No -i given: attempt to auto detect the master id from the host.
        try {
            if (!inferMasterHostID()) {
                System.exit(1);
            }
        } catch (Exception e) {
            // A resolvable master host is required to continue.
            LOG.error("Unable to resolve host '{}' ", e.getMessage());
            System.exit(1);
        }

    }

    // This will instantiate and read FlumeConfiguration - so make sure that
    // this is *after* we set the MASTER_SERVER_ID above.
    FlumeMaster config = new FlumeMaster();
    LOG.info("Starting flume master on: " + NetUtils.localhost());
    LOG.info(" Working Directory is: " + new File(".").getAbsolutePath());

    try {
        boolean autoload = FlumeConfiguration.get().getMasterSavefileAutoload();
        try {
            // Autoload the saved config unless -f (fresh configs) was given.
            // (Simplified from "cmd == null || (cmd != null && ...)" — the
            // inner null check was redundant in that disjunct.)
            if (autoload && (cmd == null || !cmd.hasOption("f"))) {
                // autoload a config?
                config.getSpecMan().loadConfigFile(nodeconfig);
            }
        } catch (IOException e) {
            LOG.warn("Could not autoload config from " + nodeconfig + " because " + e.getMessage());
        }
        config.serve();

    } catch (IOException e) {
        LOG.error("IO problem: " + e.getMessage());
        LOG.debug("IOException", e);
    }
}