Example usage for org.apache.commons.cli Option UNLIMITED_VALUES

List of usage examples for org.apache.commons.cli Option UNLIMITED_VALUES

Introduction

On this page you can find example usages of org.apache.commons.cli Option UNLIMITED_VALUES.

Prototype

int UNLIMITED_VALUES

To view the source code for org.apache.commons.cli Option UNLIMITED_VALUES, click the Source Link.

Document

A constant specifying that the number of argument values for an option is unlimited.
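
A minimal, self-contained sketch of the typical pattern (the class name and the -i/--inputs option are hypothetical, not taken from the examples below): setArgs(Option.UNLIMITED_VALUES) lets a single option consume every value that follows it, and getOptionValues returns them all as an array.

import org.apache.commons.cli.*;

public class UnlimitedValuesDemo {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();

        // hypothetical option: -i/--inputs takes one or more values
        Option inputs = new Option("i", "inputs", true, "input files");
        inputs.setArgs(Option.UNLIMITED_VALUES); // accept any number of values
        inputs.setRequired(true);
        options.addOption(inputs);

        CommandLine cmd = new DefaultParser().parse(options, args);

        // e.g. "-i a.txt b.txt c.txt" yields {"a.txt", "b.txt", "c.txt"}
        for (String value : cmd.getOptionValues("i")) {
            System.out.println(value);
        }
    }
}

Running it as "java UnlimitedValuesDemo -i a.txt b.txt c.txt" prints each value on its own line.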

Usage

From source file:edu.nyu.vida.data_polygamy.relationship_computation.Relationship.java

/**
 * @param args
 * @throws ParseException 
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the relationship " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option scoreOption = new Option("sc", "score", true, "set threshold for relationship score");
    scoreOption.setRequired(false);
    scoreOption.setArgName("SCORE THRESHOLD");
    options.addOption(scoreOption);

    Option strengthOption = new Option("st", "strength", true, "set threshold for relationship strength");
    strengthOption.setRequired(false);
    strengthOption.setArgName("STRENGTH THRESHOLD");
    options.addOption(strengthOption);

    Option completeRandomizationOption = new Option("c", "complete-randomization", false,
            "use complete randomization when performing significance tests");
    completeRandomizationOption.setRequired(false);
    options.addOption(completeRandomizationOption);

    Option idOption = new Option("id", "ids", false, "output id instead of names for datasets and attributes");
    idOption.setRequired(false);
    options.addOption(idOption);

    Option g1Option = new Option("g1", "first-group", true, "set first group of datasets");
    g1Option.setRequired(true);
    g1Option.setArgName("FIRST GROUP");
    g1Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g1Option);

    Option g2Option = new Option("g2", "second-group", true, "set second group of datasets");
    g2Option.setRequired(false);
    g2Option.setArgName("SECOND GROUP");
    g2Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g2Option);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    Option removeOption = new Option("r", "remove-not-significant", false,
            "remove relationships that are not" + "significant from the final output");
    removeOption.setRequired(false);
    options.addOption(removeOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> firstGroup = new ArrayList<String>();
    ArrayList<String> secondGroup = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();

    boolean removeNotSignificant = cmd.hasOption("r");
    boolean removeExistingFiles = cmd.hasOption("f");
    boolean completeRandomization = cmd.hasOption("c");
    boolean hasScoreThreshold = cmd.hasOption("sc");
    boolean hasStrengthThreshold = cmd.hasOption("st");
    boolean outputIds = cmd.hasOption("id");
    String scoreThreshold = hasScoreThreshold ? cmd.getOptionValue("sc") : "";
    String strengthThreshold = hasStrengthThreshold ? cmd.getOptionValue("st") : "";

    // all datasets
    ArrayList<String> all_datasets = new ArrayList<String>();
    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        all_datasets.add(line.split("\t")[0]);
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();
    String[] all_datasets_array = new String[all_datasets.size()];
    all_datasets.toArray(all_datasets_array);

    String[] firstGroupCmd = cmd.getOptionValues("g1");
    String[] secondGroupCmd = cmd.hasOption("g2") ? cmd.getOptionValues("g2") : all_datasets_array;
    addDatasets(firstGroupCmd, firstGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);
    addDatasets(secondGroupCmd, secondGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    if (firstGroup.isEmpty()) {
        System.out.println("No indices from datasets in G1.");
        System.exit(0);
    }

    if (secondGroup.isEmpty()) {
        System.out.println("No indices from datasets in G2.");
        System.exit(0);
    }

    // getting dataset ids

    String datasetNames = "";
    String datasetIds = "";
    HashMap<String, String> datasetId = new HashMap<String, String>();
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        datasetId.put(it.next(), null);
    }

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    br = new BufferedReader(new InputStreamReader(fs.open(path)));
    line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        all_datasets.add(dt[0]);
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    String firstGroupStr = "";
    String secondGroupStr = "";
    for (String dataset : firstGroup) {
        firstGroupStr += datasetId.get(dataset) + ",";
    }
    for (String dataset : secondGroup) {
        secondGroupStr += datasetId.get(dataset) + ",";
    }
    firstGroupStr = firstGroupStr.substring(0, firstGroupStr.length() - 1);
    secondGroupStr = secondGroupStr.substring(0, secondGroupStr.length() - 1);

    String relationshipsDir = "";
    if (outputIds) {
        relationshipsDir = FrameworkUtils.relationshipsIdsDir;
    } else {
        relationshipsDir = FrameworkUtils.relationshipsDir;
    }

    FrameworkUtils.createDir(s3bucket + relationshipsDir, s3conf, s3);

    String random = completeRandomization ? "complete" : "restricted";

    String indexInputDirs = "";
    String noRelationship = "";

    HashSet<String> dirs = new HashSet<String>();

    String dataset1;
    String dataset2;
    String datasetId1;
    String datasetId2;
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            datasetId1 = datasetId.get(dataset1);
            datasetId2 = datasetId.get(dataset2);

            if (dataset1.equals(dataset2))
                continue;
            String correlationOutputFileName = s3bucket + relationshipsDir + "/" + dataset1 + "-" + dataset2
                    + "/";

            if (removeExistingFiles) {
                FrameworkUtils.removeFile(correlationOutputFileName, s3conf, s3);
            }
            if (!FrameworkUtils.fileExists(correlationOutputFileName, s3conf, s3)) {
                dirs.add(s3bucket + FrameworkUtils.indexDir + "/" + dataset1);
                dirs.add(s3bucket + FrameworkUtils.indexDir + "/" + dataset2);
            } else {
                noRelationship += datasetId1 + "-" + datasetId2 + ",";
            }
        }
    }

    if (dirs.isEmpty()) {
        System.out.println("All the relationships were already computed.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    for (String dir : dirs) {
        indexInputDirs += dir + ",";
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "relationship" + "-" + random;
    String relationshipOutputDir = s3bucket + relationshipsDir + "/tmp/";

    FrameworkUtils.removeFile(relationshipOutputDir, s3conf, s3);

    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg", datasetAgg.get(shortDataset.get(i)));
    }
    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg-size",
                Integer.toString(datasetAgg.get(shortDataset.get(i)).split(",").length));
    }
    conf.set("dataset-keys", datasetIds);
    conf.set("dataset-names", datasetNames);
    conf.set("first-group", firstGroupStr);
    conf.set("second-group", secondGroupStr);
    conf.set("complete-random", String.valueOf(completeRandomization));
    conf.set("output-ids", String.valueOf(outputIds));
    conf.set("complete-random-str", random);
    conf.set("main-dataset-id", datasetId.get(shortDataset.get(0)));
    conf.set("remove-not-significant", String.valueOf(removeNotSignificant));
    if (noRelationship.length() > 0) {
        conf.set("no-relationship", noRelationship.substring(0, noRelationship.length() - 1));
    }
    if (hasScoreThreshold) {
        conf.set("score-threshold", scoreThreshold);
    }
    if (hasStrengthThreshold) {
        conf.set("strength-threshold", strengthThreshold);
    }

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");
    conf.set("mapreduce.task.timeout", "2400000");

    if (s3) {
        machineConf.setMachineConfiguration(conf);
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        conf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    Job job = new Job(conf);
    job.setJobName(jobName);

    job.setMapOutputKeyClass(PairAttributeWritable.class);
    job.setMapOutputValueClass(TopologyTimeSeriesWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(CorrelationMapper.class);
    job.setReducerClass(CorrelationReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());

    job.setInputFormatClass(SequenceFileInputFormat.class);
    //job.setOutputFormatClass(TextOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);

    FileInputFormat.setInputDirRecursive(job, true);
    FileInputFormat.setInputPaths(job, indexInputDirs.substring(0, indexInputDirs.length() - 1));
    FileOutputFormat.setOutputPath(job, new Path(relationshipOutputDir));

    job.setJarByClass(Relationship.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            if (dataset1.equals(dataset2))
                continue;

            String from = s3bucket + relationshipsDir + "/tmp/" + dataset1 + "-" + dataset2 + "/";
            String to = s3bucket + relationshipsDir + "/" + dataset1 + "-" + dataset2 + "/";
            FrameworkUtils.renameFile(from, to, s3conf, s3);
        }
    }
}

From source file:com.enioka.jqm.tools.Main.java

/**
 * Startup method for the packaged JAR
 * 
 * @param args
 *            0 is node name
 */
@SuppressWarnings("static-access")
public static void main(String[] args) {
    Helpers.setLogFileName("cli");
    Option o00 = OptionBuilder.withArgName("nodeName").hasArg().withDescription("name of the JQM node to start")
            .isRequired().create("startnode");
    Option o01 = OptionBuilder.withDescription("display help").withLongOpt("help").create("h");
    Option o11 = OptionBuilder.withArgName("applicationname").hasArg()
            .withDescription("name of the application to launch").isRequired().create("enqueue");
    Option o21 = OptionBuilder.withArgName("xmlpath").hasArg()
            .withDescription("path of the XML configuration file to import").isRequired()
            .create("importjobdef");
    Option o31 = OptionBuilder.withArgName("xmlpath").hasArg()
            .withDescription("export all queue definitions into an XML file").isRequired()
            .create("exportallqueues");
    OptionBuilder.withArgName("xmlpath").hasArg()
            .withDescription("export some queue definitions into an XML file").isRequired()
            .create("exportqueuefile");
    OptionBuilder.withArgName("queues").hasArg().withDescription("queues to export").withValueSeparator(',')
            .isRequired().create("queue");
    Option o51 = OptionBuilder.withArgName("xmlpath").hasArg()
            .withDescription("import all queue definitions from an XML file").isRequired()
            .create("importqueuefile");
    Option o61 = OptionBuilder.withArgName("nodeName").hasArg()
            .withDescription("creates a JQM node of this name, or updates it if it exists. Implies -u.")
            .isRequired().create("createnode");
    Option o71 = OptionBuilder.withDescription("display JQM engine version").withLongOpt("version").create("v");
    Option o81 = OptionBuilder.withDescription("upgrade JQM database").withLongOpt("upgrade").create("u");
    Option o91 = OptionBuilder.withArgName("jobInstanceId").hasArg()
            .withDescription("get job instance status by ID").isRequired().withLongOpt("getstatus").create("g");
    Option o101 = OptionBuilder.withArgName("password").hasArg()
            .withDescription("creates or resets root admin account password").isRequired().withLongOpt("root")
            .create("r");
    Option o111 = OptionBuilder.withArgName("option").hasArg()
            .withDescription(
                    "ws handling. Possible values are: enable, disable, ssl, nossl, internalpki, externalapi")
            .isRequired().withLongOpt("gui").create("w");
    Option o121 = OptionBuilder.withArgName("id[,logfilepath]").hasArg().withDescription("single launch mode")
            .isRequired().withLongOpt("gui").create("s");
    Option o131 = OptionBuilder.withArgName("resourcefile").hasArg()
            .withDescription("resource parameter file to use. Default is resources.xml")
            .withLongOpt("resources").create("p");
    Option o141 = OptionBuilder.withArgName("login,password,role1,role2,...").hasArgs(Option.UNLIMITED_VALUES)
            .withValueSeparator(',')
            .withDescription("Create or update a JQM account. Roles must exist beforehand.").create("U");

    Options options = new Options();
    OptionGroup og1 = new OptionGroup();
    og1.setRequired(true);
    og1.addOption(o00);
    og1.addOption(o01);
    og1.addOption(o11);
    og1.addOption(o21);
    og1.addOption(o31);
    og1.addOption(o51);
    og1.addOption(o61);
    og1.addOption(o71);
    og1.addOption(o81);
    og1.addOption(o91);
    og1.addOption(o101);
    og1.addOption(o111);
    og1.addOption(o121);
    og1.addOption(o141);
    options.addOptionGroup(og1);
    OptionGroup og2 = new OptionGroup();
    og2.addOption(o131);
    options.addOptionGroup(og2);

    HelpFormatter formatter = new HelpFormatter();
    formatter.setWidth(160);

    try {
        // Parse arguments
        CommandLineParser parser = new BasicParser();
        CommandLine line = parser.parse(options, args);

        // Other db connection?
        if (line.getOptionValue(o131.getOpt()) != null) {
            jqmlogger.info("Using resource XML file " + line.getOptionValue(o131.getOpt()));
            Helpers.resourceFile = line.getOptionValue(o131.getOpt());
        }

        // Set db connection
        Helpers.registerJndiIfNeeded();

        // Enqueue
        if (line.getOptionValue(o11.getOpt()) != null) {
            enqueue(line.getOptionValue(o11.getOpt()));
        }
        // Get status
        if (line.getOptionValue(o91.getOpt()) != null) {
            getStatus(Integer.parseInt(line.getOptionValue(o91.getOpt())));
        }
        // Import XML
        else if (line.getOptionValue(o21.getOpt()) != null) {
            importJobDef(line.getOptionValue(o21.getOpt()));
        }
        // Start engine
        else if (line.getOptionValue(o00.getOpt()) != null) {
            startEngine(line.getOptionValue(o00.getOpt()));
        }
        // Export all Queues
        else if (line.getOptionValue(o31.getOpt()) != null) {
            exportAllQueues(line.getOptionValue(o31.getOpt()));
        }
        // Import queues
        else if (line.getOptionValue(o51.getOpt()) != null) {
            importQueues(line.getOptionValue(o51.getOpt()));
        }
        // Create node
        else if (line.getOptionValue(o61.getOpt()) != null) {
            createEngine(line.getOptionValue(o61.getOpt()));
        }
        // Upgrade
        else if (line.hasOption(o81.getOpt())) {
            upgrade();
        }
        // Help
        else if (line.hasOption(o01.getOpt())) {
            formatter.printHelp("java -jar jqm-engine.jar", options, true);
        }
        // Version
        else if (line.hasOption(o71.getOpt())) {
            jqmlogger.info("Engine version: " + Helpers.getMavenVersion());
        }
        // Root password
        else if (line.hasOption(o101.getOpt())) {
            root(line.getOptionValue(o101.getOpt()));
        }
        // Web options
        else if (line.hasOption(o111.getOpt())) {
            ws(line.getOptionValue(o111.getOpt()));
        }
        // Single launch mode
        else if (line.hasOption(o121.getOpt())) {
            single(line.getOptionValue(o121.getOpt()));
        }
        // User handling
        else if (line.hasOption(o141.getOpt())) {
            user(line.getOptionValues(o141.getOpt()));
        }
    } catch (ParseException exp) {
        jqmlogger.fatal("Could not read command line: " + exp.getMessage());
        formatter.printHelp("java -jar jqm-engine.jar", options, true);
        return;
    }
}

From source file:com.basistech.ninja.Train.java

/**
 * Command line interface to train a model.
 *
 * <pre>
 *  usage: Train [options]
 *  --batch-size <arg>      batch size (default = 10)
 *  --epochs <arg>          epochs (default = 5)
 *  --examples <arg>        input examples file (required)
 *  --layer-sizes <arg>     layer sizes, including input/output, e.g. 3 4 2 (required)
 *  --learning-rate <arg>   learning-rate (default = 0.7)
 *  --model <arg>           output model file (required)
 * </pre>
 *
 * @param args command line arguments
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    String defaultBatchSize = "10";
    String defaultEpochs = "5";
    String defaultLearningRate = "0.7";

    Options options = new Options();
    Option option;
    option = new Option(null, "examples", true, "input examples file (required)");
    option.setRequired(true);
    options.addOption(option);
    option = new Option(null, "model", true, "output model file (required)");
    option.setRequired(true);
    options.addOption(option);
    option = new Option(null, "layer-sizes", true,
            "layer sizes, including input/output, e.g. 3 4 2 (required)");
    option.setRequired(true);
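    // layer sizes form a variable-length list (e.g. 3 4 2), hence UNLIMITED_VALUES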
    option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(option);
    option = new Option(null, "batch-size", true, String.format("batch size (default = %s)", defaultBatchSize));
    options.addOption(option);
    option = new Option(null, "epochs", true, String.format("epochs (default = %s)", deafaultEpochs));
    options.addOption(option);
    option = new Option(null, "learning-rate", true,
            String.format("learning-rate (default = %s)", defaultLearningRate));
    options.addOption(option);

    CommandLineParser parser = new GnuParser();
    CommandLine cmdline = null;
    try {
        cmdline = parser.parse(options, args);
    } catch (org.apache.commons.cli.ParseException e) {
        System.err.println(e.getMessage());
        usage(options);
        System.exit(1);
    }
    String[] remaining = cmdline.getArgs();
    if (remaining == null) {
        usage(options);
        System.exit(1);
    }

    List<Integer> layerSizes = Lists.newArrayList();
    for (String s : cmdline.getOptionValues("layer-sizes")) {
        layerSizes.add(Integer.parseInt(s));
    }

    File examplesFile = new File(cmdline.getOptionValue("examples"));
    Train that = new Train(layerSizes, examplesFile);
    int batchSize = Integer.parseInt(cmdline.getOptionValue("batch-size", defaultBatchSize));
    int epochs = Integer.parseInt(cmdline.getOptionValue("epochs", defaultEpochs));
    double learningRate = Double.parseDouble(cmdline.getOptionValue("learning-rate", defaultLearningRate));
    File modelFile = new File(cmdline.getOptionValue("model"));

    that.train(batchSize, epochs, learningRate, modelFile);
}

From source file:tuit.java

@SuppressWarnings("ConstantConditions")
public static void main(String[] args) {
    System.out.println(licence);
    //Declare variables
    File inputFile;
    File outputFile;
    File tmpDir;
    File blastnExecutable;
    File properties;
    File blastOutputFile = null;
    //
    TUITPropertiesLoader tuitPropertiesLoader;
    TUITProperties tuitProperties;
    //
    String[] parameters = null;
    //
    Connection connection = null;
    MySQL_Connector mySQL_connector;
    //
    Map<Ranks, TUITCutoffSet> cutoffMap;
    //
    BLASTIdentifier blastIdentifier = null;
    //
    RamDb ramDb = null;

    CommandLineParser parser = new GnuParser();
    Options options = new Options();

    options.addOption(tuit.IN, "input<file>", true, "Input file (currently fasta-formatted only)");
    options.addOption(tuit.OUT, "output<file>", true, "Output file (in " + tuit.TUIT_EXT + " format)");
    options.addOption(tuit.P, "prop<file>", true, "Properties file (XML formatted)");
    options.addOption(tuit.V, "verbose", false, "Enable verbose output");
    options.addOption(tuit.B, "blast_output<file>", true, "Perform on a pre-BLASTed output");
    options.addOption(tuit.DEPLOY, "deploy", false, "Deploy the taxonomic databases");
    options.addOption(tuit.UPDATE, "update", false, "Update the taxonomic databases");
    options.addOption(tuit.USE_DB, "usedb", false, "Use RDBMS instead of RAM-based taxonomy");

    Option option = new Option(tuit.REDUCE, "reduce", true,
            "Pack identical (100% similar sequences) records in the given sample file");
    option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(option);
    option = new Option(tuit.COMBINE, "combine", true,
            "Combine a set of given reduction files into an HMP Tree-compatible taxonomy");
    option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(option);
    options.addOption(tuit.NORMALIZE, "normalize", false,
            "If used in combination with -combine ensures that the values are normalized by the root value");

    HelpFormatter formatter = new HelpFormatter();

    try {

        //Get TUIT directory
        final File tuitDir = new File(
                new File(tuit.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath())
                        .getParent());
        final File ramDbFile = new File(tuitDir, tuit.RAM_DB);

        //Setup logger
        Log.getInstance().setLogName("tuit.log");

        //Read command line
        final CommandLine commandLine = parser.parse(options, args, true);

        //Check if the REDUCE option is on
        if (commandLine.hasOption(tuit.REDUCE)) {

            final String[] fileList = commandLine.getOptionValues(tuit.REDUCE);
            for (String s : fileList) {
                final Path path = Paths.get(s);
                Log.getInstance().log(Level.INFO, "Processing " + path.toString() + "...");
                final NucleotideFastaSequenceReductor nucleotideFastaSequenceReductor = NucleotideFastaSequenceReductor
                        .fromPath(path);
                ReductorFileOperator.save(nucleotideFastaSequenceReductor,
                        path.resolveSibling(path.getFileName().toString() + ".rdc"));
            }

            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        //Check if COMBINE is on
        if (commandLine.hasOption(tuit.COMBINE)) {
            final boolean normalize = commandLine.hasOption(tuit.NORMALIZE);
            final String[] fileList = commandLine.getOptionValues(tuit.COMBINE);
            //TODO: implement a test for format here

            final List<TreeFormatter.TreeFormatterFormat.HMPTreesOutput> hmpTreesOutputs = new ArrayList<>();
            final TreeFormatter treeFormatter = TreeFormatter
                    .newInstance(new TreeFormatter.TuitLineTreeFormatterFormat());
            for (String s : fileList) {
                final Path path = Paths.get(s);
                Log.getInstance().log(Level.INFO, "Merging " + path.toString() + "...");
                treeFormatter.loadFromPath(path);
                final TreeFormatter.TreeFormatterFormat.HMPTreesOutput output = TreeFormatter.TreeFormatterFormat.HMPTreesOutput
                        .newInstance(treeFormatter.toHMPTree(normalize), s.substring(0, s.indexOf(".")));
                hmpTreesOutputs.add(output);
                treeFormatter.erase();
            }
            final Path destination;
            if (commandLine.hasOption(OUT)) {
                destination = Paths.get(commandLine.getOptionValue(tuit.OUT));
            } else {
                destination = Paths.get("merge.tcf");
            }
            CombinatorFileOperator.save(hmpTreesOutputs, treeFormatter, destination);
            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        if (!commandLine.hasOption(tuit.P)) {
            throw new ParseException("No properties file option found, exiting.");
        } else {
            properties = new File(commandLine.getOptionValue(tuit.P));
        }

        //Load properties
        tuitPropertiesLoader = TUITPropertiesLoader.newInstanceFromFile(properties);
        tuitProperties = tuitPropertiesLoader.getTuitProperties();

        //Create tmp directory and blastn executable
        tmpDir = new File(tuitProperties.getTMPDir().getPath());
        blastnExecutable = new File(tuitProperties.getBLASTNPath().getPath());

        //Check for deploy
        if (commandLine.hasOption(tuit.DEPLOY)) {
            if (commandLine.hasOption(tuit.USE_DB)) {
                NCBITablesDeployer.fastDeployNCBIDatabasesFromNCBI(connection, tmpDir);
            } else {
                NCBITablesDeployer.fastDeployNCBIRamDatabaseFromNCBI(tmpDir, ramDbFile);
            }

            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }
        //Check for update
        if (commandLine.hasOption(tuit.UPDATE)) {
            if (commandLine.hasOption(tuit.USE_DB)) {
                NCBITablesDeployer.updateDatabasesFromNCBI(connection, tmpDir);
            } else {
                //No need to specify a different way to update the database other than just deploy in case of the RAM database
                NCBITablesDeployer.fastDeployNCBIRamDatabaseFromNCBI(tmpDir, ramDbFile);
            }
            Log.getInstance().log(Level.FINE, "Task done, exiting...");
            return;
        }

        //Connect to the database
        if (commandLine.hasOption(tuit.USE_DB)) {
            mySQL_connector = MySQL_Connector.newDefaultInstance(
                    "jdbc:mysql://" + tuitProperties.getDBConnection().getUrl().trim() + "/",
                    tuitProperties.getDBConnection().getLogin().trim(),
                    tuitProperties.getDBConnection().getPassword().trim());
            mySQL_connector.connectToDatabase();
            connection = mySQL_connector.getConnection();
        } else {
            //Probe for ram database

            if (ramDbFile.exists() && ramDbFile.canRead()) {
                Log.getInstance().log(Level.INFO, "Loading RAM taxonomic map...");
                try {
                    ramDb = RamDb.loadSelfFromFile(ramDbFile);
                } catch (IOException ie) {
                    if (ie instanceof java.io.InvalidClassException)
                        throw new IOException("The RAM-based taxonomic database needs to be updated.");
                }

            } else {
                Log.getInstance().log(Level.SEVERE,
                        "The RAM database either has not been deployed, or is not accessible."
                                + "Please use the --deploy option and check permissions on the TUIT directory. "
                                + "If you were looking to use the RDBMS as a taxonomic reference, plese use the -usedb option.");
                return;
            }
        }

        if (commandLine.hasOption(tuit.B)) {
            blastOutputFile = new File(commandLine.getOptionValue(tuit.B));
            if (!blastOutputFile.exists() || !blastOutputFile.canRead()) {
                throw new Exception("BLAST output file either does not exist, or is not readable.");
            } else if (blastOutputFile.isDirectory()) {
                throw new Exception("BLAST output file points to a directory.");
            }
        }
        //Check vital parameters
        if (!commandLine.hasOption(tuit.IN)) {
            throw new ParseException("No input file option found, exiting.");
        } else {
            inputFile = new File(commandLine.getOptionValue(tuit.IN));
            Log.getInstance().setLogName(inputFile.getName().split("\\.")[0] + ".tuit.log");
        }
        //Correct the output file option if needed
        if (!commandLine.hasOption(tuit.OUT)) {
            outputFile = new File((inputFile.getPath()).split("\\.")[0] + tuit.TUIT_EXT);
        } else {
            outputFile = new File(commandLine.getOptionValue(tuit.OUT));
        }

        //Adjust the output level
        if (commandLine.hasOption(tuit.V)) {
            Log.getInstance().setLevel(Level.FINE);
            Log.getInstance().log(Level.INFO, "Using verbose output for the log");
        } else {
            Log.getInstance().setLevel(Level.INFO);
        }
        //Try all files
        if (inputFile != null) {
            if (!inputFile.exists() || !inputFile.canRead()) {
                throw new Exception("Input file either does not exist, or is not readable.");
            } else if (inputFile.isDirectory()) {
                throw new Exception("Input file points to a directory.");
            }
        }

        if (!properties.exists() || !properties.canRead()) {
            throw new Exception("Properties file either does not exist, or is not readable.");
        } else if (properties.isDirectory()) {
            throw new Exception("Properties file points to a directory.");
        }

        //Create blast parameters
        final StringBuilder stringBuilder = new StringBuilder();
        for (Database database : tuitProperties.getBLASTNParameters().getDatabase()) {
            stringBuilder.append(database.getUse());
            stringBuilder.append(" ");//Gonna insert an extra space for the last database
        }
        String remote;
        String entrez_query;
        if (tuitProperties.getBLASTNParameters().getRemote().getDelegate().equals("yes")) {
            remote = "-remote";
            entrez_query = "-entrez_query";
            parameters = new String[] { "-db", stringBuilder.toString(), remote, entrez_query,
                    tuitProperties.getBLASTNParameters().getEntrezQuery().getValue(), "-evalue",
                    tuitProperties.getBLASTNParameters().getExpect().getValue() };
        } else {
            if (!commandLine.hasOption(tuit.B)) {
                if (tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase()
                        .startsWith("NOT")
                        || tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase()
                                .startsWith("ALL")) {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(), "-negative_gilist",
                            TUITFileOperatorHelper.restrictToEntrez(tmpDir,
                                    tuitProperties.getBLASTNParameters().getEntrezQuery().getValue()
                                            .toUpperCase().replace("NOT", "OR"))
                                    .getAbsolutePath(),
                            "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                } else if (tuitProperties.getBLASTNParameters().getEntrezQuery().getValue().toUpperCase()
                        .equals("")) {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(), "-num_threads",
                            tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                } else {
                    parameters = new String[] { "-db", stringBuilder.toString(), "-evalue",
                            tuitProperties.getBLASTNParameters().getExpect().getValue(),
                            /*"-gilist", TUITFileOperatorHelper.restrictToEntrez(
                            tmpDir, tuitProperties.getBLASTNParameters().getEntrezQuery().getValue()).getAbsolutePath(),*/ //TODO remove comment!!!!!
                            "-num_threads", tuitProperties.getBLASTNParameters().getNumThreads().getValue() };
                }
            }
        }
        //Prepare a cutoff Map
        if (tuitProperties.getSpecificationParameters() != null
                && tuitProperties.getSpecificationParameters().size() > 0) {
            cutoffMap = new HashMap<Ranks, TUITCutoffSet>(tuitProperties.getSpecificationParameters().size());
            for (SpecificationParameters specificationParameters : tuitProperties
                    .getSpecificationParameters()) {
                cutoffMap.put(Ranks.valueOf(specificationParameters.getCutoffSet().getRank()),
                        TUITCutoffSet.newDefaultInstance(
                                Double.parseDouble(
                                        specificationParameters.getCutoffSet().getPIdentCutoff().getValue()),
                                Double.parseDouble(specificationParameters.getCutoffSet()
                                        .getQueryCoverageCutoff().getValue()),
                                Double.parseDouble(
                                        specificationParameters.getCutoffSet().getAlpha().getValue())));
            }
        } else {
            cutoffMap = new HashMap<Ranks, TUITCutoffSet>();
        }
        final TUITFileOperatorHelper.OutputFormat format;
        if (tuitProperties.getBLASTNParameters().getOutputFormat().getFormat().equals("rdp")) {
            format = TUITFileOperatorHelper.OutputFormat.RDP_FIXRANK;
        } else {
            format = TUITFileOperatorHelper.OutputFormat.TUIT;
        }

        try (TUITFileOperator<NucleotideFasta> nucleotideFastaTUITFileOperator = NucleotideFastaTUITFileOperator
                .newInstance(format, cutoffMap);) {
            nucleotideFastaTUITFileOperator.setInputFile(inputFile);
            nucleotideFastaTUITFileOperator.setOutputFile(outputFile);
            final String cleanupString = tuitProperties.getBLASTNParameters().getKeepBLASTOuts().getKeep();
            final boolean cleanup;
            if (cleanupString.equals("no")) {
                Log.getInstance().log(Level.INFO, "Temporary BLAST files will be deleted.");
                cleanup = true;
            } else {
                Log.getInstance().log(Level.INFO, "Temporary BLAST files will be kept.");
                cleanup = false;
            }
            //Create blast identifier
            ExecutorService executorService = Executors.newSingleThreadExecutor();
            if (commandLine.hasOption(tuit.USE_DB)) {

                if (blastOutputFile == null) {
                    blastIdentifier = TUITBLASTIdentifierDB.newInstanceFromFileOperator(tmpDir,
                            blastnExecutable, parameters, nucleotideFastaTUITFileOperator, connection,
                            cutoffMap,
                            Integer.parseInt(
                                    tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                            cleanup);

                } else {
                    try {
                        blastIdentifier = TUITBLASTIdentifierDB.newInstanceFromBLASTOutput(
                                nucleotideFastaTUITFileOperator, connection, cutoffMap, blastOutputFile,
                                Integer.parseInt(
                                        tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                                cleanup);

                    } catch (JAXBException e) {
                        Log.getInstance().log(Level.SEVERE, "Error reading " + blastOutputFile.getName()
                                + ", please check input. The file must be XML formatted.");
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }

            } else {
                if (blastOutputFile == null) {
                    blastIdentifier = TUITBLASTIdentifierRAM.newInstanceFromFileOperator(tmpDir,
                            blastnExecutable, parameters, nucleotideFastaTUITFileOperator, cutoffMap,
                            Integer.parseInt(
                                    tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                            cleanup, ramDb);

                } else {
                    try {
                        blastIdentifier = TUITBLASTIdentifierRAM.newInstanceFromBLASTOutput(
                                nucleotideFastaTUITFileOperator, cutoffMap, blastOutputFile,
                                Integer.parseInt(
                                        tuitProperties.getBLASTNParameters().getMaxFilesInBatch().getValue()),
                                cleanup, ramDb);

                    } catch (JAXBException e) {
                        Log.getInstance().log(Level.SEVERE, "Error reading " + blastOutputFile.getName()
                                + ", please check input. The file must be XML formatted.");
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
            Future<?> runnableFuture = executorService.submit(blastIdentifier);
            runnableFuture.get();
            executorService.shutdown();
        }
    } catch (ParseException pe) {
        Log.getInstance().log(Level.SEVERE, (pe.getMessage()));
        formatter.printHelp("tuit", options);
    } catch (SAXException saxe) {
        Log.getInstance().log(Level.SEVERE, saxe.getMessage());
    } catch (FileNotFoundException fnfe) {
        Log.getInstance().log(Level.SEVERE, fnfe.getMessage());
    } catch (TUITPropertyBadFormatException tpbfe) {
        Log.getInstance().log(Level.SEVERE, tpbfe.getMessage());
    } catch (ClassCastException cce) {
        Log.getInstance().log(Level.SEVERE, cce.getMessage());
    } catch (JAXBException jaxbee) {
        Log.getInstance().log(Level.SEVERE,
                "The properties file is not well formatted. Please ensure that the XML is consistent with the io.properties.dtd schema.");
    } catch (ClassNotFoundException cnfe) {
        //Probably won't happen unless the library deleted from the .jar
        Log.getInstance().log(Level.SEVERE, cnfe.getMessage());
        //cnfe.printStackTrace();
    } catch (SQLException sqle) {
        Log.getInstance().log(Level.SEVERE,
                "A database communication error occurred with the following message:\n" + sqle.getMessage());
        //sqle.printStackTrace();
        if (sqle.getMessage().contains("Access denied for user")) {
            Log.getInstance().log(Level.SEVERE, "Please use standard database login: "
                    + NCBITablesDeployer.login + " and password: " + NCBITablesDeployer.password);
        }
    } catch (Exception e) {
        Log.getInstance().log(Level.SEVERE, e.getMessage());
        e.printStackTrace();
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException sqle) {
                Log.getInstance().log(Level.SEVERE, "Problem closing the database connection: " + sqle);
            }
        }
        Log.getInstance().log(Level.FINE, "Task done, exiting...");
    }
}

From source file:com.example.dlp.Redact.java

/** Command line application to redact strings, images using the Data Loss Prevention API. */
public static void main(String[] args) throws Exception {
    OptionGroup optionsGroup = new OptionGroup();
    optionsGroup.setRequired(true);
    Option stringOption = new Option("s", "string", true, "redact string");
    optionsGroup.addOption(stringOption);

    Option fileOption = new Option("f", "file path", true, "redact input file path");
    optionsGroup.addOption(fileOption);

    Options commandLineOptions = new Options();
    commandLineOptions.addOptionGroup(optionsGroup);

    Option minLikelihoodOption = Option.builder("minLikelihood").hasArg(true).required(false).build();

    commandLineOptions.addOption(minLikelihoodOption);

    Option replaceOption = Option.builder("r").longOpt("replace string").hasArg(true).required(false).build();
    commandLineOptions.addOption(replaceOption);

    Option infoTypesOption = Option.builder("infoTypes").hasArg(true).required(false).build();
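    // -infoTypes may list any number of info type names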
    infoTypesOption.setArgs(Option.UNLIMITED_VALUES);
    commandLineOptions.addOption(infoTypesOption);

    Option outputFilePathOption = Option.builder("o").hasArg(true).longOpt("outputFilePath").required(false)
            .build();
    commandLineOptions.addOption(outputFilePathOption);

    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;

    try {
        cmd = parser.parse(commandLineOptions, args);
    } catch (ParseException e) {
        System.out.println(e.getMessage());
        formatter.printHelp(Redact.class.getName(), commandLineOptions);
        System.exit(1);
        return;
    }

    String replacement = cmd.getOptionValue(replaceOption.getOpt(), "_REDACTED_");

    List<InfoType> infoTypesList = new ArrayList<>();
    String[] infoTypes = cmd.getOptionValues(infoTypesOption.getOpt());
    if (infoTypes != null) {
        for (String infoType : infoTypes) {
            infoTypesList.add(InfoType.newBuilder().setName(infoType).build());
        }
    }
    Likelihood minLikelihood = Likelihood.valueOf(
            cmd.getOptionValue(minLikelihoodOption.getOpt(), Likelihood.LIKELIHOOD_UNSPECIFIED.name()));

    // string inspection
    if (cmd.hasOption("s")) {
        String source = cmd.getOptionValue(stringOption.getOpt());
        redactString(source, replacement, minLikelihood, infoTypesList);
    } else if (cmd.hasOption("f")) {
        String filePath = cmd.getOptionValue(fileOption.getOpt());
        String outputFilePath = cmd.getOptionValue(outputFilePathOption.getOpt());
        redactImage(filePath, minLikelihood, infoTypesList, outputFilePath);
    }
}

From source file:com.samsung.sjs.Compiler.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws IOException, SolverException, InterruptedException {
    boolean debug = false;
    boolean use_gc = true;
    CompilerOptions.Platform p = CompilerOptions.Platform.Native;
    CompilerOptions opts = null;
    boolean field_opts = false;
    boolean typecheckonly = false;
    boolean showconstraints = false;
    boolean showconstraintsolution = false;
    String[] decls = null;
    String[] links = null;
    String[] objs = null;
    boolean guest = false;
    boolean stop_at_c = false;
    String external_compiler = null;
    boolean encode_vals = false;
    boolean x32 = false;
    boolean validate = true;
    boolean interop = false;
    boolean boot_interop = false;
    boolean oldExpl = false;
    String explanationStrategy = null;
    boolean efl = false;

    Options options = new Options();
    options.addOption("debugcompiler", false, "Enable compiler debug spew");
    //options.addOption("o", true, "Set compiler output file (must be .c)");
    options.addOption(OptionBuilder //.withArgName("o")
            .withLongOpt("output-file").withDescription("Output file (must be .c)").hasArg().withArgName("file")
            .create("o"));
    options.addOption(OptionBuilder.withLongOpt("target")
            .withDescription("Select target platform: 'native' or 'web'").hasArg().create());
    options.addOption(OptionBuilder.withLongOpt("gc")
            .withDescription("Disable GC: param is 'on' (default) or 'off'").hasArg().create());

    options.addOption(OptionBuilder
            .withDescription("Enable field access optimizations: param is 'true' (default) or 'false'").hasArg()
            .create("Xfields"));

    options.addOption(OptionBuilder
            .withDescription("Compile for encoded values.  TEMPORARY.  For testing interop codegen").hasArg()
            .create("XEncodedValues"));

    options.addOption(OptionBuilder.withLongOpt("typecheck-only")
            .withDescription("Only typecheck the file, don't compile").create());
    options.addOption(OptionBuilder.withLongOpt("show-constraints")
            .withDescription("Show constraints generated during type inference").create());
    options.addOption(OptionBuilder.withLongOpt("show-constraint-solution")
            .withDescription("Show solution to type inference constraints").create());
    options.addOption(OptionBuilder.withLongOpt("extra-decls")
            .withDescription("Specify extra declaration files, comma-separated").hasArg()
            .withValueSeparator(',').create());
    options.addOption(OptionBuilder.withLongOpt("native-libs")
            .withDescription("Specify extra linkage files, comma-separated").hasArg().withValueSeparator(',')
            .create());
    Option extraobjs = OptionBuilder.withLongOpt("extra-objs")
            .withDescription("Specify extra .c/.cpp files, comma-separated").hasArg().withValueSeparator(',')
            .create();
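    // --extra-objs takes any number of comma-separated .c/.cpp files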
    extraobjs.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(extraobjs);
    options.addOption(OptionBuilder.withLongOpt("guest-runtime")
            .withDescription(
                    "Emit code to be called by another runtime (i.e., main() is written in another language).")
            .create());
    options.addOption(OptionBuilder.withLongOpt("only-c")
            .withDescription("Generate C code, but do not attempt to compile it").create());
    options.addOption(OptionBuilder.withLongOpt("c-compiler")
            .withDescription("Disable GC: param is 'on' (default) or 'off'").hasArg().create("cc"));
    options.addOption(OptionBuilder.withLongOpt("runtime-src")
            .withDescription("Specify path to runtime system source").hasArg().create());
    options.addOption(OptionBuilder.withLongOpt("ext-path")
            .withDescription("Specify path to external dependency dir (GC, etc.)").hasArg().create());
    options.addOption(OptionBuilder.withLongOpt("skip-validation")
            .withDescription("Run the backend without validating the results of type inference").create());

    options.addOption(OptionBuilder.withLongOpt("m32").withDescription("Force 32-bit compilation").create());
    options.addOption(OptionBuilder.withLongOpt("Xbootinterop")
            .withDescription("Programs start with global interop dirty flag set (experimental)").create());
    options.addOption(OptionBuilder.withLongOpt("Xinterop")
            .withDescription("Enable (experimental) interoperability backend").create());

    options.addOption(OptionBuilder.withDescription("C compiler default optimization level").create("O"));
    options.addOption(OptionBuilder.withDescription("C compiler optimization level 0").create("O0"));
    options.addOption(OptionBuilder.withDescription("C compiler optimization level 1").create("O1"));
    options.addOption(OptionBuilder.withDescription("C compiler optimization level 2").create("O2"));
    options.addOption(OptionBuilder.withDescription("C compiler optimization level 3").create("O3"));

    options.addOption(
            OptionBuilder.withLongOpt("oldExpl").withDescription("Use old error explanations").create());

    String explanationStrategyHelp = "default: " + FixingSetFinder.defaultStrategy() + "; other choices: "
            + FixingSetFinder.strategyNames().stream().filter(s -> !s.equals(FixingSetFinder.defaultStrategy()))
                    .collect(Collectors.joining(", "));

    options.addOption(OptionBuilder.withLongOpt("explanation-strategy")
            .withDescription("Error explanation strategy to use (" + explanationStrategyHelp + ')').hasArg()
            .create());

    options.addOption(
            OptionBuilder.withLongOpt("efl").withDescription("Set up efl environment in main()").create());

    try {
        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);

        String[] newargs = cmd.getArgs();
        if (newargs.length != 1) {
            throw new ParseException("Invalid number of arguments");
        }

        String sourcefile = newargs[0];
        if (!sourcefile.endsWith(".js")) {
            throw new ParseException("Invalid file extension on input file: " + sourcefile);
        }

        String gc = cmd.getOptionValue("gc", "on");
        if (gc.equals("on")) {
            use_gc = true;
        } else if (gc.equals("off")) {
            use_gc = false;
        } else {
            throw new ParseException("Invalid GC option: " + gc);
        }
        String fields = cmd.getOptionValue("Xfields", "true");
        if (fields.equals("true")) {
            field_opts = true;
        } else if (fields.equals("false")) {
            field_opts = false;
        } else {
            throw new ParseException("Invalid field optimization option: " + fields);
        }
        String encoding = cmd.getOptionValue("XEncodedValues", "false");
        if (encoding.equals("true")) {
            encode_vals = true;
        } else if (encoding.equals("false")) {
            encode_vals = false;
        } else {
            throw new ParseException("Invalid value encoding option: " + encode_vals);
        }
        String plat = cmd.getOptionValue("target", "native");
        if (plat.equals("native")) {
            p = CompilerOptions.Platform.Native;
        } else if (plat.equals("web")) {
            p = CompilerOptions.Platform.Web;
        } else {
            throw new ParseException("Invalid target platform: " + plat);
        }
        if (cmd.hasOption("cc")) {
            external_compiler = cmd.getOptionValue("cc");
        }
        if (cmd.hasOption("skip-validation")) {
            validate = false;
        }
        if (cmd.hasOption("typecheck-only")) {
            typecheckonly = true;
        }
        if (cmd.hasOption("show-constraints")) {
            showconstraints = true;
        }
        if (cmd.hasOption("show-constraint-solution")) {
            showconstraintsolution = true;
        }
        if (cmd.hasOption("debugcompiler")) {
            debug = true;
        }
        if (cmd.hasOption("m32")) {
            x32 = true;
        }
        if (cmd.hasOption("Xinterop")) {
            interop = true;
        }
        if (cmd.hasOption("Xbootinterop")) {
            boot_interop = true;
            if (!interop) {
                System.err.println("WARNING: --Xbootinterop enabled without --Xinterop (no effect)");
            }
        }
        if (cmd.hasOption("oldExpl")) {
            oldExpl = true;
        }
        if (cmd.hasOption("explanation-strategy")) {
            explanationStrategy = cmd.getOptionValue("explanation-strategy");
        }
        String output = cmd.getOptionValue("o");
        if (output == null) {
            output = sourcefile.replaceFirst("\\.js$", ".c");
        } else {
            if (!output.endsWith(".c")) {
                throw new ParseException("Invalid file extension on output file: " + output);
            }
        }
        String runtime_src = cmd.getOptionValue("runtime-src");
        String ext_path = cmd.getOptionValue("ext-path");
        if (ext_path == null) {
            ext_path = new File(".").getCanonicalPath() + "/external";
        }

        if (cmd.hasOption("extra-decls")) {
            decls = cmd.getOptionValues("extra-decls");
        }
        if (cmd.hasOption("native-libs")) {
            links = cmd.getOptionValues("native-libs");
        }
        if (cmd.hasOption("extra-objs")) {
            objs = cmd.getOptionValues("extra-objs");
        }
        if (cmd.hasOption("guest-runtime")) {
            guest = true;
        }
        if (cmd.hasOption("only-c")) {
            stop_at_c = true;
        }
        if (cmd.hasOption("efl")) {
            efl = true;
        }

        int coptlevel; // C compiler optimization level; -1 defers to the compiler's default
        if (cmd.hasOption("O3")) {
            coptlevel = 3;
        } else if (cmd.hasOption("O2")) {
            coptlevel = 2;
        } else if (cmd.hasOption("O1")) {
            coptlevel = 1;
        } else if (cmd.hasOption("O0")) {
            coptlevel = 0;
        } else if (cmd.hasOption("O")) {
            coptlevel = -1;
        } else {
            coptlevel = 3;
        }

        if (!Files.exists(Paths.get(sourcefile))) {
            System.err.println("File " + sourcefile + " was not found.");
            throw new FileNotFoundException(sourcefile);
        }

        String cwd = new java.io.File(".").getCanonicalPath();

        opts = new CompilerOptions(p, sourcefile, debug, output, use_gc,
                external_compiler == null ? "clang" : external_compiler,
                external_compiler == null ? "emcc" : external_compiler, cwd + "/a.out", // emcc automatically adds .js
                new File(".").getCanonicalPath(), true, field_opts, showconstraints, showconstraintsolution,
                runtime_src, encode_vals, x32, oldExpl, explanationStrategy, efl, coptlevel);
        if (guest) {
            opts.setGuestRuntime();
        }
        if (interop) {
            opts.enableInteropMode();
        }
        if (boot_interop) {
            opts.startInInteropMode();
        }
        opts.setExternalDeps(ext_path);
        if (decls != null) {
            for (String s : decls) {
                Path fname = FileSystems.getDefault().getPath(s);
                opts.addDeclarationFile(fname);
            }
        }
        if (links != null) {
            for (String s : links) {
                Path fname = FileSystems.getDefault().getPath(s);
                opts.addLinkageFile(fname);
            }
        }
        if (objs == null) {
            objs = new String[0];
        }

    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("sjsc", options);
    }

    if (opts != null) {
        // This typechecks, and depending on flags, also generates C
        compile(opts, typecheckonly, validate); // don't worry about type validation on command line for now
        if (!typecheckonly && !stop_at_c) {
            int ret = 0;
            // Kept around for debugging 32-bit...
            if (p == CompilerOptions.Platform.Native) {
                if (opts.m32()) {
                    String[] x = new String[2];
                    x[0] = "-m32";
                    x[1] = opts.getExternalDeps() + "/gc/x86/lib/libgc.a";
                    ret = clang_compile(opts, objs, x);
                } else {
                    ret = clang_compile(opts, objs, new String[0]);
                }
            } else {
                ret = emcc_compile(opts, objs, new String[0]);
            }
            // If clang failed, propagate the failure outwards
            if (ret != 0) {
                System.exit(ret);
            }
        }
    }
}
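
The multi-valued options read back above via getOptionValues ("extra-decls", "native-libs", "extra-objs") are presumably declared with an unbounded argument count. A minimal, self-contained sketch of that OptionBuilder pattern follows; the -I/--include option and the class name are hypothetical and not part of the sjsc source:

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class UnlimitedOptionBuilderSketch {
    @SuppressWarnings({ "deprecation" }) // OptionBuilder and BasicParser are deprecated in newer Commons CLI
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        // Hypothetical -I/--include option accepting an unbounded list of values
        options.addOption(OptionBuilder.withLongOpt("include")
                .withDescription("Extra files to include")
                .hasArgs(Option.UNLIMITED_VALUES)
                .create("I"));

        // e.g. -I foo.h bar.h baz.h
        CommandLine cmd = new BasicParser().parse(options, args);
        if (cmd.hasOption("include")) {
            // getOptionValues returns every supplied value, in command-line order
            for (String value : cmd.getOptionValues("include")) {
                System.out.println("include: " + value);
            }
        }
    }
}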

From source file:at.ac.tuwien.inso.subcat.miner.MinerRunner.java

public static void main(String[] args) {
    Options options = new Options();
    options.addOption("h", "help", false, "Show this options");
    options.addOption("m", "miner-help", false, "Show miner specific options");
    options.addOption("d", "db", true, "The database to process (required)");
    options.addOption("p", "project", true, "The project ID to process");
    options.addOption("P", "list-projects", false, "List all registered projects");
    options.addOption("v", "verbose", false, "Show details");

    options.addOption(null, "bug-repo", true, "Bug Repository URL");
    options.addOption(null, "bug-product", true, "Bug Product Name");
    options.addOption(null, "bug-tracker", true, "Bug Tracker name (e.g. bugzilla)");
    options.addOption(null, "bug-account", true, "Bug account name");
    options.addOption(null, "bug-passwd", true, "Bug account password");
    options.addOption(null, "bug-enable-untrusted", false, "Accept untrusted certificates");
    options.addOption(null, "bug-threads", true, "Thread count used in bug miners");
    options.addOption(null, "bug-cooldown-time", true, "Bug cooldown time");
    options.addOption(null, "bug-miner-option", true, "Bug miner specific option. Format: <option-name>:value");
    options.addOption(null, "bug-update", false, "Mine all changes since the last run");
    options.getOption("bug-miner-option").setArgs(Option.UNLIMITED_VALUES);

    options.addOption(null, "src-path", true, "Local source repository path");
    options.addOption(null, "src-remote", true, "Remote address");
    options.addOption(null, "src-passwd", true, "Source repository account password");
    options.addOption(null, "src-account", true, "Source repository account name");
    options.addOption(null, "src-miner-option", true,
            "Source miner specific option. Format: <option-name>:value");
    options.getOption("src-miner-option").setArgs(Option.UNLIMITED_VALUES);

    final Reporter reporter = new Reporter(true);
    reporter.startTimer();

    boolean printTraces = false;
    Settings settings = new Settings();
    ModelPool pool = null;

    CommandLineParser parser = new PosixParser();

    try {
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("postprocessor", options);
            return;
        }

        if (cmd.hasOption("miner-help")) {
            init();
            for (MetaData meta : registeredMiner) {
                System.out.println(meta.name());
                for (Entry<String, ParamType> opt : meta.getSpecificParams().entrySet()) {
                    System.out.println("  - " + opt.getKey() + " (" + opt.getValue() + ")");
                }
            }
            return;
        }

        if (cmd.hasOption("db") == false) {
            reporter.error("miner", "Option --db is required");
            reporter.printSummary();
            return;
        }

        printTraces = cmd.hasOption("verbose");
        pool = new ModelPool(cmd.getOptionValue("db"), 2);

        if (cmd.hasOption("list-projects")) {
            Model model = pool.getModel();

            for (Project proj : model.getProjects()) {
                System.out.println("  " + proj.getId() + ": " + proj.getDate());
            }

            model.close();
            return;
        }

        Project project;
        Model model = pool.getModel();
        boolean hasSrcInfos = false;
        boolean hasBugInfos = false;
        if (cmd.hasOption("project")) {
            try {
                int projId = Integer.parseInt(cmd.getOptionValue("project"));
                project = model.getProject(projId);
            } catch (NumberFormatException e) {
                reporter.error("post-processor", "Invalid project ID");
                reporter.printSummary();
                return;
            }

            if (project == null) {
                reporter.error("post-processor", "Invalid project ID");
                reporter.printSummary();
                return;
            }

            List<String> flags = model.getFlags(project);
            hasBugInfos = flags.contains(Model.FLAG_BUG_INFO);
            hasSrcInfos = flags.contains(Model.FLAG_SRC_INFO);
        } else {
            project = model.addProject(new Date(), null, settings.bugTrackerName, settings.bugRepository,
                    settings.bugProductName, null);
        }
        model.close();

        //
        // Source Repository Mining:
        //

        settings.srcLocalPath = cmd.getOptionValue("src-path");
        settings.srcRemote = cmd.getOptionValue("src-remote");
        settings.srcRemotePw = cmd.getOptionValue("src-passwd");
        settings.srcRemoteUser = cmd.getOptionValue("src-account");

        if (settings.srcRemotePw == null || settings.srcRemoteUser == null) {
            if (settings.srcRemotePw != null) {
                reporter.error("miner", "--src-passwd should only be used in combination with --src-account");
                reporter.printSummary();
                return;
            } else if (settings.srcRemoteUser != null) {
                reporter.error("miner", "--src-account should only be used in combination with --src-passwd");
                reporter.printSummary();
                return;
            }
        }

        if (settings.srcRemoteUser != null && settings.srcRemote == null) {
            reporter.error("miner", "--src-account should only be used in combination with --src-remote");
            reporter.printSummary();
            return;
        }

        if (settings.srcLocalPath != null && hasSrcInfos) {
            reporter.error("miner", "Source repository updates are not supported yet.");
            reporter.printSummary(true);
            return;
        }

        //
        // Bug Repository Mining:
        //

        settings.bugRepository = cmd.getOptionValue("bug-repo");
        settings.bugProductName = cmd.getOptionValue("bug-product");
        settings.bugTrackerName = cmd.getOptionValue("bug-tracker");
        settings.bugEnableUntrustedCertificates = cmd.hasOption("bug-enable-untrusted");
        settings.bugLoginUser = cmd.getOptionValue("bug-account");
        settings.bugLoginPw = cmd.getOptionValue("bug-passwd");

        settings.bugUpdate = cmd.hasOption("bug-update");
        boolean includeBugs = false;
        if (settings.bugUpdate) {
            if (project.getBugTracker() == null || project.getDomain() == null) {
                reporter.error("miner",
                        "flag --bug-update is requires previously mined bugs. Use --bug-repository, --bug-product and --bug-tracker.");
                reporter.printSummary(true);
                return;
            }
            if (settings.bugTrackerName != null) {
                reporter.warning("miner", "flag --bug-tracker has no effect");
            }
            if (settings.bugProductName != null) {
                reporter.warning("miner", "flag --bug-product has no effect");
            }
            if (settings.bugRepository != null) {
                reporter.warning("miner", "flag --bug-repo has no effect");
            }

            settings.bugTrackerName = project.getBugTracker();
            settings.bugProductName = project.getProduct();
            settings.bugRepository = project.getDomain();
            includeBugs = true;
        } else if (settings.bugRepository != null && settings.bugProductName != null
                && settings.bugTrackerName != null) {
            if (hasBugInfos == false) {
                // The user is trying to append bug tracker information to an existing project.
            } else if (!settings.bugTrackerName.equalsIgnoreCase(project.getBugTracker())
                    || !settings.bugProductName.equalsIgnoreCase(project.getProduct())
                    || !settings.bugRepository.equalsIgnoreCase(project.getDomain())) {
                reporter.error("miner",
                        "There are already previously mined bugs for this project. Use --bug-update to update the database.");
                reporter.printSummary(true);
                return;
            }
            if (settings.bugRepository == null) {
                reporter.error("miner", "flag --bug-repository is required");
                reporter.printSummary(true);
                return;
            }
            if (settings.bugProductName == null) {
                reporter.error("miner", "flag --bug-product is required");
                reporter.printSummary(true);
                return;
            }
            includeBugs = true;
        }

        if (includeBugs) {
            if (settings.bugLoginPw == null || settings.bugLoginUser == null) {
                if (settings.bugLoginPw != null) {
                    reporter.error("miner",
                            "--bug-passwd should only be used in combination with --bug-account");
                    reporter.printSummary();
                    return;
                } else if (settings.bugLoginUser != null) {
                    reporter.error("miner",
                            "--bug-account should only be used in combination with --bug-passwd");
                    reporter.printSummary();
                    return;
                }
            }

            if (cmd.hasOption("bug-threads")) {
                try {
                    settings.bugThreads = Integer.parseInt(cmd.getOptionValue("bug-threads"));
                } catch (Exception e) {
                    reporter.error("miner", "--bug-threads: Invalid parameter type");
                    reporter.printSummary();
                    return;
                }
            }

            if (cmd.hasOption("bug-cooldown-time")) {
                try {
                    settings.bugCooldownTime = Integer.parseInt(cmd.getOptionValue("bug-cooldown-time"));
                } catch (Exception e) {
                    reporter.error("miner", "--bug-cooldown-time: Invalid parameter type");
                    reporter.printSummary();
                    return;
                }
            }

            if (cmd.hasOption("bug-miner-option")) {
                for (String str : cmd.getOptionValues("bug-miner-option")) {
                    addSpecificParameter(settings.bugSpecificParams, str);
                }
            }

            if (cmd.hasOption("src-miner-option")) {
                for (String str : cmd.getOptionValues("src-miner-option")) {
                    addSpecificParameter(settings.srcSpecificParams, str);
                }
            }
        } else {
            if (settings.bugLoginPw != null) {
                reporter.error("miner", "--bug-passwd should only be used in combination with --bug-account");
                reporter.printSummary();
                return;
            }
            if (settings.bugLoginUser != null) {
                reporter.error("miner", "--bug-account should only be used in combination with --bug-account");
                reporter.printSummary();
                return;
            }
            if (settings.bugRepository != null) {
                reporter.error("miner",
                        "--bug-repo should only be used in combination with --bug-product and --bug-tracker");
                reporter.printSummary();
                return;
            }
            if (settings.bugProductName != null) {
                reporter.error("miner",
                        "--bug-product should only be used in combination with --bug-repo and --bug-tracker");
                reporter.printSummary();
                return;
            }
            if (settings.bugTrackerName != null) {
                reporter.error("miner",
                        "--bug-tracker should only be used in combination with --bug-repo and --bug-product");
                reporter.printSummary();
                return;
            }
            if (settings.bugEnableUntrustedCertificates) {
                reporter.error("miner",
                        "--bug-enable-untrusted should only be used in combination with --bug-repo, --bug-tracker and --bug-product");
                reporter.printSummary();
                return;
            }
            if (settings.bugUpdate) {
                reporter.error("miner",
                        "--bug-update should only be used in combination with --bug-repo, --bug-tracker and --bug-product");
                reporter.printSummary();
                return;
            }
            if (cmd.hasOption("bug-threads")) {
                reporter.error("miner",
                        "--bug-threads should only be used in combination with --bug-repo, --bug-tracker and --bug-product");
                reporter.printSummary();
                return;
            }
            if (cmd.hasOption("bug-cooldown-time")) {
                reporter.error("miner",
                        "--bug-cooldown-time should only be used in combination with --bug-repo, --bug-tracker and --bug-product");
                reporter.printSummary();
                return;
            }
            if (cmd.hasOption("bug-miner-option")) {
                reporter.error("miner",
                        "--bug-miner-option should only be used in combination with --bug-repo, --bug-tracker and --bug-product");
                reporter.printSummary();
                return;
            }
        }

        //
        // Run:
        //

        MinerRunner runner = new MinerRunner(pool, project, settings, reporter);
        if (cmd.hasOption("verbose")) {
            runner.addListener(new MinerListener() {
                private Map<Miner, Integer> totals = new HashMap<Miner, Integer>();

                @Override
                public void start(Miner miner) {
                }

                @Override
                public void end(Miner miner) {
                }

                @Override
                public void stop(Miner miner) {
                }

                @Override
                public void tasksTotal(Miner miner, int count) {
                    totals.put(miner, count);
                }

                @Override
                public void tasksProcessed(Miner miner, int processed) {
                    Integer total = totals.get(miner);
                    reporter.note(miner.getName(),
                            "status: " + processed + "/" + ((total == null) ? "0" : total));
                }
            });
        }

        runner.run();
    } catch (ParameterException e) {
        reporter.error(e.getMiner().getName(), e.getMessage());
    } catch (ParseException e) {
        reporter.error("miner", "Parsing failed: " + e.getMessage());
        if (printTraces == true) {
            e.printStackTrace();
        }
    } catch (ClassNotFoundException e) {
        reporter.error("miner", "Failed to create a database connection: " + e.getMessage());
        if (printTraces == true) {
            e.printStackTrace();
        }
    } catch (SQLException e) {
        reporter.error("miner", "Failed to create a database connection: " + e.getMessage());
        if (printTraces == true) {
            e.printStackTrace();
        }
    } catch (MinerException e) {
        reporter.error("miner", "Mining Error: " + e.getMessage());
        if (printTraces == true) {
            e.printStackTrace();
        }
    } finally {
        if (pool != null) {
            pool.close();
        }
    }

    reporter.printSummary(true);
}
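
The bug-miner-option and src-miner-option flags above are first registered with the convenience addOption overload and then widened to an unbounded argument list via getOption(...).setArgs(Option.UNLIMITED_VALUES). A stripped-down sketch of that pattern, collecting hypothetical <name>:<value> pairs; the --kv option, the class name, and the map handling are assumptions, not taken from the subcat source:

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class MinerOptionSketch {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        // Register the option first, then allow an unbounded number of values,
        // mirroring the getOption(...).setArgs(...) pattern used above.
        options.addOption(null, "kv", true, "Hypothetical option. Format: <name>:<value>");
        options.getOption("kv").setArgs(Option.UNLIMITED_VALUES);

        // e.g. --kv timeout:30 retries:5
        CommandLine cmd = new PosixParser().parse(options, args);
        Map<String, String> params = new HashMap<String, String>();
        if (cmd.hasOption("kv")) {
            for (String pair : cmd.getOptionValues("kv")) {
                int sep = pair.indexOf(':');
                if (sep < 0) {
                    throw new ParseException("Invalid format, expected <name>:<value>: " + pair);
                }
                params.put(pair.substring(0, sep), pair.substring(sep + 1));
            }
        }
        System.out.println(params);
    }
}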

From source file:io.hops.tensorflow.CommonArguments.java

protected static Options createOptions() {
    Options opts = new Options();

    opts.addOption(PYTHON, true, "Path to custom Python binary; if not set, the default will be used");
    opts.addOption(ARGS, true,
            "Command line args for the application. Multiple args can be separated by empty space.");
    opts.getOption(ARGS).setArgs(Option.UNLIMITED_VALUES);
    opts.addOption(WORKERS, true, "Number of workers");
    opts.addOption(PSES, true, "Number of parameter servers");
    opts.addOption(ENV, true, "Environment for Python application. Specified as env_key=env_val pairs");
    opts.addOption(TENSORBOARD, false, "Enable TensorBoard, one for each worker");

    opts.addOption(MEMORY, true, "Amount of memory in MB to be requested to run the TF application");
    opts.addOption(VCORES, true, "Number of virtual cores to be requested to run the TF application");
    opts.addOption(GPUS, true, "Number of GPUs for each TF application container");
    opts.addOption(PROTOCOL, true, "Specify the protocol parameter for tf.train.Server()");
    // opts.addOption(PRIORITY, true, "Priority for the Python application containers");
    opts.addOption(ALLOCATION_TIMEOUT, true, "Container allocation timeout in seconds");

    opts.addOption(DEBUG, false, "Dump out debug information");
    opts.addOption(HELP, false, "Print usage");
    return opts;
}
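
Once ARGS is widened to Option.UNLIMITED_VALUES, each whitespace-separated token passed to it becomes a separate value in the parsed CommandLine. A minimal standalone sketch of reading them back, assuming ARGS resolves to the option name "args" (the parser choice and class name are assumptions, not part of the Hops source):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ArgsOptionSketch {
    public static void main(String[] cliArgs) throws ParseException {
        Options opts = new Options();
        opts.addOption("args", true, "Command line args for the application");
        opts.getOption("args").setArgs(Option.UNLIMITED_VALUES);

        // e.g. --args input.csv output.csv 10
        CommandLine cmd = new GnuParser().parse(opts, cliArgs);
        String[] appArgs = cmd.hasOption("args") ? cmd.getOptionValues("args") : new String[0];
        System.out.println("forwarding " + appArgs.length + " application argument(s)");
    }
}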

From source file:com.zimbra.common.localconfig.LocalConfigUpgrade.java

private static Options getAllOptions() {
    Options options = new Options();
    options.addOption(O_HELP, "help", false, "print usage");
    options.addOption(O_CONFIG, "config", true, "path to localconfig.xml");
    options.addOption(O_TAG, "tag", true, "backup and tag with this suffix");
    Option bugOpt = new Option(O_BUG, "bug", true, "bug number this upgrade is for (multiple allowed)");
    bugOpt.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(bugOpt);
    return options;
}
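
A hedged sketch of consuming the multi-valued bug option defined above; the "b" short name, the parser choice, and the loop are assumptions rather than part of the Zimbra source:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class BugOptionSketch {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        Option bugOpt = new Option("b", "bug", true, "bug number this upgrade is for (multiple allowed)");
        bugOpt.setArgs(Option.UNLIMITED_VALUES);
        options.addOption(bugOpt);

        // e.g. -b 12345 23456 34567
        CommandLine cmd = new GnuParser().parse(options, args);
        if (cmd.hasOption("bug")) {
            for (String bug : cmd.getOptionValues("bug")) {
                System.out.println("upgrading for bug " + bug);
            }
        }
    }
}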

From source file:com.emc.ecs.util.OptionBuilder.java

public OptionBuilder hasArgs() {
    argNum = Option.UNLIMITED_VALUES;
    return this;
}
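
The hasArgs() method above only records Option.UNLIMITED_VALUES in the builder's argNum field. A hypothetical sketch of how the rest of such a fluent builder might apply it when the Option is finally built; all other fields and methods here are assumptions, not part of the EMC source:

import org.apache.commons.cli.Option;

public class OptionBuilderSketch {
    private String opt;
    private String longOpt;
    private String description;
    private int argNum; // 0, a fixed count, or Option.UNLIMITED_VALUES

    public OptionBuilderSketch option(String opt) { this.opt = opt; return this; }
    public OptionBuilderSketch longOption(String longOpt) { this.longOpt = longOpt; return this; }
    public OptionBuilderSketch description(String description) { this.description = description; return this; }

    public OptionBuilderSketch hasArgs() {
        argNum = Option.UNLIMITED_VALUES;
        return this;
    }

    public Option build() {
        Option o = new Option(opt, longOpt, argNum != 0, description);
        o.setArgs(argNum); // forwards UNLIMITED_VALUES (or a fixed count) to the Option
        return o;
    }
}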