Example usage for org.apache.commons.cli Option Option

List of usage examples for org.apache.commons.cli Option Option

Introduction

On this page you can find example usage for the org.apache.commons.cli Option constructor.

Prototype

public Option(String opt, String longOpt, boolean hasArg, String description) throws IllegalArgumentException 

Document

Creates an Option using the specified parameters.
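
For quick reference, here is a minimal, self-contained sketch of this constructor in use. The class name and sample arguments are made up for illustration, and it assumes Commons CLI 1.3+ for DefaultParser; several of the examples below use the older PosixParser/GnuParser instead.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class OptionConstructorExample {
    public static void main(String[] args) throws ParseException {
        // Option(opt, longOpt, hasArg, description): short name, long name,
        // whether the option takes a value, and the help text.
        Option verbose = new Option("v", "verbose", false, "print extra progress information");
        Option output = new Option("o", "output", true, "write results to the given file");

        Options options = new Options();
        options.addOption(verbose);
        options.addOption(output);

        // Parse a sample command line (hypothetical arguments) instead of the real args.
        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options, new String[] { "-v", "--output", "out.txt" });

        if (cmd.hasOption("v")) {
            System.out.println("verbose mode enabled");
        }
        System.out.println("output file: " + cmd.getOptionValue("o", "default.txt"));
    }
}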

Usage

From source file:es.eucm.ead.exporter.ExporterMain.java

@SuppressWarnings("all")
public static void main(String args[]) {

    Options options = new Options();

    Option help = new Option("h", "help", false, "print this message");
    Option quiet = new Option("q", "quiet", false, "be extra quiet");
    Option verbose = new Option("v", "verbose", false, "be extra verbose");

    Option legacy = OptionBuilder.withArgName("s> <t").hasArgs(3)
            .withDescription(
                    "source is a version 1.x game; must specify\n" + "<simplify> if 'true', simplifies result\n"
                            + "<translate> if 'true', enables translation")
            .withLongOpt("import").create("i");

    Option war = OptionBuilder.withArgName("web-base").hasArg()
            .withDescription("WAR packaging (web app); " + "must specify\n<web-base> the base WAR directory")
            .withLongOpt("war").create("w");

    Option jar = OptionBuilder.withDescription("JAR packaging (desktop)").withLongOpt("jar").create("j");

    Option apk = OptionBuilder.withArgName("props> <adk> <d").hasArgs(3)
            .withDescription("APK packaging (android); must specify \n" + "<props> (a properties file) \n"
                    + "<adk> (location of the ADK to use) \n" + "<deploy> ('true' to install & deploy)")
            .withLongOpt("apk").create("a");

    // EAD option
    Option ead = OptionBuilder.withDescription("EAD packaging (eAdventure)").withLongOpt("ead").create("e");

    options.addOption(legacy);
    options.addOption(help);
    options.addOption(quiet);
    options.addOption(verbose);
    options.addOption(jar);
    options.addOption(war);
    options.addOption(apk);
    options.addOption(ead);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException pe) {
        System.err.println("Error parsing command-line: " + pe.getMessage());
        showHelp(options);
        return;
    }

    // general options

    String[] extras = cmd.getArgs();

    if (cmd.hasOption(help.getOpt()) || extras.length < 2) {
        showHelp(options);
        return;
    }
    if (cmd.hasOption(verbose.getOpt())) {
        verbosity = Verbose;
    }
    if (cmd.hasOption(quiet.getOpt())) {
        verbosity = Quiet;
    }

    // import

    String source = extras[0];

    // optional import step
    if (cmd.hasOption(legacy.getOpt())) {
        String[] values = cmd.getOptionValues(legacy.getOpt());

        AdventureConverter converter = new AdventureConverter();
        if (values.length > 0 && values[0].equalsIgnoreCase("true")) {
            converter.setEnableSimplifications(true);
        }
        if (values.length > 1 && values[1].equalsIgnoreCase("true")) {
            converter.setEnableTranslations(true);
        }

        // set source for next steps to import-target
        source = converter.convert(source, null);
    }

    if (cmd.hasOption(jar.getOpt())) {
        if (checkFilesExist(cmd, options, source)) {
            JarExporter e = new JarExporter();
            e.export(source, extras[1], verbosity.equals(Quiet) ? new QuietStream() : System.err);
        }
    } else if (cmd.hasOption(apk.getOpt())) {
        String[] values = cmd.getOptionValues(apk.getOpt());
        if (checkFilesExist(cmd, options, values[0], values[1], source)) {
            AndroidExporter e = new AndroidExporter();
            Properties props = new Properties();
            File propsFile = new File(values[0]);
            try {
                props.load(new FileReader(propsFile));
                props.setProperty(AndroidExporter.SDK_HOME,
                        props.getProperty(AndroidExporter.SDK_HOME, values[1]));
            } catch (IOException ioe) {
                System.err.println("Could not load properties from " + propsFile.getAbsolutePath());
                return;
            }
            e.export(source, extras[1], props, values.length > 2 && values[2].equalsIgnoreCase("true"));
        }
    } else if (cmd.hasOption(war.getOpt())) {
        if (checkFilesExist(cmd, options, extras[0])) {
            WarExporter e = new WarExporter();
            e.setWarPath(cmd.getOptionValue(war.getOpt()));
            e.export(source, extras[1]);
        }
    } else if (cmd.hasOption(ead.getOpt())) {
        String destiny = extras[1];
        if (!destiny.endsWith(".ead")) {
            destiny += ".ead";
        }
        FileUtils.zip(new File(destiny), new File(source));
    } else {
        showHelp(options);
    }
}

From source file:es.tid.fiware.fiwareconnectors.cygnus.nodes.CygnusApplication.java

/**
 * Main application to be run when this CygnusApplication is invoked. The only differences from the original one
 * are that CygnusApplication is used instead of Application, and that a Management Interface port option is
 * accepted on the command line.
 * @param args
 */
public static void main(String[] args) {
    try {
        Options options = new Options();

        Option option = new Option("n", "name", true, "the name of this agent");
        option.setRequired(true);
        options.addOption(option);

        option = new Option("f", "conf-file", true, "specify a conf file");
        option.setRequired(true);
        options.addOption(option);

        option = new Option(null, "no-reload-conf", false, "do not reload " + "conf file if changed");
        options.addOption(option);

        option = new Option("h", "help", false, "display help text");
        options.addOption(option);

        option = new Option("p", "mgmt-if-port", true, "the management interface port");
        option.setRequired(false);
        options.addOption(option);

        CommandLineParser parser = new GnuParser();
        CommandLine commandLine = parser.parse(options, args);

        File configurationFile = new File(commandLine.getOptionValue('f'));
        String agentName = commandLine.getOptionValue('n');
        boolean reload = !commandLine.hasOption("no-reload-conf");

        if (commandLine.hasOption('h')) {
            new HelpFormatter().printHelp("flume-ng agent", options, true);
            return;
        } // if

        int mgmtIfPort = 8081; // default value

        if (commandLine.hasOption('p')) {
            mgmtIfPort = new Integer(commandLine.getOptionValue('p')).intValue();
        } // if

        // the following is to ensure that by default the agent will fail on startup if the file does not exist

        if (!configurationFile.exists()) {
            // if command line invocation, then need to fail fast
            if (System.getProperty(Constants.SYSPROP_CALLED_FROM_SERVICE) == null) {
                String path = configurationFile.getPath();

                try {
                    path = configurationFile.getCanonicalPath();
                } catch (IOException ex) {
                    logger.error("Failed to read canonical path for file: " + path, ex);
                } // try catch

                throw new ParseException("The specified configuration file does not exist: " + path);
            } // if
        } // if

        List<LifecycleAware> components = Lists.newArrayList();
        CygnusApplication application;

        if (reload) {
            EventBus eventBus = new EventBus(agentName + "-event-bus");
            PollingPropertiesFileConfigurationProvider configurationProvider = new PollingPropertiesFileConfigurationProvider(
                    agentName, configurationFile, eventBus, 30);
            components.add(configurationProvider);
            application = new CygnusApplication(components, mgmtIfPort);
            eventBus.register(application);
        } else {
            PropertiesFileConfigurationProvider configurationProvider = new PropertiesFileConfigurationProvider(
                    agentName, configurationFile);
            application = new CygnusApplication(mgmtIfPort);
            application.handleConfigurationEvent(configurationProvider.getConfiguration());
        } // if else

        application.start();

        final CygnusApplication appReference = application;
        Runtime.getRuntime().addShutdownHook(new Thread("agent-shutdown-hook") {
            @Override
            public void run() {
                appReference.stop();
            } // run
        });
    } catch (Exception e) {
        logger.error("A fatal error occurred while running. Exception follows.", e);
    } // try catch
}

From source file:edu.ksu.cis.indus.staticanalyses.concurrency.escape.EscapeAndReadWriteCLI.java

/**
 * The entry point to this class.
 * 
 * @param args command line arguments.
 * @throws RuntimeException when escape information and side-effect information calculation fails.
 */
public static void main(final String[] args) {
    final Options _options = new Options();
    Option _option = new Option("h", "help", false, "Display message.");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("p", "soot-classpath", false, "Prepend this to soot class path.");
    _option.setArgs(1);
    _option.setArgName("classpath");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("S", "scope", true, "The scope that should be analyzed.");
    _option.setArgs(1);
    _option.setArgName("scope");
    _option.setRequired(false);
    _options.addOption(_option);

    final CommandLineParser _parser = new GnuParser();

    try {
        final CommandLine _cl = _parser.parse(_options, args);

        if (_cl.hasOption("h")) {
            final String _cmdLineSyn = "java " + EscapeAndReadWriteCLI.class.getName()
                    + " <options> <classnames>";
            (new HelpFormatter()).printHelp(_cmdLineSyn, _options);
            System.exit(1);
        }

        if (_cl.getArgList().isEmpty()) {
            throw new MissingArgumentException("Please specify at least one class.");
        }

        final EscapeAndReadWriteCLI _cli = new EscapeAndReadWriteCLI();

        if (_cl.hasOption('p')) {
            _cli.addToSootClassPath(_cl.getOptionValue('p'));
        }

        if (_cl.hasOption('S')) {
            _cli.setScopeSpecFile(_cl.getOptionValue('S'));
        }
        _cli.setClassNames(_cl.getArgList());
        _cli.<ITokens>execute();
    } catch (final ParseException _e) {
        LOGGER.error("Error while parsing command line.", _e);
        System.out.println("Error while parsing command line." + _e);
        final String _cmdLineSyn = "java " + EscapeAndReadWriteCLI.class.getName() + " <options> <classnames>";
        (new HelpFormatter()).printHelp(_cmdLineSyn, "Options are:", _options, "");
    } catch (final Throwable _e) {
        LOGGER.error("Beyond our control. May day! May day!", _e);
        throw new RuntimeException(_e);
    }
}

From source file:edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques.java

/**
 * @param args
 * @throws ParseException 
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the relationship " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option g1Option = new Option("g1", "first-group", true, "set first group of datasets");
    g1Option.setRequired(true);
    g1Option.setArgName("FIRST GROUP");
    g1Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g1Option);

    Option g2Option = new Option("g2", "second-group", true, "set second group of datasets");
    g2Option.setRequired(false);
    g2Option.setArgName("SECOND GROUP");
    g2Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g2Option);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar "
                        + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar "
                        + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> firstGroup = new ArrayList<String>();
    ArrayList<String> secondGroup = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();

    boolean removeExistingFiles = cmd.hasOption("f");

    String[] firstGroupCmd = cmd.getOptionValues("g1");
    String[] secondGroupCmd = cmd.hasOption("g2") ? cmd.getOptionValues("g2") : new String[0];
    addDatasets(firstGroupCmd, firstGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);
    addDatasets(secondGroupCmd, secondGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    if (firstGroup.isEmpty()) {
        System.out.println("First group of datasets (G1) is empty. " + "Doing G1 = G2.");
        firstGroup.addAll(secondGroup);
    }

    if (secondGroup.isEmpty()) {
        System.out.println("Second group of datasets (G2) is empty. " + "Doing G2 = G1.");
        secondGroup.addAll(firstGroup);
    }

    // getting dataset ids

    String datasetNames = "";
    String datasetIds = "";
    HashMap<String, String> datasetId = new HashMap<String, String>();
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        datasetId.put(it.next(), null);
    }

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    String firstGroupStr = "";
    String secondGroupStr = "";
    for (String dataset : firstGroup) {
        firstGroupStr += datasetId.get(dataset) + ",";
    }
    for (String dataset : secondGroup) {
        secondGroupStr += datasetId.get(dataset) + ",";
    }
    firstGroupStr = firstGroupStr.substring(0, firstGroupStr.length() - 1);
    secondGroupStr = secondGroupStr.substring(0, secondGroupStr.length() - 1);

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.correlationTechniquesDir, s3conf, s3);

    String dataAttributesInputDirs = "";
    String noRelationship = "";

    HashSet<String> dirs = new HashSet<String>();

    String dataset1;
    String dataset2;
    String datasetId1;
    String datasetId2;
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            datasetId1 = datasetId.get(dataset1);
            datasetId2 = datasetId.get(dataset2);

            if (dataset1.equals(dataset2))
                continue;
            String correlationOutputFileName = s3bucket + FrameworkUtils.correlationTechniquesDir + "/"
                    + dataset1 + "-" + dataset2 + "/";

            if (removeExistingFiles) {
                FrameworkUtils.removeFile(correlationOutputFileName, s3conf, s3);
            }
            if (!FrameworkUtils.fileExists(correlationOutputFileName, s3conf, s3)) {
                dirs.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset1);
                dirs.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset2);
            } else {
                noRelationship += datasetId1 + "-" + datasetId2 + ",";
            }
        }
    }

    if (dirs.isEmpty()) {
        System.out.println("All the relationships were already computed.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    for (String dir : dirs) {
        dataAttributesInputDirs += dir + ",";
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "correlation";
    String correlationOutputDir = s3bucket + FrameworkUtils.correlationTechniquesDir + "/tmp/";

    FrameworkUtils.removeFile(correlationOutputDir, s3conf, s3);

    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg", datasetAgg.get(shortDataset.get(i)));
    }
    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg-size",
                Integer.toString(datasetAgg.get(shortDataset.get(i)).split(",").length));
    }
    conf.set("dataset-keys", datasetIds);
    conf.set("dataset-names", datasetNames);
    conf.set("first-group", firstGroupStr);
    conf.set("second-group", secondGroupStr);
    conf.set("main-dataset-id", datasetId.get(shortDataset.get(0)));
    if (noRelationship.length() > 0) {
        conf.set("no-relationship", noRelationship.substring(0, noRelationship.length() - 1));
    }

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");
    conf.set("mapreduce.task.timeout", "2400000");

    if (s3) {
        machineConf.setMachineConfiguration(conf);
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        conf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    Job job = new Job(conf);
    job.setJobName(jobName);

    job.setMapOutputKeyClass(PairAttributeWritable.class);
    job.setMapOutputValueClass(SpatioTemporalValueWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(CorrelationTechniquesMapper.class);
    job.setReducerClass(CorrelationTechniquesReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());

    job.setInputFormatClass(SequenceFileInputFormat.class);
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);

    FileInputFormat.setInputDirRecursive(job, true);
    FileInputFormat.setInputPaths(job,
            dataAttributesInputDirs.substring(0, dataAttributesInputDirs.length() - 1));
    FileOutputFormat.setOutputPath(job, new Path(correlationOutputDir));

    job.setJarByClass(CorrelationTechniques.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            if (dataset1.equals(dataset2))
                continue;

            String from = s3bucket + FrameworkUtils.correlationTechniquesDir + "/tmp/" + dataset1 + "-"
                    + dataset2 + "/";
            String to = s3bucket + FrameworkUtils.correlationTechniquesDir + "/" + dataset1 + "-" + dataset2
                    + "/";
            FrameworkUtils.renameFile(from, to, s3conf, s3);
        }
    }
}

From source file:edu.ksu.cis.indus.staticanalyses.concurrency.DeadlockAnalysisCLI.java

/**
 * The entry point to this class.
 * 
 * @param args command line arguments.
 * @throws RuntimeException when escape information and side-effect information calculation fails.
 */
public static void main(final String[] args) {
    final Options _options = new Options();
    Option _option = new Option("h", "help", false, "Display message.");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("p", "soot-classpath", false, "Prepend this to soot class path.");
    _option.setArgs(1);
    _option.setArgName("classpath");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("S", "scope", true, "The scope that should be analyzed.");
    _option.setArgs(1);
    _option.setArgName("scope");
    _option.setRequired(false);
    _options.addOption(_option);

    final CommandLineParser _parser = new GnuParser();

    try {
        final CommandLine _cl = _parser.parse(_options, args);

        if (_cl.hasOption("h")) {
            final String _cmdLineSyn = "java " + DeadlockAnalysisCLI.class.getName()
                    + " <options> <classnames>";
            (new HelpFormatter()).printHelp(_cmdLineSyn, _options);
            System.exit(1);
        }

        if (_cl.getArgList().isEmpty()) {
            throw new MissingArgumentException("Please specify at least one class.");
        }

        final DeadlockAnalysisCLI _cli = new DeadlockAnalysisCLI();

        if (_cl.hasOption('p')) {
            _cli.addToSootClassPath(_cl.getOptionValue('p'));
        }

        if (_cl.hasOption('S')) {
            _cli.setScopeSpecFile(_cl.getOptionValue('S'));
        }
        _cli.setClassNames(_cl.getArgList());
        _cli.<ITokens>execute();
    } catch (final ParseException _e) {
        LOGGER.error("Error while parsing command line.", _e);
        System.out.println("Error while parsing command line." + _e);
        final String _cmdLineSyn = "java " + DeadlockAnalysisCLI.class.getName() + " <options> <classnames>";
        (new HelpFormatter()).printHelp(_cmdLineSyn, "Options are:", _options, "");
    } catch (final Throwable _e) {
        LOGGER.error("Beyond our control. May day! May day!", _e);
        throw new RuntimeException(_e);
    }
}

From source file:edu.nyu.vida.data_polygamy.relationship_computation.Relationship.java

/**
 * @param args
 * @throws ParseException 
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the relationship " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option scoreOption = new Option("sc", "score", true, "set threshold for relationship score");
    scoreOption.setRequired(false);
    scoreOption.setArgName("SCORE THRESHOLD");
    options.addOption(scoreOption);

    Option strengthOption = new Option("st", "strength", true, "set threshold for relationship strength");
    strengthOption.setRequired(false);
    strengthOption.setArgName("STRENGTH THRESHOLD");
    options.addOption(strengthOption);

    Option completeRandomizationOption = new Option("c", "complete-randomization", false,
            "use complete randomization when performing significance tests");
    completeRandomizationOption.setRequired(false);
    options.addOption(completeRandomizationOption);

    Option idOption = new Option("id", "ids", false, "output id instead of names for datasets and attributes");
    idOption.setRequired(false);
    options.addOption(idOption);

    Option g1Option = new Option("g1", "first-group", true, "set first group of datasets");
    g1Option.setRequired(true);
    g1Option.setArgName("FIRST GROUP");
    g1Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g1Option);

    Option g2Option = new Option("g2", "second-group", true, "set second group of datasets");
    g2Option.setRequired(false);
    g2Option.setArgName("SECOND GROUP");
    g2Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g2Option);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    Option removeOption = new Option("r", "remove-not-significant", false,
            "remove relationships that are not " + "significant from the final output");
    removeOption.setRequired(false);
    options.addOption(removeOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.relationship_computation.Relationship",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> firstGroup = new ArrayList<String>();
    ArrayList<String> secondGroup = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();

    boolean removeNotSignificant = cmd.hasOption("r");
    boolean removeExistingFiles = cmd.hasOption("f");
    boolean completeRandomization = cmd.hasOption("c");
    boolean hasScoreThreshold = cmd.hasOption("sc");
    boolean hasStrengthThreshold = cmd.hasOption("st");
    boolean outputIds = cmd.hasOption("id");
    String scoreThreshold = hasScoreThreshold ? cmd.getOptionValue("sc") : "";
    String strengthThreshold = hasStrengthThreshold ? cmd.getOptionValue("st") : "";

    // all datasets
    ArrayList<String> all_datasets = new ArrayList<String>();
    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        all_datasets.add(line.split("\t")[0]);
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();
    String[] all_datasets_array = new String[all_datasets.size()];
    all_datasets.toArray(all_datasets_array);

    String[] firstGroupCmd = cmd.getOptionValues("g1");
    String[] secondGroupCmd = cmd.hasOption("g2") ? cmd.getOptionValues("g2") : all_datasets_array;
    addDatasets(firstGroupCmd, firstGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);
    addDatasets(secondGroupCmd, secondGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    if (firstGroup.isEmpty()) {
        System.out.println("No indices from datasets in G1.");
        System.exit(0);
    }

    if (secondGroup.isEmpty()) {
        System.out.println("No indices from datasets in G2.");
        System.exit(0);
    }

    // getting dataset ids

    String datasetNames = "";
    String datasetIds = "";
    HashMap<String, String> datasetId = new HashMap<String, String>();
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        datasetId.put(it.next(), null);
    }

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    br = new BufferedReader(new InputStreamReader(fs.open(path)));
    line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        all_datasets.add(dt[0]);
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    String firstGroupStr = "";
    String secondGroupStr = "";
    for (String dataset : firstGroup) {
        firstGroupStr += datasetId.get(dataset) + ",";
    }
    for (String dataset : secondGroup) {
        secondGroupStr += datasetId.get(dataset) + ",";
    }
    firstGroupStr = firstGroupStr.substring(0, firstGroupStr.length() - 1);
    secondGroupStr = secondGroupStr.substring(0, secondGroupStr.length() - 1);

    String relationshipsDir = "";
    if (outputIds) {
        relationshipsDir = FrameworkUtils.relationshipsIdsDir;
    } else {
        relationshipsDir = FrameworkUtils.relationshipsDir;
    }

    FrameworkUtils.createDir(s3bucket + relationshipsDir, s3conf, s3);

    String random = completeRandomization ? "complete" : "restricted";

    String indexInputDirs = "";
    String noRelationship = "";

    HashSet<String> dirs = new HashSet<String>();

    String dataset1;
    String dataset2;
    String datasetId1;
    String datasetId2;
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            datasetId1 = datasetId.get(dataset1);
            datasetId2 = datasetId.get(dataset2);

            if (dataset1.equals(dataset2))
                continue;
            String correlationOutputFileName = s3bucket + relationshipsDir + "/" + dataset1 + "-" + dataset2
                    + "/";

            if (removeExistingFiles) {
                FrameworkUtils.removeFile(correlationOutputFileName, s3conf, s3);
            }
            if (!FrameworkUtils.fileExists(correlationOutputFileName, s3conf, s3)) {
                dirs.add(s3bucket + FrameworkUtils.indexDir + "/" + dataset1);
                dirs.add(s3bucket + FrameworkUtils.indexDir + "/" + dataset2);
            } else {
                noRelationship += datasetId1 + "-" + datasetId2 + ",";
            }
        }
    }

    if (dirs.isEmpty()) {
        System.out.println("All the relationships were already computed.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    for (String dir : dirs) {
        indexInputDirs += dir + ",";
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "relationship" + "-" + random;
    String relationshipOutputDir = s3bucket + relationshipsDir + "/tmp/";

    FrameworkUtils.removeFile(relationshipOutputDir, s3conf, s3);

    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg", datasetAgg.get(shortDataset.get(i)));
    }
    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg-size",
                Integer.toString(datasetAgg.get(shortDataset.get(i)).split(",").length));
    }
    conf.set("dataset-keys", datasetIds);
    conf.set("dataset-names", datasetNames);
    conf.set("first-group", firstGroupStr);
    conf.set("second-group", secondGroupStr);
    conf.set("complete-random", String.valueOf(completeRandomization));
    conf.set("output-ids", String.valueOf(outputIds));
    conf.set("complete-random-str", random);
    conf.set("main-dataset-id", datasetId.get(shortDataset.get(0)));
    conf.set("remove-not-significant", String.valueOf(removeNotSignificant));
    if (noRelationship.length() > 0) {
        conf.set("no-relationship", noRelationship.substring(0, noRelationship.length() - 1));
    }
    if (hasScoreThreshold) {
        conf.set("score-threshold", scoreThreshold);
    }
    if (hasStrengthThreshold) {
        conf.set("strength-threshold", strengthThreshold);
    }

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");
    conf.set("mapreduce.task.timeout", "2400000");

    if (s3) {
        machineConf.setMachineConfiguration(conf);
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        conf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    Job job = new Job(conf);
    job.setJobName(jobName);

    job.setMapOutputKeyClass(PairAttributeWritable.class);
    job.setMapOutputValueClass(TopologyTimeSeriesWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(CorrelationMapper.class);
    job.setReducerClass(CorrelationReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());

    job.setInputFormatClass(SequenceFileInputFormat.class);
    //job.setOutputFormatClass(TextOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);

    FileInputFormat.setInputDirRecursive(job, true);
    FileInputFormat.setInputPaths(job, indexInputDirs.substring(0, indexInputDirs.length() - 1));
    FileOutputFormat.setOutputPath(job, new Path(relationshipOutputDir));

    job.setJarByClass(Relationship.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            if (dataset1.equals(dataset2))
                continue;

            String from = s3bucket + relationshipsDir + "/tmp/" + dataset1 + "-" + dataset2 + "/";
            String to = s3bucket + relationshipsDir + "/" + dataset1 + "-" + dataset2 + "/";
            FrameworkUtils.renameFile(from, to, s3conf, s3);
        }
    }
}

From source file:edu.ksu.cis.indus.staticanalyses.callgraphs.CallGraphXMLizerCLI.java

/**
 * The entry point to the program via command line.
 *
 * @param args is the command line arguments.
 * @throws RuntimeException when the analyses fail.
 */
public static void main(final String[] args) {
    final Options _options = new Options();
    Option _option = new Option("c", "cumulative", false,
            "Builds one call graph that includes all root methods.");
    _options.addOption(_option);
    _option = new Option("o", "output", true,
            "Directory into which xml files will be written into.  Defaults to current directory if omitted");
    _option.setArgs(1);
    _option.setArgName("output-dir");
    _options.addOption(_option);
    _option = new Option("j", "jimple", false, "Dump xmlized jimple.");
    _option.setArgName("dump-jimple");
    _options.addOption(_option);
    _option = new Option("p", "soot-classpath", true, "Prepend this to soot class path.");
    _option.setArgs(1);
    _option.setArgName("classpath");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("h", "help", false, "Display message.");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("t", "call-graph-type", true,
            "Call graph type.  This has to be one of {cha, rta, ofa-oi, " + "ofa-oirt, ofa-os}.");
    _option.setArgs(1);
    _option.setArgName("type");
    _option.setRequired(true);
    _options.addOption(_option);
    _option = new Option("S", "scope", true, "The scope that should be analyzed.");
    _option.setArgs(1);
    _option.setArgName("scope");
    _option.setRequired(false);
    _options.addOption(_option);

    final PosixParser _parser = new PosixParser();

    try {
        final CommandLine _cl = _parser.parse(_options, args);

        if (_cl.hasOption('h')) {
            printUsage(_options);
            System.exit(1);
        }

        String _outputDir = _cl.getOptionValue('o');

        if (_outputDir == null) {
            if (LOGGER.isWarnEnabled()) {
                LOGGER.warn("Defaulting to current directory for output.");
            }
            _outputDir = ".";
        }

        if (_cl.getArgList().isEmpty()) {
            throw new MissingArgumentException("Please specify at least one class.");
        }

        final CallGraphXMLizerCLI _cli = new CallGraphXMLizerCLI();

        _cli.xmlizer.setXmlOutputDir(_outputDir);
        _cli.xmlizer.setGenerator(new UniqueJimpleIDGenerator());
        _cli.setCumulative(_cl.hasOption('c'));
        _cli.setClassNames(_cl.getArgList());
        _cli.addToSootClassPath(_cl.getOptionValue('p'));

        if (_cl.hasOption('S')) {
            _cli.setScopeSpecFile(_cl.getOptionValue('S'));
        }

        _cli.initialize();

        _cli.execute(_cl.hasOption('j'), _cl.getOptionValue('t'));
    } catch (final ParseException _e) {
        LOGGER.error("Error while parsing command line.", _e);
        System.out.println("Error while parsing command line." + _e);
        printUsage(_options);
    } catch (final Throwable _e) {
        LOGGER.error("Beyond our control. May day! May day!", _e);
        throw new RuntimeException(_e);
    }
}

From source file:edu.ksu.cis.indus.staticanalyses.concurrency.independence.IndependenceDetectionCLI.java

/**
 * The entry point to the program via command line.
 *
 * @param args is the command line arguments.
 * @throws RuntimeException when CLI fails.
 */
public static void main(final String[] args) {
    final Options _options = new Options();
    Option _option = new Option("o", "output", true,
            "Directory into which jimple files will be written into. [required]");
    _option.setArgs(1);
    _option.setArgName("ouput-directory");
    _option.setRequired(true);
    _options.addOption(_option);
    _option = new Option("h", "help", false, "Display message.");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("p", "soot-classpath", false, "Prepend this to soot class path.");
    _option.setArgs(1);
    _option.setArgName("classpath");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("useV2", false, "Use version 2 of the atomicity detection algorithm.");
    _options.addOption(_option);
    _option = new Option("scheme", false,
            "Scheme to indicate atomicity. Valid values are 'tag-stmt' and 'tag-region'.  By default, 'tag-stmt' "
                    + "scheme is used. ");
    _option.setArgs(1);
    _option.setArgName("scheme-name");
    _options.addOption(_option);
    _option = new Option("S", "scope", true, "The scope that should be analyzed.");
    _option.setArgs(1);
    _option.setArgName("scope");
    _option.setRequired(false);
    _options.addOption(_option);

    final CommandLineParser _parser = new GnuParser();

    try {
        final CommandLine _cl = _parser.parse(_options, args);

        if (_cl.hasOption("h")) {
            final String _cmdLineSyn = "java " + IndependenceDetectionCLI.class.getName()
                    + " <options> <classnames>";
            (new HelpFormatter()).printHelp(_cmdLineSyn, _options);
            System.exit(1);
        }

        if (_cl.getArgList().isEmpty()) {
            throw new MissingArgumentException("Please specify at least one class.");
        }

        final IndependenceDetectionCLI _cli;

        if (_cl.hasOption("useV2")) {
            _cli = new IndependenceDetectionCLI(new IndependentStmtDetectorv2());
        } else {
            _cli = new IndependenceDetectionCLI(new IndependentStmtDetector());
        }

        if (_cl.hasOption('p')) {
            _cli.addToSootClassPath(_cl.getOptionValue('p'));
        }

        if (_cl.hasOption('S')) {
            _cli.setScopeSpecFile(_cl.getOptionValue('S'));
        }

        _cli.setClassNames(_cl.getArgList());
        _cli.setOutputDir(_cl.getOptionValue('o'));
        _cli.<ITokens>execute(_cl);
    } catch (final ParseException _e) {
        LOGGER.error("Error while parsing command line.", _e);
        System.out.println("Error while parsing command line." + _e);
        final String _cmdLineSyn = "java " + IndependenceDetectionCLI.class.getName()
                + " <options> <classnames>";
        (new HelpFormatter()).printHelp(_cmdLineSyn, "Options are:", _options, "");
    } catch (final Throwable _e) {
        LOGGER.error("Beyond our control. May day! May day!", _e);
        throw new RuntimeException(_e);
    }
}

From source file:ca.uhn.hunit.run.TestRunner.java

/**
 * @param args
 * @throws URISyntaxException
 * @throws JAXBException
 * @throws ConfigurationException
 * @throws InterfaceWontStartException
 * @throws FileNotFoundException
 * @throws ParseException
 */
public static void main(String[] theArgs) throws URISyntaxException, JAXBException, InterfaceWontStartException,
        ConfigurationException, FileNotFoundException, ParseException {
    Options options = new Options();

    OptionGroup fileOptionGroup = new OptionGroup();
    fileOptionGroup.setRequired(false);

    Option option = new Option("f", "file", true, "The path to the file to load the test battery from");
    option.setValueSeparator('=');
    fileOptionGroup.addOption(option);
    option = new Option("c", "classpath", true, "The classpath path to the file to load the test battery from");
    option.setValueSeparator('=');
    fileOptionGroup.addOption(option);
    options.addOptionGroup(fileOptionGroup);

    OptionGroup uiOptionGroup = new OptionGroup();
    option = new Option("g", "gui", false, "Start hUnit in GUI mode (default)");
    uiOptionGroup.addOption(option);
    option = new Option("x", "text", false, "Start hUnit in Text mode");
    uiOptionGroup.addOption(option);
    options.addOptionGroup(uiOptionGroup);

    option = new Option("t", "tests", true, "A comma separated list of tests to execute (default is all)");
    option.setValueSeparator('=');
    option.setRequired(false);
    options.addOption(option);

    Resource defFile = null;
    CommandLine parser;
    boolean textMode = false;

    try {
        parser = new PosixParser().parse(options, theArgs);

        if (parser.hasOption("f")) {
            defFile = new FileSystemResource(parser.getOptionValue("f"));
        } else if (parser.hasOption("c")) {
            defFile = new ClassPathResource(parser.getOptionValue("c"));
        }

        if (parser.hasOption("x")) {
            textMode = true;
        }
    } catch (Exception e) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("java -jar hunit-[version]-jar-with-dependencies.jar [-c FILE|-f FILE] [options]",
                options);

        return;
    }

    String[] testsToExecute = null;

    if (parser.hasOption("t")) {
        testsToExecute = parser.getOptionValue("t").split(",");
    }

    if (textMode) {
        executeInTextMode(defFile, testsToExecute);
    } else {
        executeInGuiMode(defFile, testsToExecute);
    }
}

From source file:asl.seedscan.DQAWeb.java

public static void main(String args[]) {
    db = new MetricDatabase("", "", "");
    findConsoleHandler();
    consoleHandler.setLevel(Level.ALL);
    Logger.getLogger("").setLevel(Level.CONFIG);

    // Default locations of config and schema files
    File configFile = new File("dqaweb-config.xml");
    File schemaFile = new File("schemas/DQAWebConfig.xsd");
    boolean parseConfig = true;
    boolean testMode = false;

    ArrayList<File> schemaFiles = new ArrayList<File>();
    schemaFiles.add(schemaFile);
    // ==== Command Line Parsing ====
    Options options = new Options();
    Option opConfigFile = new Option("c", "config-file", true,
            "The config file to use for seedscan. XML format according to SeedScanConfig.xsd.");
    Option opSchemaFile = new Option("s", "schema-file", true,
            "The schame file which should be used to verify the config file format. ");
    Option opTest = new Option("t", "test", false, "Run in test console mode rather than as a servlet.");

    OptionGroup ogConfig = new OptionGroup();
    ogConfig.addOption(opConfigFile);

    OptionGroup ogSchema = new OptionGroup();
    ogSchema.addOption(opSchemaFile);

    OptionGroup ogTest = new OptionGroup();
    ogTest.addOption(opTest);

    options.addOptionGroup(ogConfig);
    options.addOptionGroup(ogSchema);
    options.addOptionGroup(ogTest);

    PosixParser optParser = new PosixParser();
    CommandLine cmdLine = null;
    try {
        cmdLine = optParser.parse(options, args, true);
    } catch (org.apache.commons.cli.ParseException e) {
        logger.severe("Error while parsing command-line arguments.");
        System.exit(1);
    }

    Option opt;
    Iterator iter = cmdLine.iterator();
    while (iter.hasNext()) {
        opt = (Option) iter.next();

        if (opt.getOpt().equals("c")) {
            configFile = new File(opt.getValue());
        } else if (opt.getOpt().equals("s")) {
            schemaFile = new File(opt.getValue());
        } else if (opt.getOpt().equals("t")) {
            testMode = true;
        }
    }
    String query = "";
    System.out.println("Entering Test Mode");
    System.out.println("Enter a query string to view results or type \"help\" for example query strings");
    InputStreamReader input = new InputStreamReader(System.in);
    BufferedReader reader = new BufferedReader(input);
    String result = "";

    while (testMode == true) {
        try {

            System.out.printf("Query: ");
            query = reader.readLine();
            if (query.equals("exit")) {
                testMode = false;
            } else if (query.equals("help")) {
                System.out.println("Need to add some help for people"); //TODO
            } else {
                result = processCommand(query);
            }
            System.out.println(result);
        } catch (IOException err) {
            System.err.println("Error reading line, in DQAWeb.java");
        }
    }
    System.err.printf("DONE.\n");
}