Example usage for org.apache.commons.cli Option setRequired

List of usage examples for org.apache.commons.cli Option setRequired

Introduction

On this page you can find example usages of org.apache.commons.cli Option setRequired.

Prototype

public void setRequired(boolean required) 

Document

Sets whether this Option is mandatory.
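
Before the project examples below, here is a minimal, self-contained sketch (written for this page, not taken from any of the listed projects) showing what setRequired controls: parsing fails with a ParseException (a MissingOptionException) when a required option is absent, while options left at setRequired(false) may be omitted. The option names "in" and "v" are made up for illustration.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class SetRequiredDemo {
    public static void main(String[] args) {
        Options options = new Options();

        // -in must be present on the command line
        Option in = new Option("in", true, "input file");
        in.setRequired(true);
        options.addOption(in);

        // -v may be omitted (false is also the default)
        Option verbose = new Option("v", false, "verbose output");
        verbose.setRequired(false);
        options.addOption(verbose);

        CommandLineParser parser = new DefaultParser();
        try {
            CommandLine cmd = parser.parse(options, args);
            System.out.println("in = " + cmd.getOptionValue("in"));
        } catch (ParseException e) {
            // reached as a MissingOptionException when -in is missing
            System.err.println(e.getMessage());
            new HelpFormatter().printHelp("SetRequiredDemo", options);
        }
    }
}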

Usage

From source file:com.aestel.chemistry.openEye.fp.Fingerprinter.java

public static void main(String... args) throws IOException {
    long start = System.currentTimeMillis();
    long iCounter = 0;

    // create command line Options object
    Options options = new Options();
    Option opt = new Option("in", true, "input file [.ism,.sdf,...]");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("out", true, "output file .tsv or oe-supported");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("idTag", true, "field with ID (default title)");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("fpType", true, "fingerPrintType: maccs|linear7|linear7*4|HashLinear7*4\n"
            + "   maccs: generate maccs keys\n"
            + "   linear7 generate 7 bonds long linear fingerprints (210k known rest hashed)\n"
            + "   linear7*4 linear 7 bonds if more than 4 atoms code atoms as * (5.1k known rest hashed)\n"
            + "   HashLinear7*4: as linear7*4 but hashed to 16k");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("format", true,
            "folded512|folded2048|bitList|fragList|none\n" + "   folded512/2048: hex encoded 512/2048 bits\n"
                    + "   bitList: list of bitpositions\n" + "   none: no fp output for use with writeCodeMap");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("writeCodeMap", false, "Overwrite the codeMap file at the end of processing");
    opt.setRequired(false);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String idTag = null;
    if (cmd.hasOption("idTag"))
        idTag = cmd.getOptionValue("idTag");

    String outformat = cmd.getOptionValue("format").toLowerCase().intern();
    if (args.length != 0) {
        exitWithHelp(options);
    }

    String type = cmd.getOptionValue("fpType");
    boolean updateDictionaryFile = cmd.hasOption("writeCodeMap");
    boolean hashUnknownFrag = true;
    if (type.equals("HashLinear7*4"))
        hashUnknownFrag = false;
    if (type.equals("maccs"))
        hashUnknownFrag = false;
    if (updateDictionaryFile)
        hashUnknownFrag = false;
    Fingerprinter fprinter = createFingerprinter(type, updateDictionaryFile, hashUnknownFrag);
    OEMolBase mol = new OEGraphMol();

    String inFile = cmd.getOptionValue("in");
    String outFile = cmd.getOptionValue("out");
    oemolistream ifs = new oemolistream(inFile);

    Runtime rt = Runtime.getRuntime();
    Outputter out;
    if (outFile.endsWith(".txt") || outFile.endsWith(".tab"))
        out = new TabOutputter(fprinter.getMapper(), outFile, outformat);
    else
        out = new OEOutputter(fprinter.getMapper(), outFile, type, outformat);

    while (oechem.OEReadMolecule(ifs, mol)) {
        iCounter++;
        Fingerprint fp = fprinter.getFingerprint(mol);

        String id;
        if (idTag == null)
            id = mol.GetTitle();
        else
            id = oechem.OEGetSDData(mol, idTag);

        if (iCounter % 100 == 0)
            System.err.print(".");
        if (iCounter % 4000 == 0) {
            System.err.printf(" %d %dsec\tt=%d f=%d u=%d m=%d tf=%d\n", iCounter,
                    (System.currentTimeMillis() - start) / 1000, rt.totalMemory() / 1024,
                    rt.freeMemory() / 1024, (rt.totalMemory() - rt.freeMemory()) / 1024, rt.maxMemory() / 1024,
                    (rt.freeMemory() + (rt.maxMemory() - rt.totalMemory())) / 1024);
        }

        out.output(id, mol, fp);
    }

    System.err.printf("Fingerprinter: Read %d structures in %d sec\n", iCounter,
            (System.currentTimeMillis() - start) / 1000);

    if (updateDictionaryFile)
        fprinter.writeDictionary();
    out.close();
    fprinter.close();
}
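
Several of the examples on this page call an exitWithHelp(options) helper whose body is not included in the listings. Its exact implementation differs per project, but a typical version looks roughly like the following sketch (an assumption for illustration, not the projects' actual code): it prints the usage text for all registered options via HelpFormatter and terminates the process.

private static void exitWithHelp(Options options) {
    // hypothetical helper: show usage for every option defined above, then stop
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("command [options]", options);
    System.exit(1);
}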

From source file:com.genentech.chemistry.tool.SDFSelectivityCalculator.java

/**
 * @param args
 */
public static void main(String... args) throws IOException { // create command line Options object
    Options options = new Options();
    Option opt = new Option(OPT_INFILE, true,
            "input file oe-supported Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_OUTFILE, true, "output file oe-supported. Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_NUMERATOR, true, "Name of the field containing the numerator value.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_NUMERATOR_OP, true,
            "Name of the field containing the numerator operator."
                    + " If this option is not set it is assumed that the operator" + " is included in the "
                    + OPT_NUMERATOR + " field.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_DENOMINATOR, true, "Name of the field containing the denominator value.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_DENOMINATOR_OP, true,
            "Name of the field containing the denominator operator."
                    + " If this option is not set it is assumed that the operator" + " is included in the "
                    + OPT_DENOMINATOR + " field.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_SELECTIVITY, true, "Name of the field containing the output selectivity value."
            + " If this option is not set, the field name of the denominator" + " is used.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_SELECTIVITY_OP, true,
            "Name of the field containing the output selectivity operator."
                    + " If this option is not set it is assumed that the operator" + " is included in the "
                    + OPT_SELECTIVITY + " field.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_OUTPUT_MODE, true,
            "Valid: separate, combine, combine+op;" + " If not specified, default is combine+op."
                    + " (separate=operator and value in separate fields;"
                    + " combine=operator and value in one field;"
                    + " combine+op=one field with the operator and value and"
                    + " plus one field with only the operator)");
    opt.setRequired(false);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String inFile = cmd.getOptionValue(OPT_INFILE);
    String outFile = cmd.getOptionValue(OPT_OUTFILE);
    String outputMode = cmd.getOptionValue(OPT_OUTPUT_MODE);
    String numeratorTag = cmd.getOptionValue(OPT_NUMERATOR);
    String numeratorOPTag = cmd.getOptionValue(OPT_NUMERATOR_OP);
    String denominatorTag = cmd.getOptionValue(OPT_DENOMINATOR);
    String denominatorOPTag = cmd.getOptionValue(OPT_DENOMINATOR_OP);
    String selectivityTag = cmd.getOptionValue(OPT_SELECTIVITY);
    String selectivityOPTag = cmd.getOptionValue(OPT_SELECTIVITY_OP);

    SDFSelectivityCalculator calculator = new SDFSelectivityCalculator(outFile);
    try {
        calculator.setParameters(outputMode, numeratorTag, numeratorOPTag, denominatorTag, denominatorOPTag,
                selectivityTag, selectivityOPTag);
        calculator.run(inFile);
    } catch (IncorrectInputException e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    } finally {
        calculator.close();
    }
}

From source file:ca.uqac.info.trace.conversion.TraceConverter.java

/**
 * Main program loop
 * @param args Command-line arguments
 */
@SuppressWarnings("static-access")
public static void main(String[] args) {
    // Default values
    String input_format = "xml", output_format = "smv";
    String input_filename = "trace.xml", output_filename = "";
    //String event_tag_name = "Event";

    // Define and process command line arguments
    Options options = new Options();
    HelpFormatter help_formatter = new HelpFormatter();
    Option opt;
    options.addOption("h", "help", false, "Show help");
    opt = OptionBuilder.withArgName("format").hasArg()
            .withDescription("Input format for trace. Accepted values are csv, xml.").create("i");
    opt.setRequired(false);
    options.addOption(opt);
    opt = OptionBuilder.withArgName("format").hasArg().withDescription(
            "Output format for trace. Accepted values are javamop, json, monpoly, smv, sql, xml. Default: smv")
            .create("t");
    opt.setRequired(false);
    options.addOption(opt);
    opt = OptionBuilder.withArgName("filename").hasArg().withDescription("Input filename. Default: trace.xml")
            .create("f");
    opt.setRequired(false);
    options.addOption(opt);
    opt = OptionBuilder.withArgName("filename").hasArg().withDescription("Output filename").create("o");
    opt.setRequired(false);
    options.addOption(opt);
    opt = OptionBuilder.withArgName("name").hasArg().withDescription("Event tag name. Default: Event")
            .create("e");
    opt.setRequired(false);
    options.addOption(opt);
    opt = OptionBuilder.withArgName("formula").hasArg().withDescription("Formula to translate").create("s");
    opt.setRequired(false);
    options.addOption(opt);
    CommandLine c_line = parseCommandLine(options, args);
    if (c_line.hasOption("h")) {
        help_formatter.printHelp(app_name, options);
        System.exit(0);
    }
    input_filename = c_line.getOptionValue("f");
    if (c_line.hasOption("o"))
        output_filename = c_line.getOptionValue("o");
    /*if (c_line.hasOption("e"))
      event_tag_name = c_line.getOptionValue("e");*/

    // Determine the input format
    if (!c_line.hasOption("i")) {
        // Guess output format by filename extension
        input_format = getExtension(input_filename);
    }
    if (c_line.hasOption("i")) {
        // The "t" parameter overrides the filename extension
        input_format = c_line.getOptionValue("i");
    }

    // Determine which trace reader to initialize
    TraceReader reader = initializeReader(input_format);
    if (reader == null) {
        System.err.println("ERROR: Unrecognized input format");
        System.exit(1);
    }

    // Instantiate the proper trace reader and checks that the trace exists
    //reader.setEventTagName(event_tag_name);
    File in_f = new File(input_filename);
    if (!in_f.exists()) {
        System.err.println("ERROR: Input file not found");
        System.exit(1);
    }
    if (!in_f.canRead()) {
        System.err.println("ERROR: Input file is not readable");
        System.exit(1);
    }

    // Determine the output format
    if (!c_line.hasOption("o") && !c_line.hasOption("t")) {
        System.err.println("ERROR: At least one of output filename and output format must be given");
        System.exit(1);
    }
    if (c_line.hasOption("o")) {
        // Guess output format by filename extension
        output_filename = c_line.getOptionValue("o");
        output_format = getExtension(output_filename);
    }
    if (c_line.hasOption("t")) {
        // The "t" parameter overrides the filename extension
        output_format = c_line.getOptionValue("t");
    }

    // Determine which translator to initialize
    Translator trans = initializeTranslator(output_format);
    if (trans == null) {
        System.err.println("ERROR: Unrecognized output format");
        System.exit(1);
    }

    // Translate the trace into the output format
    EventTrace trace = null;
    try {
        trace = reader.parseEventTrace(new FileInputStream(in_f));
    } catch (FileNotFoundException ex) {
        ex.printStackTrace();
        System.exit(1);
    }
    assert trace != null;
    trans.setTrace(trace);
    String out_trace = trans.translateTrace();
    if (output_filename.isEmpty())
        System.out.println(out_trace);
    else
        writeToFile(output_filename, out_trace);

    // Check if there is a formula to translate
    if (c_line.hasOption("s")) {
        String formula = c_line.getOptionValue("s");
        try {
            Operator o = Operator.parseFromString(formula);
            trans.setFormula(o);
            System.out.println(trans.translateFormula());
        } catch (Operator.ParseException e) {
            System.err.println("ERROR: parsing input formula");
            System.exit(1);
        }
    }
}

From source file:edu.ksu.cis.indus.staticanalyses.concurrency.independence.IndependenceDetectionCLI.java

/**
 * The entry point to the program via command line.
 *
 * @param args is the command line arguments.
 * @throws RuntimeException when CLI fails.
 */
public static void main(final String[] args) {
    final Options _options = new Options();
    Option _option = new Option("o", "output", true,
            "Directory into which jimple files will be written into. [required]");
    _option.setArgs(1);
    _option.setArgName("ouput-directory");
    _option.setRequired(true);
    _options.addOption(_option);
    _option = new Option("h", "help", false, "Display message.");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("p", "soot-classpath", false, "Prepend this to soot class path.");
    _option.setArgs(1);
    _option.setArgName("classpath");
    _option.setOptionalArg(false);
    _options.addOption(_option);
    _option = new Option("useV2", false, "Use version 2 of the atomicity detection algorithm.");
    _options.addOption(_option);
    _option = new Option("scheme", false,
            "Scheme to indicate atomicity. Valid values are 'tag-stmt' and 'tag-region'.  By default, 'tag-stmt' "
                    + "scheme is used. ");
    _option.setArgs(1);
    _option.setArgName("scheme-name");
    _options.addOption(_option);
    _option = new Option("S", "scope", true, "The scope that should be analyzed.");
    _option.setArgs(1);
    _option.setArgName("scope");
    _option.setRequired(false);
    _options.addOption(_option);

    final CommandLineParser _parser = new GnuParser();

    try {
        final CommandLine _cl = _parser.parse(_options, args);

        if (_cl.hasOption("h")) {
            final String _cmdLineSyn = "java " + IndependenceDetectionCLI.class.getName()
                    + " <options> <classnames>";
            (new HelpFormatter()).printHelp(_cmdLineSyn, _options);
            System.exit(1);
        }

        if (_cl.getArgList().isEmpty()) {
            throw new MissingArgumentException("Please specify at least one class.");
        }

        final IndependenceDetectionCLI _cli;

        if (_cl.hasOption("useV2")) {
            _cli = new IndependenceDetectionCLI(new IndependentStmtDetectorv2());
        } else {
            _cli = new IndependenceDetectionCLI(new IndependentStmtDetector());
        }

        if (_cl.hasOption('p')) {
            _cli.addToSootClassPath(_cl.getOptionValue('p'));
        }

        if (_cl.hasOption('S')) {
            _cli.setScopeSpecFile(_cl.getOptionValue('S'));
        }

        _cli.setClassNames(_cl.getArgList());
        _cli.setOutputDir(_cl.getOptionValue('o'));
        _cli.<ITokens>execute(_cl);
    } catch (final ParseException _e) {
        LOGGER.error("Error while parsing command line.", _e);
        System.out.println("Error while parsing command line." + _e);
        final String _cmdLineSyn = "java " + IndependenceDetectionCLI.class.getName()
                + " <options> <classnames>";
        (new HelpFormatter()).printHelp(_cmdLineSyn, "Options are:", _options, "");
    } catch (final Throwable _e) {
        LOGGER.error("Beyond our control. May day! May day!", _e);
        throw new RuntimeException(_e);
    }
}

From source file:fr.tpt.s3.mcdag.bench.MainBench.java

public static void main(String[] args) throws IOException, InterruptedException {

    // Command line options
    Options options = new Options();

    Option input = new Option("i", "input", true, "MC-DAG XML models");
    input.setRequired(true);
    input.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(input);

    Option output = new Option("o", "output", true, "Folder where results have to be written.");
    output.setRequired(true);
    options.addOption(output);

    Option uUti = new Option("u", "utilization", true, "Utilization.");
    uUti.setRequired(true);
    options.addOption(uUti);

    Option output2 = new Option("ot", "output-total", true, "File where total results are being written");
    output2.setRequired(true);
    options.addOption(output2);

    Option oCores = new Option("c", "cores", true, "Cores given to the test");
    oCores.setRequired(true);
    options.addOption(oCores);

    Option oLvls = new Option("l", "levels", true, "Levels tested for the system");
    oLvls.setRequired(true);
    options.addOption(oLvls);

    Option jobs = new Option("j", "jobs", true, "Number of threads to be launched.");
    jobs.setRequired(false);
    options.addOption(jobs);

    Option debug = new Option("d", "debug", false, "Debug logs.");
    debug.setRequired(false);
    options.addOption(debug);

    /*
     * Parsing of the command line
     */
    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        formatter.printHelp("Benchmarks MultiDAG", options);
        System.exit(1);
        return;
    }

    String inputFilePath[] = cmd.getOptionValues("input");
    String outputFilePath = cmd.getOptionValue("output");
    String outputFilePathTotal = cmd.getOptionValue("output-total");
    double utilization = Double.parseDouble(cmd.getOptionValue("utilization"));
    boolean boolDebug = cmd.hasOption("debug");
    int nbLvls = Integer.parseInt(cmd.getOptionValue("levels"));
    int nbJobs = 1;
    int nbFiles = inputFilePath.length;

    if (cmd.hasOption("jobs"))
        nbJobs = Integer.parseInt(cmd.getOptionValue("jobs"));

    int nbCores = Integer.parseInt(cmd.getOptionValue("cores"));

    /*
     *  While files need to be allocated
     *  run the tests in the pool of threads
     */

    // For dual-criticality systems we call a specific thread
    if (nbLvls == 2) {

        System.out.println(">>>>>>>>>>>>>>>>>>>>> NB levels " + nbLvls);

        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; FSched (%); FPreempts; FAct; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadDualCriticality bt2 = new BenchThreadDualCriticality(inputFilePath[i_files2], outFile,
                    nbCores, boolDebug);

            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int fedTotal = 0;
        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int fedPreempts = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int fedActiv = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;
        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;

                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            fedTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            fedPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            fedActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 11) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 12) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 13) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }

        // Write percentage
        double fedPerc = (double) fedTotal / nbFiles;
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double fedPercPreempts = (double) fedPreempts / fedActiv;
        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + fedPerc + "; "
                + fedPreempts + "; " + fedActiv + "; " + fedPercPreempts + "; " + laxPerc + "; " + laxPreempts
                + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts + "; "
                + edfActiv + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; "
                + hybridActiv + "; " + hybridPercPreempts + "\n");
        wOutput.close();

    } else if (nbLvls > 2) {
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadNLevels bt2 = new BenchThreadNLevels(inputFilePath[i_files2], outFile, nbCores,
                    boolDebug);

            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;
        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;

                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }

        // Write percentage
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + laxPerc + "; "
                + laxPreempts + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts
                + "; " + edfActiv + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; "
                + hybridActiv + "; " + hybridPercPreempts + "\n");
        wOutput.close();

    } else {
        System.err.println("Wrong number of levels");
        System.exit(-1);
    }

    System.out.println("[BENCH Main] Done benchmarking U = " + utilization + " Levels " + nbLvls);
}

From source file:com.genentech.chemistry.openEye.apps.QTorsionProfileGenerator.java

public static void main(String... args) throws IOException { // create command line Options object
    Options options = new Options();
    Option opt = new Option(OPT_INFILE, true,
            "input file oe-supported Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_SDFFILE, true,
            "file to write onformers for debugging (oe-supported Use .sdf|.smi to specify the file type).");
    options.addOption(opt);

    opt = new Option(OPT_WORKDIR, true, "Write files into this directory!");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_OUTPREFIX, true, "Prefix for output file.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_OUTNAMETAG, true,
            "TagName of field containing outFilePrefix. (Use TITLE for mol title).");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_TEMPLATE, true,
            "Template file for quantum program containing #XYZ# place holder line. A #FName# placeholder can be used fro chk files and the like.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_SPIN_MULTIPLICITY, true, "Spin Multiplicity of the input molecules (default 1)");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_NCPU, true, "Overwrite nprocshared parameter in gaussian input file if given");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_MEM, true, "Memory for gaussian default='10GB' replaces #mem# in template");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_MINIMIZE, false, "minimize conformer at each step using MMFFs");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_CONSTRIANT, true,
            "one of strong (90),medium (45), weak(20), none or a floating point number"
                    + " specifying the strength of tethered constrains for -doMinimize (def=strong)");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_BONDFILE, true, "Structure file containing 4 atoms defining the torsion. "
            + "In each input molecule the atoms colses these atoms are used to define the torsion.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_STARTTorsion, true,
            "The torsion in your inMol will be rotated by this value for the first job");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_TORSIONIncrement, true, "Increment each subsequent conformation by this step size");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_NSTEPS, true, "Number of conformations to create");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_MAXCONFS_PER_STEP, true,
            "While holding the torsion fixed, maximum number of conformations of free atoms to generate.  default=1");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_COREFILE, true, "Outputfile to store guessed core.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_DEBUG, false, "Produce more debug output.");
    opt.setRequired(false);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();
    if (args.length != 0) {
        System.err.println("Unknown arguments" + args);
        exitWithHelp(options);
    }

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String inFile = cmd.getOptionValue(OPT_INFILE);
    String sdfFile = cmd.getOptionValue(OPT_SDFFILE);
    String outPrefix = cmd.getOptionValue(OPT_OUTPREFIX);
    String outNameTag = cmd.getOptionValue(OPT_OUTNAMETAG);
    String workDir = cmd.getOptionValue(OPT_WORKDIR);
    String coreFile = cmd.getOptionValue(OPT_COREFILE);
    String template = cmd.getOptionValue(OPT_TEMPLATE);
    String bondFile = cmd.getOptionValue(OPT_BONDFILE);
    String nCPU = cmd.getOptionValue(OPT_NCPU);
    String memStr = cmd.getOptionValue(OPT_MEM);
    String maxConfsStr = cmd.getOptionValue(OPT_MAXCONFS_PER_STEP);
    String spinMult = cmd.getOptionValue(OPT_SPIN_MULTIPLICITY);

    boolean doMinimize = cmd.hasOption(OPT_MINIMIZE);
    String constraintStrength = cmd.getOptionValue(OPT_CONSTRIANT);
    int nStep = Integer.parseInt(cmd.getOptionValue(OPT_NSTEPS));

    if (memStr == null || memStr.length() == 0)
        memStr = "10GB";

    if (spinMult == null || spinMult.length() == 0)
        spinMult = "1";

    if ((outPrefix == null && outNameTag == null) || (outPrefix != null && outNameTag != null)) {
        System.err.println("Exactly one of -outPrefix or outNameTag must be given!");
        exitWithHelp(options);
    }

    if (workDir == null || workDir.trim().length() == 0)
        workDir = ".";

    double startTorsion = Double.NaN;
    if (cmd.hasOption(OPT_STARTTorsion))
        startTorsion = Double.parseDouble(cmd.getOptionValue(OPT_STARTTorsion));

    double torInc = Double.parseDouble(cmd.getOptionValue(OPT_TORSIONIncrement));
    int maxStepConfs = maxConfsStr == null ? 1 : Integer.parseInt(maxConfsStr);

    QTorsionProfileGenerator calculator = new QTorsionProfileGenerator(template, workDir, outPrefix, outNameTag,
            bondFile, spinMult, startTorsion, torInc, nStep, maxStepConfs, doMinimize, constraintStrength,
            cmd.hasOption(OPT_DEBUG));

    calculator.run(inFile, sdfFile, coreFile, nCPU, memStr);
}

From source file:com.telefonica.iot.cygnus.nodes.CygnusApplication.java

/**
 * Main application to be run when this CygnusApplication is invoked. The only differences with the original one
 * are the CygnusApplication is used instead of the Application one, and the Management Interface port option in
 * the command line.
 * @param args
 */
public static void main(String[] args) {
    try {
        // Print Cygnus starting trace including version
        LOGGER.info("Starting Cygnus, version " + CommonUtils.getCygnusVersion() + "."
                + CommonUtils.getLastCommit());

        // Define the options to be read
        Options options = new Options();

        Option option = new Option("n", "name", true, "the name of this agent");
        option.setRequired(true);
        options.addOption(option);

        option = new Option("f", "conf-file", true, "specify a conf file");
        option.setRequired(true);
        options.addOption(option);

        option = new Option(null, "no-reload-conf", false, "do not reload conf file if changed");
        options.addOption(option);

        option = new Option("h", "help", false, "display help text");
        options.addOption(option);

        option = new Option("p", "mgmt-if-port", true, "the management interface port");
        option.setRequired(false);
        options.addOption(option);

        option = new Option("g", "gui-port", true, "the GUI port");
        option.setRequired(false);
        options.addOption(option);

        option = new Option("t", "polling-interval", true, "polling interval");
        option.setRequired(false);
        options.addOption(option);

        // Read the options
        CommandLineParser parser = new GnuParser();
        CommandLine commandLine = parser.parse(options, args);

        File configurationFile = new File(commandLine.getOptionValue('f'));
        String agentName = commandLine.getOptionValue('n');
        boolean reload = !commandLine.hasOption("no-reload-conf");

        if (commandLine.hasOption('h')) {
            new HelpFormatter().printHelp("cygnus-flume-ng agent", options, true);
            return;
        } // if

        int apiPort = DEF_MGMT_IF_PORT;

        if (commandLine.hasOption('p')) {
            apiPort = new Integer(commandLine.getOptionValue('p'));
        } // if

        int guiPort = DEF_GUI_PORT;

        if (commandLine.hasOption('g')) {
            guiPort = new Integer(commandLine.getOptionValue('g'));
        } else {
            guiPort = 0; // this disables the GUI for the time being
        } // if else

        int pollingInterval = DEF_POLLING_INTERVAL;

        if (commandLine.hasOption('t')) {
            pollingInterval = new Integer(commandLine.getOptionValue('t'));
        } // if

        // the following is to ensure that by default the agent will fail on startup if the file does not exist
        if (!configurationFile.exists()) {
            // if command line invocation, then need to fail fast
            if (System.getProperty(Constants.SYSPROP_CALLED_FROM_SERVICE) == null) {
                String path = configurationFile.getPath();

                try {
                    path = configurationFile.getCanonicalPath();
                } catch (IOException e) {
                    LOGGER.error(
                            "Failed to read canonical path for file: " + path + ". Details=" + e.getMessage());
                } // try catch

                throw new ParseException("The specified configuration file does not exist: " + path);
            } // if
        } // if

        List<LifecycleAware> components = Lists.newArrayList();
        CygnusApplication application;

        if (reload) {
            LOGGER.debug(
                    "no-reload-conf was not set, thus the configuration file will be polled each 30 seconds");
            EventBus eventBus = new EventBus(agentName + "-event-bus");
            PollingPropertiesFileConfigurationProvider configurationProvider = new PollingPropertiesFileConfigurationProvider(
                    agentName, configurationFile, eventBus, pollingInterval);
            components.add(configurationProvider);
            application = new CygnusApplication(components);
            eventBus.register(application);
        } else {
            LOGGER.debug("no-reload-conf was set, thus the configuration file will only be read this time");
            PropertiesFileConfigurationProvider configurationProvider = new PropertiesFileConfigurationProvider(
                    agentName, configurationFile);
            application = new CygnusApplication();
            application.handleConfigurationEvent(configurationProvider.getConfiguration());
        } // if else

        // use the agent name as component name in the logs through log4j Mapped Diagnostic Context (MDC)
        MDC.put(CommonConstants.LOG4J_COMP, commandLine.getOptionValue('n'));

        // start the Cygnus application
        application.start();

        // wait until the references to Flume components are not null
        try {
            while (sourcesRef == null || channelsRef == null || sinksRef == null) {
                LOGGER.info("Waiting for valid Flume components references...");
                Thread.sleep(1000);
            } // while
        } catch (InterruptedException e) {
            LOGGER.error("There was an error while waiting for Flume components references. Details: "
                    + e.getMessage());
        } // try catch

        // start the Management Interface, passing references to Flume components
        LOGGER.info("Starting a Jetty server listening on port " + apiPort + " (Management Interface)");
        mgmtIfServer = new JettyServer(apiPort, guiPort, new ManagementInterface(configurationFile, sourcesRef,
                channelsRef, sinksRef, apiPort, guiPort));
        mgmtIfServer.start();

        // create a hook "listening" for shutdown interrupts (runtime.exit(int), crtl+c, etc)
        Runtime.getRuntime().addShutdownHook(new AgentShutdownHook("agent-shutdown-hook", supervisorRef));

        // start YAFS
        YAFS yafs = new YAFS();
        yafs.start();
    } catch (IllegalArgumentException e) {
        LOGGER.error("A fatal error occurred while running. Exception follows. Details=" + e.getMessage());
    } catch (ParseException e) {
        LOGGER.error("A fatal error occurred while running. Exception follows. Details=" + e.getMessage());
    } // try catch
}

From source file:IMAPService.java

public static void main(String[] args) {
    // Arguments
    String server = "";
    int port = -1;
    String login = "";
    String password = "";
    boolean deleteAfterDownload = false;
    boolean downloadAll = false;
    String[] foldersToDownload = null;

    // Set up apache cli
    Options options = new Options();

    Option S = new Option("S", true, "Server Name");
    S.setRequired(true);
    options.addOption(S);

    Option P = new Option("P", true, "Port Number");
    P.setRequired(true);
    options.addOption(P);

    Option l = new Option("l", true, "Login");
    l.setRequired(true);
    options.addOption(l);

    Option p = new Option("p", true, "Password if not on stdin");
    options.addOption(p);

    Option d = new Option("d", false, "Delete after downloading");
    d.setRequired(false);
    options.addOption(d);

    Option a = new Option("a", false, "Download from all folders");
    a.setRequired(false);
    options.addOption(a);

    Option f = new Option("f", true, "Download messages from specified folder");
    options.addOption(f);

    CommandLineParser clp = new DefaultParser();

    try {
        CommandLine cl = clp.parse(options, args);

        if (cl.hasOption("S"))
            server = cl.getOptionValue("S");

        if (cl.hasOption("P"))
            port = Integer.parseInt(cl.getOptionValue("P"));

        if (cl.hasOption("l"))
            login = cl.getOptionValue("l");

        if (cl.hasOption("p"))
            password = cl.getOptionValue("p");

        if (cl.hasOption("d"))
            deleteAfterDownload = true;

        if (cl.hasOption("a"))
            downloadAll = true;
        else if (cl.hasOption("f"))
            foldersToDownload = cl.getOptionValues("f");
        else {
            showArgMenu(options);
            return;
        }

    } catch (Exception e) {
        showArgMenu(options);
        return;
    }

    if (password.isEmpty()) {
        // Grab p/w of stdin if it's there
        Scanner sc = new Scanner(System.in);

        password = sc.nextLine();

        sc.close();
    }

    if (!server.isEmpty() && port != -1 && !login.isEmpty() && !password.isEmpty()
            && (downloadAll || foldersToDownload != null)) {
        //IMAPService imapService = new IMAPService("imap.gmail.com", 993);
        IMAPService imapService = new IMAPService(server, port);
        //imapService.login("kinglibingli@gmail.com", "kingli1bingli");
        imapService.login(login, password);
        imapService.buildMailbox();
        // Check if service should delete emails
        if (deleteAfterDownload)
            imapService.deleteEmailsAfterDownload();
        // Proceed with downloads
        if (downloadAll)
            imapService.downloadAllFolders();
        else {
            for (String folderNames : foldersToDownload) {
                imapService.downloadFoldersEmails(folderNames);
            }
        }

        imapService.logout();
    }

}

From source file:com.genentech.chemistry.openEye.apps.SDFStructureTagger.java

/**
 * @param args
 */
public static void main(String... args) throws IOException { // create command line Options object
    Options options = new Options();
    Option opt = new Option(OPT_INFILE, true,
            "input file oe-supported Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_OUTFILE, true, "output file oe-supported. Use .sdf|.smi to specify the file type.");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_SMARTS_FILE, true,
            "Tag-delimited file containing SMARTS, tag name, name of the tag set."
                    + " Lines starting with '#' are ignores as comment."
                    + " NOTE: Use the SMARTS features supported by OpenEye.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_QSDF_FILE, true,
            "SDFiles with molfile for tagging as well " + InputFieldName.TAG_NAME.toString() + " and "
                    + InputFieldName.SET_NAME.toString() + " fields."
                    + " NOTE: SMARTS are read in first, then the SD file.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_TAG_SETS, true,
            TAG_SETS_ALL + " or name1|name2|...; The given names must match the names"
                    + " in the files disregarding the letter case");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option(OPT_TAG_INFO, true,
            OutputTag.getTagsAsText("|") + " or if not specified all the listed ones.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_OUTPUT_COUNT, false, "Add for each structure pattern a field containing the"
            + " occurrence of the given pattern in the molecule"
            + " (NOTE: if multiple patterns have the same tag name the" + " counts will be summed up).");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_OUTPUT_EXISTS, false, "Add for each structure pattern a field containing 0 "
            + " if the pattern does not match and 1 if the pattern matches "
            + " (NOTE: if multiple patterns have the same tag name the" + " any match will yield a 1).");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_ONLY_MATCHES, false,
            "Remove records from output unless they match at least one query pattern.");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option(OPT_NO_MATCHES, false, "Remove records from output if they match any query pattern.");
    opt.setRequired(false);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String inFile = cmd.getOptionValue(OPT_INFILE);
    String outFile = cmd.getOptionValue(OPT_OUTFILE);
    String smartsFile = cmd.getOptionValue(OPT_SMARTS_FILE);
    String molPatternFile = cmd.getOptionValue(OPT_QSDF_FILE);
    String tagSets = cmd.getOptionValue(OPT_TAG_SETS);
    String tagInfo = cmd.getOptionValue(OPT_TAG_INFO);
    boolean outputCount = cmd.hasOption(OPT_OUTPUT_COUNT);
    boolean outputExists = cmd.hasOption(OPT_OUTPUT_EXISTS);
    boolean onlyMatches = cmd.hasOption(OPT_ONLY_MATCHES);
    boolean noMatches = cmd.hasOption(OPT_NO_MATCHES);
    SDFStructureTagger tagger = new SDFStructureTagger(outFile);

    try {
        tagger.prepare(smartsFile, molPatternFile, tagSets, tagInfo, outputCount, outputExists, onlyMatches,
                noMatches);
        tagger.run(inFile);
    } catch (IncorrectInputException iie) {
        System.err.println(iie.toString());
        exitWithHelp(options);
    } finally {
        tagger.close();
    }
}

From source file:edu.nyu.vida.data_polygamy.pre_processing.PreProcessing.java

/**
 * @param args
 * @throws IOException 
 * @throws ClassNotFoundException 
 * @throws InterruptedException 
 */
@SuppressWarnings("deprecation")
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option nameOption = new Option("dn", "name", true, "the name of the dataset");
    nameOption.setRequired(true);
    nameOption.setArgName("DATASET NAME");
    options.addOption(nameOption);

    Option headerOption = new Option("dh", "header", true, "the file that contains the header of the dataset");
    headerOption.setRequired(true);
    headerOption.setArgName("DATASET HEADER FILE");
    options.addOption(headerOption);

    Option deafultsOption = new Option("dd", "defaults", true,
            "the file that contains the default values of the dataset");
    deafultsOption.setRequired(true);
    deafultsOption.setArgName("DATASET DEFAULTS FILE");
    options.addOption(deafultsOption);

    Option tempResOption = new Option("t", "temporal", true,
            "desired temporal resolution (hour, day, week, or month)");
    tempResOption.setRequired(true);
    tempResOption.setArgName("TEMPORAL RESOLUTION");
    options.addOption(tempResOption);

    Option spatialResOption = new Option("s", "spatial", true,
            "desired spatial resolution (points, nbhd, zip, grid, or city)");
    spatialResOption.setRequired(true);
    spatialResOption.setArgName("SPATIAL RESOLUTION");
    options.addOption(spatialResOption);

    Option currentSpatialResOption = new Option("cs", "current-spatial", true,
            "current spatial resolution (points, nbhd, zip, grid, or city)");
    currentSpatialResOption.setRequired(true);
    currentSpatialResOption.setArgName("CURRENT SPATIAL RESOLUTION");
    options.addOption(currentSpatialResOption);

    Option indexResOption = new Option("i", "index", true, "indexes of the temporal and spatial attributes");
    indexResOption.setRequired(true);
    indexResOption.setArgName("INDEX OF SPATIO-TEMPORAL RESOLUTIONS");
    indexResOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(indexResOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing",
                options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing",
                options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar " + "edu.nyu.vida.data_polygamy.pre_processing.PreProcessing",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);
    String dataset = cmd.getOptionValue("dn");
    String header = cmd.getOptionValue("dh");
    String defaults = cmd.getOptionValue("dd");
    String temporalResolution = cmd.getOptionValue("t");
    String spatialResolution = cmd.getOptionValue("s");
    String gridResolution = "";
    String currentSpatialResolution = cmd.getOptionValue("cs");

    if (spatialResolution.contains("grid")) {
        String[] res = spatialResolution.split("-");
        spatialResolution = res[0];
        gridResolution = res[1];
    }

    conf.set("header", s3bucket + FrameworkUtils.dataDir + "/" + header);
    conf.set("defaults", s3bucket + FrameworkUtils.dataDir + "/" + defaults);
    conf.set("temporal-resolution", temporalResolution);
    conf.set("spatial-resolution", spatialResolution);
    conf.set("grid-resolution", gridResolution);
    conf.set("current-spatial-resolution", currentSpatialResolution);

    String[] indexes = cmd.getOptionValues("i");
    String temporalPos = "";
    Integer sizeSpatioTemp = 0;
    if (!(currentSpatialResolution.equals("points"))) {
        String spatialPos = "";
        for (int i = 0; i < indexes.length; i++) {
            temporalPos += indexes[i] + ",";
            spatialPos += indexes[++i] + ",";
            sizeSpatioTemp++;
        }
        conf.set("spatial-pos", spatialPos);
    } else {
        String xPositions = "", yPositions = "";
        for (int i = 0; i < indexes.length; i++) {
            temporalPos += indexes[i] + ",";
            xPositions += indexes[++i] + ",";
            yPositions += indexes[++i] + ",";
            sizeSpatioTemp++;
        }
        conf.set("xPositions", xPositions);
        conf.set("yPositions", yPositions);
    }
    conf.set("temporal-pos", temporalPos);

    conf.set("size-spatio-temporal", sizeSpatioTemp.toString());

    // checking resolutions

    if (utils.spatialResolution(spatialResolution) < 0) {
        System.out.println("Invalid spatial resolution: " + spatialResolution);
        System.exit(-1);
    }

    if (utils.spatialResolution(spatialResolution) == FrameworkUtils.POINTS) {
        System.out.println("The data needs to be reduced at least to neighborhoods or grid.");
        System.exit(-1);
    }

    if (utils.spatialResolution(currentSpatialResolution) < 0) {
        System.out.println("Invalid spatial resolution: " + currentSpatialResolution);
        System.exit(-1);
    }

    if (utils.spatialResolution(currentSpatialResolution) > utils.spatialResolution(spatialResolution)) {
        System.out.println("The current spatial resolution is coarser than "
                + "the desired one. You can only navigate from a fine resolution" + " to a coarser one.");
        System.exit(-1);
    }

    if (utils.temporalResolution(temporalResolution) < 0) {
        System.out.println("Invalid temporal resolution: " + temporalResolution);
        System.exit(-1);
    }

    String fileName = s3bucket + FrameworkUtils.preProcessingDir + "/" + dataset + "-" + temporalResolution
            + "-" + spatialResolution + gridResolution;
    conf.set("aggregates", fileName + ".aggregates");

    // making sure both files are removed, if they exist
    FrameworkUtils.removeFile(fileName, s3conf, s3);
    FrameworkUtils.removeFile(fileName + ".aggregates", s3conf, s3);

    /**
     * Hadoop Parameters
     * sources: http://www.slideshare.net/ImpetusInfo/ppt-on-advanced-hadoop-tuning-n-optimisation
     *          https://cloudcelebrity.wordpress.com/2013/08/14/12-key-steps-to-keep-your-hadoop-cluster-running-strong-and-performing-optimum/
     */

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");

    // using SnappyCodec for intermediate and output data ?
    // TODO: for now, using SnappyCodec -- what about LZO + Protocol Buffer serialization?
    //   LZO - http://www.oberhumer.com/opensource/lzo/#download
    //   Hadoop-LZO - https://github.com/twitter/hadoop-lzo
    //   Protocol Buffer - https://github.com/twitter/elephant-bird
    //   General Info - http://www.devx.com/Java/Article/47913
    //   Compression - http://comphadoop.weebly.com/index.html
    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
                "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        conf.set("mapreduce.output.fileoutputformat.compress.codec",
                "org.apache.hadoop.io.compress.BZip2Codec");
    }

    // TODO: this is dangerous!
    if (s3) {
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
    }

    Job job = new Job(conf);
    job.setJobName(dataset + "-" + temporalResolution + "-" + spatialResolution);

    job.setMapOutputKeyClass(MultipleSpatioTemporalWritable.class);
    job.setMapOutputValueClass(AggregationArrayWritable.class);

    job.setOutputKeyClass(MultipleSpatioTemporalWritable.class);
    job.setOutputValueClass(AggregationArrayWritable.class);

    job.setMapperClass(PreProcessingMapper.class);
    job.setCombinerClass(PreProcessingCombiner.class);
    job.setReducerClass(PreProcessingReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());
    //job.setNumReduceTasks(1);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(job, true);
    SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    FileInputFormat.setInputPaths(job, new Path(s3bucket + FrameworkUtils.dataDir + "/" + dataset));
    FileOutputFormat.setOutputPath(job, new Path(fileName));

    job.setJarByClass(PreProcessing.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(fileName + "\t" + (System.currentTimeMillis() - start));

}
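
Most of the examples above create options with new Option(...) plus setRequired(...) and parse with PosixParser or GnuParser, both of which are deprecated as of Commons CLI 1.3. On a recent version the same required/optional behaviour can be expressed with Option.builder(...) and DefaultParser; the following is a brief sketch under that assumption, not code from any of the projects above.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class BuilderStyleRequired {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();

        // equivalent to: Option opt = new Option("in", true, "input file"); opt.setRequired(true);
        options.addOption(Option.builder("in")
                .hasArg()
                .desc("input file")
                .required()          // same effect as setRequired(true)
                .build());

        // explicitly optional, like setRequired(false)
        options.addOption(Option.builder("v")
                .desc("verbose output")
                .required(false)
                .build());

        CommandLine cmd = new DefaultParser().parse(options, args);
        System.out.println("in = " + cmd.getOptionValue("in"));
    }
}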