Example usage for org.apache.commons.cli ParseException printStackTrace

List of usage examples for org.apache.commons.cli ParseException printStackTrace

Introduction

On this page you can find example usage for org.apache.commons.cli ParseException printStackTrace.

Prototype

public void printStackTrace() 

Document

Prints this throwable and its backtrace to the standard error stream.

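Before the project-specific examples, here is a minimal, self-contained sketch of the typical pattern; the class name ParseExceptionDemo and its options are illustrative and not taken from any of the projects below. It parses the arguments with a DefaultParser and, when a ParseException is thrown, dumps the full backtrace to standard error with printStackTrace() before showing usage and exiting.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ParseExceptionDemo {

    public static void main(String[] args) {
        Options options = new Options();
        options.addOption("f", "file", true, "input file");
        options.addOption("v", "verbose", false, "verbose output");

        CommandLineParser parser = new DefaultParser();
        try {
            CommandLine cmd = parser.parse(options, args);
            System.out.println("verbose = " + cmd.hasOption("verbose"));
        } catch (ParseException e) {
            // Print this throwable and its backtrace to the standard error stream.
            e.printStackTrace();
            new HelpFormatter().printHelp("ParseExceptionDemo", options);
            System.exit(1);
        }
    }
}

Most real tools, including several of the examples that follow, print only e.getMessage() to the user and reserve printStackTrace() for debugging or verbose modes.
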
Usage

From source file:fr.tpt.s3.mcdag.generator.MainGenerator.java

/**
 * Main method for the generator: it launches a given number of threads with the parameters
 * given
 * @param args
 */
public static void main(String[] args) {

    /* ============================ Command line ================= */
    Options options = new Options();

    Option o_hi = new Option("mu", "max_utilization", true, "Upper bound utilization");
    o_hi.setRequired(true);
    options.addOption(o_hi);

    Option o_tasks = new Option("nt", "nb_tasks", true, "Number of tasks for the system");
    o_tasks.setRequired(true);
    options.addOption(o_tasks);

    Option o_eprob = new Option("e", "eprobability", true, "Probability of edges");
    o_eprob.setRequired(true);
    options.addOption(o_eprob);

    Option o_levels = new Option("l", "levels", true, "Number of criticality levels");
    o_levels.setRequired(true);
    options.addOption(o_levels);

    Option o_para = new Option("p", "parallelism", true, "Max parallelism for the DAGs");
    o_para.setRequired(true);
    options.addOption(o_para);

    Option o_nbdags = new Option("nd", "num_dags", true, "Number of DAGs");
    o_nbdags.setRequired(true);
    options.addOption(o_nbdags);

    Option o_nbfiles = new Option("nf", "num_files", true, "Number of files");
    o_nbfiles.setRequired(true);
    options.addOption(o_nbfiles);

    Option o_rfactor = new Option("rf", "reduc_factor", true, "Reduction factor for criticality modes");
    o_rfactor.setRequired(false);
    options.addOption(o_rfactor);

    Option o_out = new Option("o", "output", true, "Output file for the DAG");
    o_out.setRequired(true);
    options.addOption(o_out);

    Option graphOpt = new Option("g", "graphviz", false, "Generate a graphviz DOT file");
    graphOpt.setRequired(false);
    options.addOption(graphOpt);

    Option debugOpt = new Option("d", "debug", false, "Enabling debug");
    debugOpt.setRequired(false);
    options.addOption(debugOpt);

    Option jobsOpt = new Option("j", "jobs", true, "Number of jobs");
    jobsOpt.setRequired(false);
    options.addOption(jobsOpt);

    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.out.println(e.getMessage());
        formatter.printHelp("DAG Generator", options);

        System.exit(1);
        return;
    }

    double maxU = Double.parseDouble(cmd.getOptionValue("max_utilization"));
    int edgeProb = Integer.parseInt(cmd.getOptionValue("eprobability"));
    int levels = Integer.parseInt(cmd.getOptionValue("levels"));
    int nbDags = Integer.parseInt(cmd.getOptionValue("num_dags"));
    int nbFiles = Integer.parseInt(cmd.getOptionValue("num_files"));
    int para = Integer.parseInt(cmd.getOptionValue("parallelism"));
    int nbTasks = Integer.parseInt(cmd.getOptionValue("nb_tasks"));
    boolean graph = cmd.hasOption("graphviz");
    boolean debug = cmd.hasOption("debug");
    String output = cmd.getOptionValue("output");
    int nbJobs = 1;
    if (cmd.hasOption("jobs"))
        nbJobs = Integer.parseInt(cmd.getOptionValue("jobs"));
    double rfactor = 2.0;
    if (cmd.hasOption("reduc_factor"))
        rfactor = Double.parseDouble(cmd.getOptionValue("reduc_factor"));
    /* ============================= Generator parameters ============================= */

    if (nbFiles < 0 || nbDags < 0 || nbJobs < 0) {
        System.err.println("[ERROR] Generator: Number of files & DAGs need to be positive.");
        formatter.printHelp("DAG Generator", options);
        System.exit(1);
        return;
    }

    Thread threads[] = new Thread[nbJobs];

    int nbFilesCreated = 0;
    int count = 0;

    while (nbFilesCreated != nbFiles) {
        int launched = 0;

        for (int i = 0; i < nbJobs && count < nbFiles; i++) {
            String outFile = output.substring(0, output.lastIndexOf('.')).concat("-" + count + ".xml");
            GeneratorThread gt = new GeneratorThread(maxU, nbTasks, edgeProb, levels, para, nbDags, rfactor,
                    outFile, graph, debug);
            threads[i] = new Thread(gt);
            threads[i].setName("GeneratorThread-" + i);
            launched++;
            count++;
            threads[i].start();
        }

        for (int i = 0; i < launched; i++) {
            try {
                threads[i].join();
                nbFilesCreated++;
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:ch.epfl.leb.sass.commandline.CommandLineInterface.java

/**
 * Shows help, launches the interpreter and executes scripts according to input args.
 * @param args input arguments
 */
public static void main(String args[]) {
    // parse input arguments
    CommandLineParser parser = new DefaultParser();
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
    } catch (ParseException ex) {
        System.err.println("Parsing of arguments failed. Reason: " + ex.getMessage());
        System.err.println("Use -help for usage.");
        System.exit(1);
    }

    // decide how to make the interpreter available based on options
    Interpreter interpreter = null;
    // show help and exit
    if (line.hasOption("help")) {
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.printHelp("java -jar <jar-name>", options, true);
        System.exit(0);
        // launch interpreter inside current terminal
    } else if (line.hasOption("interpreter")) {
        // assign in, out and err streams to the interpreter
        interpreter = new Interpreter(new InputStreamReader(System.in), System.out, System.err, true);
        interpreter.setShowResults(true);
        // if a script was given, execute it before giving access to user
        if (line.hasOption("script")) {
            try {
                interpreter.source(line.getOptionValue("script"));
            } catch (IOException ex) {
                Logger.getLogger(BeanShellConsole.class.getName()).log(Level.SEVERE,
                        "IOException while executing shell script.", ex);
            } catch (EvalError ex) {
                Logger.getLogger(BeanShellConsole.class.getName()).log(Level.SEVERE,
                        "EvalError while executing shell script.", ex);
            }
        }
        // give access to user
        new Thread(interpreter).start();
        // only execute script and exit
    } else if (line.hasOption("script")) {
        interpreter = new Interpreter();
        try {
            interpreter.source(line.getOptionValue("script"));
            System.exit(0);
        } catch (IOException ex) {
            Logger.getLogger(BeanShellConsole.class.getName()).log(Level.SEVERE,
                    "IOException while executing shell script.", ex);
            System.exit(1);
        } catch (EvalError ex) {
            Logger.getLogger(BeanShellConsole.class.getName()).log(Level.SEVERE,
                    "EvalError while executing shell script.", ex);
            System.exit(1);
        }

        // Launches the RPC server with the model contained in the file whose
        // filename was passed by argument.
    } else if (line.hasOption("rpc_server")) {

        IJPluginModel model = new IJPluginModel();
        File file = new File(line.getOptionValue("rpc_server"));
        try {
            FileInputStream stream = new FileInputStream(file);
            model = IJPluginModel.read(stream);
        } catch (FileNotFoundException ex) {
            System.out.println("Error: " + file.getName() + " not found.");
            System.exit(1);
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        // Check whether a port number was specified.
        if (line.hasOption("port")) {
            try {
                port = Integer.valueOf(line.getOptionValue("port"));
                System.out.println("Using port: " + String.valueOf(port));
            } catch (java.lang.NumberFormatException ex) {
                System.out.println("Error: the port number argument is not a number.");
                System.exit(1);
            }
        } else {
            System.out.println("No port number provided. Using default port: " + String.valueOf(port));
        }

        RPCServer server = new RPCServer(model, port);

        System.out.println("Starting RPC server...");
        server.serve();

    } else if (line.hasOption("port") & !line.hasOption("rpc_server")) {
        System.out.println("Error: Port number provided without requesting the RPC server. Exiting...");
        System.exit(1);

        // if System.console() returns null, it means we were launched by
        // double-clicking the .jar, so launch own BeanShellConsole
    } else if (System.console() == null) {
        BeanShellConsole cframe = new BeanShellConsole("SASS BeanShell Prompt");
        interpreter = cframe.getInterpreter();
        cframe.setVisible(true);
        System.setOut(cframe.getInterpreter().getOut());
        System.setErr(cframe.getInterpreter().getErr());
        new Thread(cframe.getInterpreter()).start();
        // otherwise, show help
    } else {
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.printHelp("java -jar <jar-name>", options, true);
        System.exit(0);
    }

    if (interpreter != null) {
        printWelcomeText(interpreter.getOut());
    }
}

From source file:herddb.cli.HerdDBCLI.java

public static void main(String... args) throws IOException {
    try {
        DefaultParser parser = new DefaultParser();
        Options options = new Options();
        options.addOption("x", "url", true, "JDBC URL");
        options.addOption("u", "username", true, "JDBC Username");
        options.addOption("pwd", "password", true, "JDBC Password");
        options.addOption("q", "query", true, "Execute inline query");
        options.addOption("v", "verbose", false, "Verbose output");
        options.addOption("a", "async", false, "Use (experimental) executeBatchAsync for sending DML");
        options.addOption("s", "schema", true, "Default tablespace (SQL schema)");
        options.addOption("fi", "filter", true, "SQL filter mode: all|ddl|dml");
        options.addOption("f", "file", true, "SQL Script to execute (statement separated by 'GO' lines)");
        options.addOption("at", "autotransaction", false,
                "Execute scripts in autocommit=false mode and commit automatically");
        options.addOption("atbs", "autotransactionbatchsize", true, "Batch size for 'autotransaction' mode");
        options.addOption("g", "script", true, "Groovy Script to execute");
        options.addOption("i", "ignoreerrors", false, "Ignore SQL Errors during file execution");
        options.addOption("sc", "sqlconsole", false, "Execute SQL console in interactive mode");
        options.addOption("fmd", "mysql", false,
                "Intruct the parser that the script is coming from a MySQL Dump");
        options.addOption("rwst", "rewritestatements", false, "Rewrite all statements to use JDBC parameters");
        options.addOption("b", "backup", false, "Backup one or more tablespaces (selected with --schema)");
        options.addOption("r", "restore", false, "Restore tablespace");
        options.addOption("nl", "newleader", true, "Leader for new restored tablespace");
        options.addOption("ns", "newschema", true, "Name for new restored tablespace");
        options.addOption("tsm", "tablespacemapper", true,
                "Path to groovy script with a custom functin to map table names to tablespaces");
        options.addOption("dfs", "dumpfetchsize", true,
                "Fetch size for dump operations. Defaults to chunks of 100000 records");
        options.addOption("n", "nodeid", true, "Node id");
        options.addOption("t", "table", true, "Table name");
        options.addOption("p", "param", true, "Parameter name");
        options.addOption("val", "values", true, "Parameter values");
        options.addOption("lts", "list-tablespaces", false, "List available tablespaces");
        options.addOption("ln", "list-nodes", false, "List available nodes");
        options.addOption("sts", "show-tablespace", false,
                "Show full informations about a tablespace (needs -s option)");
        options.addOption("lt", "list-tables", false, "List tablespace tables (needs -s option)");
        options.addOption("st", "show-table", false,
                "Show full informations about a table (needs -s and -t options)");

        options.addOption("ar", "add-replica", false,
                "Add a replica to the tablespace (needs -s and -r options)");
        options.addOption("rr", "remove-replica", false,
                "Remove a replica from the tablespace (needs -s and -r options)");
        options.addOption("adt", "create-tablespace", false, "Create a tablespace (needs -ns and -nl options)");
        options.addOption("at", "alter-tablespace", false,
                "Alter a tablespace (needs -s, -param and --values options)");

        options.addOption("d", "describe", false, "Checks and describes a raw file");
        options.addOption("ft", "filetype", true,
                "Checks and describes a raw file (valid options are txlog, datapage, tablecheckpoint, indexcheckpoint, tablesmetadata");
        options.addOption("mdf", "metadatafile", true,
                "Tables metadata file, required for 'datapage' filetype");
        options.addOption("tsui", "tablespaceuuid", true, "Tablespace UUID, used for describing raw files");

        org.apache.commons.cli.CommandLine commandLine;
        try {
            commandLine = parser.parse(options, args);
        } catch (ParseException error) {
            println("Syntax error: " + error);
            failAndPrintHelp(options);
            return;
        }
        if (args.length == 0) {
            failAndPrintHelp(options);
            return;
        }

        String schema = commandLine.getOptionValue("schema", TableSpace.DEFAULT);
        String tablespaceuuid = commandLine.getOptionValue("tablespaceuuid", "");
        final boolean verbose = commandLine.hasOption("verbose");
        final boolean async = commandLine.hasOption("async");
        final String filter = commandLine.getOptionValue("filter", "all");
        if (!verbose) {
            LogManager.getLogManager().reset();
        }
        String file = commandLine.getOptionValue("file", "");
        String tablesmetadatafile = commandLine.getOptionValue("metadatafile", "");
        String table = commandLine.getOptionValue("table", "");
        boolean describe = commandLine.hasOption("describe");
        String filetype = commandLine.getOptionValue("filetype", "");
        if (describe) {
            try {
                if (file.isEmpty()) {
                    throw new IllegalArgumentException("file option is required");
                }
                describeRawFile(tablespaceuuid, table, tablesmetadatafile, file, filetype);
            } catch (Exception error) {
                if (verbose) {
                    error.printStackTrace();
                } else {
                    println("error:" + error);
                }
                exitCode = 1;
            }
            return;
        }
        String url = commandLine.getOptionValue("url", "jdbc:herddb:server:localhost:7000");
        String username = commandLine.getOptionValue("username",
                ClientConfiguration.PROPERTY_CLIENT_USERNAME_DEFAULT);
        String password = commandLine.getOptionValue("password",
                ClientConfiguration.PROPERTY_CLIENT_PASSWORD_DEFAULT);
        String query = commandLine.getOptionValue("query", "");

        boolean backup = commandLine.hasOption("backup");
        boolean restore = commandLine.hasOption("restore");
        String newschema = commandLine.getOptionValue("newschema", "");
        String leader = commandLine.getOptionValue("newleader", "");
        String script = commandLine.getOptionValue("script", "");
        String tablespacemapperfile = commandLine.getOptionValue("tablespacemapper", "");
        int dumpfetchsize = Integer.parseInt(commandLine.getOptionValue("dumpfetchsize", 100000 + ""));
        final boolean ignoreerrors = commandLine.hasOption("ignoreerrors");
        boolean sqlconsole = commandLine.hasOption("sqlconsole");
        final boolean frommysqldump = commandLine.hasOption("mysql");
        final boolean rewritestatements = commandLine.hasOption("rewritestatements")
                || !tablespacemapperfile.isEmpty() || frommysqldump;
        boolean autotransaction = commandLine.hasOption("autotransaction") || frommysqldump;
        int autotransactionbatchsize = Integer
                .parseInt(commandLine.getOptionValue("autotransactionbatchsize", 100000 + ""));
        if (!autotransaction) {
            autotransactionbatchsize = 0;
        }

        String nodeId = commandLine.getOptionValue("nodeid", "");
        String param = commandLine.getOptionValue("param", "");
        String values = commandLine.getOptionValue("values", "");

        boolean listTablespaces = commandLine.hasOption("list-tablespaces");
        boolean listNodes = commandLine.hasOption("list-nodes");
        boolean showTablespace = commandLine.hasOption("show-tablespace");
        boolean listTables = commandLine.hasOption("list-tables");
        boolean showTable = commandLine.hasOption("show-table");
        if (showTable) {
            if (table.equals("")) {
                println("Specify the table (-t <table>)");
                exitCode = 1;
                System.exit(exitCode);
            }
        }

        boolean createTablespace = commandLine.hasOption("create-tablespace");
        if (createTablespace) {
            if (newschema.equals("")) {
                println("Specify the tablespace name (--newschema <schema>)");
                exitCode = 1;
                System.exit(exitCode);
            }
            if (leader.equals("")) {
                println("Specify the leader node (--newleader <nodeid>)");
                exitCode = 1;
                System.exit(exitCode);
            }
        }
        boolean alterTablespace = commandLine.hasOption("alter-tablespace");
        if (alterTablespace) {
            if (commandLine.getOptionValue("schema", null) == null) {
                println("Cowardly refusing to assume the default schema in an alter command. Explicitly use \"-s "
                        + TableSpace.DEFAULT + "\" instead");
                exitCode = 1;
                System.exit(exitCode);
            }
            if (param.equals("")) {
                println("Specify the parameter (--param <par>)");
                exitCode = 1;
                System.exit(exitCode);
            }
            if (values.equals("")) {
                println("Specify values (--values <vals>)");
                exitCode = 1;
                System.exit(exitCode);
            }

        }
        boolean addReplica = commandLine.hasOption("add-replica");
        if (addReplica) {
            if (commandLine.getOptionValue("schema", null) == null) {
                println("Cowardly refusing to assume the default schema in an alter command. Explicitly use \"-s "
                        + TableSpace.DEFAULT + "\" instead");
                exitCode = 1;
                System.exit(exitCode);
            }
            if (nodeId.equals("")) {
                println("Specify the node (-n <nodeid>)");
                exitCode = 1;
                System.exit(exitCode);
            }
        }
        boolean removeReplica = commandLine.hasOption("remove-replica");
        if (removeReplica) {
            if (commandLine.getOptionValue("schema", null) == null) {
                println("Cowardly refusing to assume the default schema in an alter command. Explicitly use \"-s "
                        + TableSpace.DEFAULT + "\" instead");
                exitCode = 1;
                System.exit(exitCode);
            }
            if (nodeId.equals("")) {
                println("Specify the node (-n <nodeid>)");
                exitCode = 1;
                System.exit(exitCode);
            }
        }

        TableSpaceMapper tableSpaceMapper = buildTableSpaceMapper(tablespacemapperfile);
        try (HerdDBDataSource datasource = new HerdDBDataSource()) {
            datasource.setUrl(url);
            datasource.setUsername(username);
            datasource.setPassword(password);

            try (Connection connection = datasource.getConnection();
                    Statement statement = connection.createStatement()) {
                connection.setSchema(schema);
                if (sqlconsole) {
                    runSqlConsole(connection, statement, PRETTY_PRINT);
                } else if (backup) {
                    performBackup(statement, schema, file, options, connection, dumpfetchsize);
                } else if (restore) {
                    performRestore(file, leader, newschema, options, statement, connection);
                } else if (!query.isEmpty()) {
                    executeStatement(verbose, ignoreerrors, false, false, query, statement, tableSpaceMapper,
                            false, PRETTY_PRINT);
                } else if (!file.isEmpty()) {
                    executeSqlFile(autotransactionbatchsize, connection, file, verbose, async, ignoreerrors,
                            frommysqldump, rewritestatements, statement, tableSpaceMapper, PRETTY_PRINT, filter,
                            datasource);
                } else if (!script.isEmpty()) {
                    executeScript(connection, datasource, statement, script);
                } else if (listTablespaces) {
                    printTableSpaces(verbose, ignoreerrors, statement, tableSpaceMapper);
                } else if (listNodes) {
                    printNodes(verbose, ignoreerrors, statement, tableSpaceMapper);
                } else if (showTablespace) {
                    printTableSpaceInfos(verbose, ignoreerrors, statement, tableSpaceMapper, schema);
                } else if (listTables) {
                    listTables(verbose, ignoreerrors, statement, tableSpaceMapper, schema);
                } else if (showTable) {
                    printTableInfos(verbose, ignoreerrors, statement, tableSpaceMapper, schema, table);
                } else if (addReplica) {
                    changeReplica(verbose, ignoreerrors, statement, tableSpaceMapper, schema, nodeId,
                            ChangeReplicaAction.ADD);
                } else if (removeReplica) {
                    changeReplica(verbose, ignoreerrors, statement, tableSpaceMapper, schema, nodeId,
                            ChangeReplicaAction.REMOVE);
                } else if (createTablespace) {
                    createTablespace(verbose, ignoreerrors, statement, tableSpaceMapper, newschema, leader);
                } else if (alterTablespace) {
                    alterTablespace(verbose, ignoreerrors, statement, tableSpaceMapper, schema, param, values);
                } else {
                    failAndPrintHelp(options);
                    return;
                }
            }
            exitCode = 0;
        } catch (Exception error) {
            if (verbose) {
                error.printStackTrace();
            } else {
                println("error:" + error);
            }
            exitCode = 1;
        }
    } finally {
        System.exit(exitCode);
    }
}

From source file:com.yahoo.labs.yamall.local.Yamall.java

public static void main(String[] args) {
    String[] remainingArgs = null;
    String inputFile = null;
    String predsFile = null;
    String saveModelFile = null;
    String initialModelFile = null;
    String lossName = null;
    String parserName = null;
    String linkName = null;
    String invertHashName = null;
    double learningRate = 1;
    String minPredictionString = null;
    String maxPredictionString = null;
    String fmNumberFactorsString = null;
    int bitsHash;
    int numberPasses;
    int holdoutPeriod = 10;

    boolean testOnly = false;
    boolean exponentialProgress;
    double progressInterval;

    options.addOption("h", "help", false, "displays this help");
    options.addOption("t", false, "ignore label information and just test");
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("binary")
            .desc("reports loss as binary classification with -1,1 labels").build());
    options.addOption(
            Option.builder().hasArg(false).required(false).longOpt("solo").desc("uses SOLO optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("pcsolo")
            .desc("uses Per Coordinate SOLO optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("pistol")
            .desc("uses PiSTOL optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("kt")
            .desc("(EXPERIMENTAL) uses KT optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("pckt")
            .desc("(EXPERIMENTAL) uses Per Coordinate KT optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("pccocob")
            .desc("(EXPERIMENTAL) uses Per Coordinate COCOB optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("cocob")
            .desc("(EXPERIMENTAL) uses COCOB optimizer").build());
    options.addOption(
            Option.builder().hasArg(false).required(false).longOpt("fm").desc("Factorization Machine").build());
    options.addOption(Option.builder("f").hasArg(true).required(false).desc("final regressor to save")
            .type(String.class).longOpt("final_regressor").build());
    options.addOption(Option.builder("p").hasArg(true).required(false).desc("file to output predictions to")
            .longOpt("predictions").type(String.class).build());
    options.addOption(
            Option.builder("i").hasArg(true).required(false).desc("initial regressor(s) to load into memory")
                    .longOpt("initial_regressor").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false).desc(
            "specify the loss function to be used. Currently available ones are: absolute, squared (default), hinge, logistic")
            .longOpt("loss_function").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false).desc(
            "specify the link function used in the output of the predictions. Currently available ones are: identity (default), logistic")
            .longOpt("link").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("output human-readable final regressor with feature names").longOpt("invert_hash")
            .type(String.class).build());
    options.addOption(
            Option.builder("l").hasArg(true).required(false).desc("set (initial) learning Rate, default = 1.0")
                    .longOpt("learning_rate").type(String.class).build());
    options.addOption(Option.builder("b").hasArg(true).required(false)
            .desc("number of bits in the feature table, default = 18").longOpt("bit_precision")
            .type(String.class).build());
    options.addOption(Option.builder("P").hasArg(true).required(false)
            .desc("progress update frequency, integer: additive; float: multiplicative, default = 2.0")
            .longOpt("progress").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("smallest prediction to output, before the link function, default = -50")
            .longOpt("min_prediction").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("smallest prediction to output, before the link function, default = 50")
            .longOpt("max_prediction").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("ignore namespaces beginning with the characters in <arg>").longOpt("ignore")
            .type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false).desc("number of training passes")
            .longOpt("passes").type(String.class).build());
    options.addOption(
            Option.builder().hasArg(true).required(false).desc("holdout period for test only, default = 10")
                    .longOpt("holdout_period").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("number of factors for Factorization Machines default = 8").longOpt("fmNumberFactors")
            .type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("specify the parser to use. Currently available ones are: vw (default), libsvm, tsv")
            .longOpt("parser").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false).desc("schema file for the TSV input")
            .longOpt("schema").type(String.class).build());

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.out.println("Unrecognized option");
        help();
    }
    if (cmd.hasOption("h"))
        help();
    if (cmd.hasOption("t"))
        testOnly = true;
    if (cmd.hasOption("binary")) {
        binary = true;
        System.out.println("Reporting binary loss");
    }
    initialModelFile = cmd.getOptionValue("i");
    predsFile = cmd.getOptionValue("p");
    lossName = cmd.getOptionValue("loss_function", "squared");
    linkName = cmd.getOptionValue("link", "identity");
    saveModelFile = cmd.getOptionValue("f");
    learningRate = Double.parseDouble(cmd.getOptionValue("l", "1.0"));
    bitsHash = Integer.parseInt(cmd.getOptionValue("b", "18"));
    invertHashName = cmd.getOptionValue("invert_hash");
    minPredictionString = cmd.getOptionValue("min_prediction", "-50");
    maxPredictionString = cmd.getOptionValue("max_prediction", "50");
    fmNumberFactorsString = cmd.getOptionValue("fmNumberFactors", "8");
    parserName = cmd.getOptionValue("parser", "vw");

    numberPasses = Integer.parseInt(cmd.getOptionValue("passes", "1"));
    System.out.println("Number of passes = " + numberPasses);
    if (numberPasses > 1) {
        holdoutPeriod = Integer.parseInt(cmd.getOptionValue("holdout_period", "10"));
        System.out.println("Holdout period = " + holdoutPeriod);
    }

    remainingArgs = cmd.getArgs();
    if (remainingArgs.length == 1)
        inputFile = remainingArgs[0];

    InstanceParser instanceParser = null;
    if (parserName.equals("vw"))
        instanceParser = new VWParser(bitsHash, cmd.getOptionValue("ignore"), (invertHashName != null));
    else if (parserName.equals("libsvm"))
        instanceParser = new LIBSVMParser(bitsHash, (invertHashName != null));
    else if (parserName.equals("tsv")) {
        String schema = cmd.getOptionValue("schema");
        if (schema == null) {
            System.out.println("TSV parser requires a schema file.");
            System.exit(0);
        } else {
            String spec = null;
            try {
                spec = new String(Files.readAllBytes(Paths.get(schema)));
            } catch (IOException e) {
                System.out.println("Error reading the TSV schema file.");
                e.printStackTrace();
                System.exit(0);
            }
            instanceParser = new TSVParser(bitsHash, cmd.getOptionValue("ignore"), (invertHashName != null),
                    spec);
        }
    } else {
        System.out.println("Unknown parser.");
        System.exit(0);
    }
    System.out.println("Num weight bits = " + bitsHash);

    // setup progress
    String progress = cmd.getOptionValue("P", "2.0");
    if (progress.indexOf('.') >= 0) {
        exponentialProgress = true;
        progressInterval = (double) Double.parseDouble(progress);
    } else {
        exponentialProgress = false;
        progressInterval = (double) Integer.parseInt(progress);
    }

    // min and max predictions
    minPrediction = (double) Double.parseDouble(minPredictionString);
    maxPrediction = (double) Double.parseDouble(maxPredictionString);

    // number of factors for Factorization Machines
    fmNumberFactors = (int) Integer.parseInt(fmNumberFactorsString);

    // configure the learner
    Loss lossFnc = null;
    LinkFunction link = null;
    if (initialModelFile == null) {
        if (cmd.hasOption("kt")) {
            learner = new KT(bitsHash);
        } else if (cmd.hasOption("pckt")) {
            learner = new PerCoordinateKT(bitsHash);
        } else if (cmd.hasOption("pcsolo")) {
            learner = new PerCoordinateSOLO(bitsHash);
        } else if (cmd.hasOption("solo")) {
            learner = new SOLO(bitsHash);
        } else if (cmd.hasOption("pccocob")) {
            learner = new PerCoordinateCOCOB(bitsHash);
        } else if (cmd.hasOption("cocob")) {
            learner = new COCOB(bitsHash);
        } else if (cmd.hasOption("pistol")) {
            learner = new PerCoordinatePiSTOL(bitsHash);
        } else if (cmd.hasOption("fm")) {
            learner = new SGD_FM(bitsHash, fmNumberFactors);
        } else
            learner = new SGD_VW(bitsHash);
    } else {
        learner = IOLearner.loadLearner(initialModelFile);
    }

    // setup link function
    if (linkName.equals("identity")) {
        link = new IdentityLinkFunction();
    } else if (linkName.equals("logistic")) {
        link = new LogisticLinkFunction();
    } else {
        System.out.println("Unknown link function.");
        System.exit(0);
    }

    // setup loss function
    if (lossName.equals("squared")) {
        lossFnc = new SquareLoss();
    } else if (lossName.equals("hinge")) {
        lossFnc = new HingeLoss();
    } else if (lossName.equals("logistic")) {
        lossFnc = new LogisticLoss();
    } else if (lossName.equals("absolute")) {
        lossFnc = new AbsLoss();
    } else {
        System.out.println("Unknown loss function.");
        System.exit(0);
    }

    learner.setLoss(lossFnc);
    learner.setLearningRate(learningRate);

    // maximum range predictions
    System.out.println("Max prediction = " + maxPrediction + ", Min Prediction = " + minPrediction);
    // print information about the learner
    System.out.println(learner.toString());
    // print information about the link function
    System.out.println(link.toString());
    // print information about the parser
    System.out.println(instanceParser.toString());
    // print information about ignored namespaces
    System.out.println("Ignored namespaces = " + cmd.getOptionValue("ignore", ""));

    long start = System.nanoTime();
    FileInputStream fstream;
    try {
        BufferedReader br = null;
        if (inputFile != null) {
            fstream = new FileInputStream(inputFile);
            System.out.println("Reading datafile = " + inputFile);
            br = new BufferedReader(new InputStreamReader(fstream));
        } else {
            System.out.println("Reading from console");
            br = new BufferedReader(new InputStreamReader(System.in));
        }

        File fout = null;
        FileOutputStream fos = null;
        BufferedWriter bw = null;
        if (predsFile != null) {
            fout = new File(predsFile);
            fos = new FileOutputStream(fout);
            bw = new BufferedWriter(new OutputStreamWriter(fos));
        }

        try {
            System.out.println("average       example  current  current  current");
            System.out.println("loss          counter    label  predict  features");
            int iter = 0;
            double cumLoss = 0;
            double weightedSampleSum = 0;
            double sPlus = 0;
            double sMinus = 0;
            Instance sample = null;
            boolean justPrinted = false;
            int pass = 0;
            ObjectOutputStream ooutTr = null;
            ObjectOutputStream ooutHO = null;
            ObjectInputStream oinTr = null;
            double pred = 0;
            int limit = 1;
            double hError = Double.MAX_VALUE;
            double lastHError = Double.MAX_VALUE;
            int numTestSample = 0;
            int numTrainingSample = 0;
            int idx = 0;

            if (numberPasses > 1) {
                ooutTr = new ObjectOutputStream(new FileOutputStream("cache_training.bin"));
                ooutHO = new ObjectOutputStream(new FileOutputStream("cache_holdout.bin"));
                oinTr = new ObjectInputStream(new FileInputStream("cache_training.bin"));
            }

            do {
                while (true) {
                    double score;

                    if (pass > 0 && numberPasses > 1) {
                        Instance tmp = (Instance) oinTr.readObject();
                        if (tmp != null)
                            sample = tmp;
                        else
                            break;
                    } else {
                        String strLine = br.readLine();
                        if (strLine != null)
                            sample = instanceParser.parse(strLine);
                        else
                            break;
                    }

                    justPrinted = false;
                    idx++;

                    if (numberPasses > 1 && pass == 0 && idx % holdoutPeriod == 0) {
                        // store the current sample for the holdout set
                        ooutHO.writeObject(sample);
                        ooutHO.reset();
                        numTestSample++;
                    } else {
                        if (numberPasses > 1 && pass == 0) {
                            ooutTr.writeObject(sample);
                            ooutTr.reset();
                            numTrainingSample++;
                        }

                        iter++;
                        if (testOnly) {
                            // predict the sample
                            score = learner.predict(sample);
                        } else {
                            // predict the sample and update the classifier using the sample
                            score = learner.update(sample);
                        }
                        score = Math.min(Math.max(score, minPrediction), maxPrediction);
                        pred = link.apply(score);
                        if (!binary)
                            cumLoss += learner.getLoss().lossValue(score, sample.getLabel())
                                    * sample.getWeight();
                        else if (Math.signum(score) != sample.getLabel())
                            cumLoss += sample.getWeight();

                        weightedSampleSum += sample.getWeight();
                        if (sample.getLabel() > 0)
                            sPlus = sPlus + sample.getWeight();
                        else
                            sMinus = sMinus + sample.getWeight();

                        // output predictions to file
                        if (predsFile != null) {
                            bw.write(String.format("%.6f %s", pred, sample.getTag()));
                            bw.newLine();
                        }

                        // print statistics to screen
                        if (iter == limit) {
                            justPrinted = true;
                            System.out.printf("%.6f %12d  % .4f  % .4f  %d\n", cumLoss / weightedSampleSum,
                                    iter, sample.getLabel(), pred, sample.getVector().size());
                            if (exponentialProgress)
                                limit *= progressInterval;
                            else
                                limit += progressInterval;
                        }
                    }
                }
                if (numberPasses > 1) {
                    if (pass == 0) { // finished first pass of many
                        // write a null at the end of the files
                        ooutTr.writeObject(null);
                        ooutHO.writeObject(null);
                        ooutTr.flush();
                        ooutHO.flush();
                        ooutTr.close();
                        ooutHO.close();

                        System.out.println("finished first epoch");
                        System.out.println(numTrainingSample + " training samples");
                        System.out.println(numTestSample + " holdout samples saved");
                    }
                    lastHError = hError;
                    hError = evalHoldoutError();
                }
                if (numberPasses > 1) {
                    System.out.printf("Weighted loss on holdout on epoch %d = %.6f\n", pass + 1, hError);

                    oinTr.close();
                    oinTr = new ObjectInputStream(new FileInputStream("cache_training.bin"));

                    if (hError > lastHError) {
                        System.out.println("Early stopping");
                        break;
                    }
                }
                pass++;
            } while (pass < numberPasses);

            if (justPrinted == false) {
                System.out.printf("%.6f %12d  % .4f  % .4f  %d\n", cumLoss / weightedSampleSum, iter,
                        sample.getLabel(), pred, sample.getVector().size());
            }
            System.out.println("finished run");

            System.out.println(String.format("average loss best constant predictor: %.6f",
                    lossFnc.lossConstantBinaryLabels(sPlus, sMinus)));

            if (saveModelFile != null)
                IOLearner.saveLearner(learner, saveModelFile);
            if (invertHashName != null)
                IOLearner.saveInvertHash(learner.getWeights(), instanceParser.getInvertHashMap(),
                        invertHashName);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        // close the input stream
        try {
            br.close();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        // close the output stream
        if (predsFile != null) {
            try {
                bw.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        long millis = System.nanoTime() - start;
        System.out.printf("Elapsed time: %d min, %d sec\n", TimeUnit.NANOSECONDS.toMinutes(millis),
                TimeUnit.NANOSECONDS.toSeconds(millis) - 60 * TimeUnit.NANOSECONDS.toMinutes(millis));
    } catch (FileNotFoundException e) {
        System.out.println("Error opening the input file");
        e.printStackTrace();
    }

}

From source file:HLA.java

public static void main(String[] args) throws IOException {

    if (!isVersionOrHigher()) {
        System.err.println("JRE of 1.8+ is required to run Kourami. Exiting.");
        System.exit(1);
    }

    CommandLineParser parser = new DefaultParser();
    Options options = HLA.createOption();
    Options helponlyOpts = HLA.createHelpOption();
    String[] bams = null;
    CommandLine line = null;
    boolean exitRun = false;
    try {
        CommandLine helpcheck = new DefaultParser().parse(helponlyOpts, args, true);
        if (helpcheck.getOptions().length > 0)
            HLA.help(options);
        else {
            line = parser.parse(options, args);
            if (line.hasOption("h"))//help"))
                HLA.help(options);
            else {
                if (line.hasOption("a"))
                    HLA.TYPEADDITIONAL = true;

                HLA.OUTPREFIX = line.getOptionValue("o");//outfilePrefix");
                String tmploc = line.getOptionValue("d");//msaDirectory");
                HLA.MSAFILELOC = tmploc;
                if (tmploc.endsWith(File.separator))
                    HLA.MSAFILELOC = tmploc.substring(0, tmploc.length() - 1);
                if (!new File(HLA.MSAFILELOC).exists() || !new File(HLA.MSAFILELOC).isDirectory()) {
                    System.err.println("Given msaDirectory: " + HLA.MSAFILELOC
                            + "\t does NOT exist or is NOT a directory.");
                    exitRun = true;
                } else if (!new File(HLA.MSAFILELOC + File.separator + "hla_nom_g.txt").exists()) {
                    System.err.println("hla_nom_g.txt NOT FOUND in " + HLA.MSAFILELOC);
                    System.err
                            .println("Please download hla_nom_g.txt from the same IMGT Release as msa files.");
                    exitRun = true;
                }
            }
            bams = line.getArgs();

            if (bams.length < 1 || (bams.length == 1 && bams[bams.length - 1].equals("DEBUG1228")))
                throw new ParseException("At least 1 bam file is required. See Usage:");
            else {
                if (bams.length > 1 && bams[bams.length - 1].equals("DEBUG1228")) {
                    String[] tmpbams = new String[bams.length - 1];
                    for (int i = 0; i < bams.length - 1; i++)
                        tmpbams[i] = bams[i];
                    bams = tmpbams;
                    HLA.DEBUG = true;
                }

                for (String b : bams)
                    if (!new File(b).exists()) {
                        System.err
                                .println("Input bam : " + b + " DOES NOT exist. Please check the bam exists.");
                        exitRun = true;
                    }
            }

        }
        if (exitRun)
            throw new ParseException("Exitting . . .");
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        //System.err.println("Failed to parse command line args. Check usage.");
        HLA.help(options);
    }

    String[] list = { "A", "B", "C", "DQA1", "DQB1", "DRB1" };

    String[] extList = { "A", "B", "C", "DQA1", "DQB1", "DRB1", "DOA", "DMA", "DMB", "DPA1", "DPB1", "DRA",
            "DRB3", "DRB5", "F", "G", "H", "J", "L" };
    //,"DPA1", "DPB1", "DRA",  "DRB4", "F", "G" , "H", "J" ,"K", "L", "V"};
    //,"DPA1", "DPB1", "DRA", "DRB3", "DRB4", "F", "G" , "H", "J" ,"K", "L", "V"};

    if (HLA.TYPEADDITIONAL)
        list = extList;

    File[] bamfiles = new File[bams.length];

    for (int i = 0; i < bams.length; i++)
        bamfiles[i] = new File(bams[i]);

    //check if <HLA.OUTPREFIX>.result is writable
    //if not exit.
    BufferedWriter resultWriter = null;
    try {
        resultWriter = new BufferedWriter(new FileWriter(HLA.OUTPREFIX + ".result"));
    } catch (IOException ioe) {
        ioe.printStackTrace();
        System.err.println("\n\n>>> CANNOT open output file: " + HLA.OUTPREFIX + ".result <<<\n\n");
        HLA.help(options);
    }

    HLA.log = new LogHandler();
    for (int i = 0; i < args.length; i++)
        HLA.log.append(" " + args[i]);
    HLA.log.appendln();

    try {
        System.err.println("----------------REF GRAPH CONSTRUCTION--------------");

        HLA.log.appendln("----------------REF GRAPH CONSTRUCTION--------------");
        HLA hla = new HLA(list, HLA.MSAFILELOC + File.separator + "hla_nom_g.txt");

        //1. bubble counting before loading reads.
        //System.err.println("----------------BUBBLE COUNTING: REF GRAPH--------------");
        //HLA.log.appendln("----------------BUBBLE COUNTING: REF GRAPH--------------");
        //hla.countStems();

        System.err.println("----------------     READ LOADING     --------------");

        HLA.log.appendln("----------------     READ LOADING     --------------");

        hla.loadReads(bamfiles);

        System.err.println("----------------    GRAPH CLEANING    --------------");
        HLA.log.appendln("----------------    GRAPH CLEANING    --------------");

        hla.flattenInsertionNodes(list);
        hla.removeUnused(list);
        hla.removeStems(list);

        /*updating error prob*/
        hla.updateErrorProb();

        hla.log.flush();

        StringBuffer resultBuffer = new StringBuffer();

        HLA.DEBUG3 = HLA.DEBUG;

        hla.countBubblesAndMerge(list, resultBuffer);

        hla.writeResults(resultBuffer, resultWriter);
    } catch (Exception e) {
        e.printStackTrace();
        HLA.log.outToFile();
        System.exit(-1);
    }
    /*printingWeights*/
    //hla.printWeights();
    HLA.log.outToFile();
    HLA.log.appendln("NEW_NODE_ADDED:\t" + HLA.NEW_NODE_ADDED);
    HLA.log.appendln("HOPPPING:\t" + HLA.HOPPING);
    HLA.log.appendln("INSERTION_NODE_ADDED:\t" + HLA.INSERTION_NODE_ADDED);
    HLA.log.appendln("INSERTION_WITH_NO_NEW_NODE:\t" + HLA.INSERTION_WITH_NO_NEW_NODE);
    HLA.log.appendln("INSERTION_COUNTS:\t" + HLA.INSERTION);
}

From source file:io.personium.recovery.Recovery.java

/**
 * main.
 * @param args command line arguments
 */
public static void main(String[] args) {
    loadProperties();
    Option optIndex = new Option("i", "index", true, "name(s) of the index to recover");
    Option optProp = new Option("p", "prop", true, "path to the properties file");
    // The "t" (type) option is kept only for backward compatibility and is ignored.
    Option optType = new Option("t", "type", true, "type to recover (deprecated, ignored)");
    Option optClear = new Option("c", "clear", false, "clear the Elasticsearch index before recovery");
    Option optReplicas = new Option("r", "replicas", true, "number of replicas");
    Option optVersion = new Option("v", "version", false, "print the version and exit");
    // optIndex.setRequired(true);
    // optProp.setRequired(true);
    Options options = new Options();
    options.addOption(optIndex);
    options.addOption(optProp);
    options.addOption(optType);
    options.addOption(optClear);
    options.addOption(optReplicas);
    options.addOption(optVersion);
    CommandLineParser parser = new GnuParser();
    CommandLine commandLine = null;
    try {
        commandLine = parser.parse(options, args, true);
    } catch (ParseException e) {
        (new HelpFormatter()).printHelp("io.personium.recovery.Recovery", options);
        log.warn("Recovery failure");
        System.exit(1);
    }

    if (commandLine.hasOption("v")) {
        log.info("Version:" + versionNumber);
        System.exit(0);
    }
    if (!commandLine.hasOption("p")) {
        (new HelpFormatter()).printHelp("io.personium.recovery.Recovery", options);
        log.warn("Recovery failure");
        System.exit(1);
    }
    if (commandLine.hasOption("t")) {
        log.info("Command line option \"t\" or \"type\" is deprecated. Option ignored.");
    }
    if (!commandLine.hasOption("r")) {
        (new HelpFormatter()).printHelp("io.personium.recovery.Recovery", options);
        log.warn("Command line option \"r\" is required.");
        System.exit(1);
    }

    RecoveryManager recoveryManager = new RecoveryManager();
    // index names to recover
    recoveryManager.setIndexNames(commandLine.getOptionValue("i"));
    // whether to clear Elasticsearch data before recovery
    recoveryManager.setClear(commandLine.hasOption("c"));

    // The replica count ("r") must parse as a non-negative integer.
    try {
        int replicas = Integer.parseInt(commandLine.getOptionValue("r"));
        if (replicas < 0) {
            log.warn("Command line option \"r\"'s value is not integer.");
            System.exit(1);
        }
        recoveryManager.setReplicas(replicas);
    } catch (NumberFormatException e) {
        log.warn("Command line option \"r\"'s value is not integer.");
        System.exit(1);
    }

    try {
        // load the properties file given with -p
        Properties properties = new Properties();
        properties.load(new FileInputStream(commandLine.getOptionValue("p")));
        if ((!properties.containsKey(ES_HOSTS)) || (!properties.containsKey(ES_CLUSTER_NAME))
                || (!properties.containsKey(ADS_JDBC_URL)) || (!properties.containsKey(ADS_JDBC_USER))
                || (!properties.containsKey(ADS_JDBC_PASSWORD)) || (!properties.containsKey(ES_ROUTING_FLAG))) {
            log.warn("properties file error");
            log.warn("Recovery failure");
            System.exit(1);
        } else {
            recoveryManager.setEsHosts(properties.getProperty(ES_HOSTS));
            recoveryManager.setEsClusetrName(properties.getProperty(ES_CLUSTER_NAME));
            recoveryManager.setAdsJdbcUrl(properties.getProperty(ADS_JDBC_URL));
            recoveryManager.setAdsUser(properties.getProperty(ADS_JDBC_USER));
            recoveryManager.setAdsPassword(properties.getProperty(ADS_JDBC_PASSWORD));
            recoveryManager.setExecuteCnt(properties.getProperty(EXECUTE_COUNT));
            recoveryManager.setCheckCount(properties.getProperty(CHECK_COUNT));
            recoveryManager.setUnitPrefix(properties.getProperty(UNIT_PREFIX));
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        log.warn("properties file error");
        log.warn("Recovery failure");
        System.exit(1);
    } catch (IOException e) {
        e.printStackTrace();
        log.warn("properties file error");
        log.warn("Recovery failure");
        System.exit(1);
    }

    String[] indexList = recoveryManager.getIndexNames();
    boolean isClear = recoveryManager.isClear();
    if (isClear && (indexList != null && null != indexList[0])) {
        String ad = recoveryManager.getUnitPrefix() + "_" + EsIndex.CATEGORY_AD;
        if (Arrays.asList(indexList).contains(ad)) {
            log.warn("Cannot specify both -c and -i " + recoveryManager.getUnitPrefix() + "_ad option.");
            log.warn("Recovery failure");
            System.exit(1);
        }
    }

    // acquire the lock that prevents two Recovery processes from running at the same time
    try {
        LockUtility.lock();
    } catch (AlreadyStartedException e) {
        log.info("Recovery has already started");
        log.info("Recovery failure");
        return;
    } catch (Exception e) {
        log.error("Failed to get lock for the double start control");
        e.printStackTrace();
        LockUtility.release();
        log.error("Recovery failure");
        System.exit(1);
    }

    // run the recovery
    try {
        recoveryManager.recovery();
    } catch (Exception e) {
        LockUtility.release();
        log.error("Recovery failure");
        System.exit(1);
    }
    LockUtility.release();
    log.info("Recovery Success");
    return;
}

From source file:com.netscape.cms.servlet.test.DRMTest.java

public static void main(String args[]) throws InvalidKeyException, NoSuchAlgorithmException,
        InvalidKeySpecException, SignatureException, IOException {
    String host = null;
    String port = null;
    String token_pwd = null;
    String db_dir = "./";
    String protocol = "http";
    String clientCertNickname = "KRA Administrator of Instance pki-kra's SjcRedhat Domain ID";

    // parse command line arguments
    Options options = new Options();
    options.addOption("h", true, "Hostname of the DRM");
    options.addOption("p", true, "Port of the DRM");
    options.addOption("w", true, "Token password");
    options.addOption("d", true, "Directory for tokendb");
    options.addOption("s", true, "Attempt Optional Secure SSL connection");
    options.addOption("c", true, "Optional SSL Client cert Nickname");

    try {
        CommandLineParser parser = new PosixParser();
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption("h")) {
            host = cmd.getOptionValue("h");
        } else {
            System.err.println("Error: no hostname provided.");
            usage(options);
        }

        if (cmd.hasOption("p")) {
            port = cmd.getOptionValue("p");
        } else {
            System.err.println("Error: no port provided");
            usage(options);
        }

        if (cmd.hasOption("w")) {
            token_pwd = cmd.getOptionValue("w");
        } else {
            System.err.println("Error: no token password provided");
            usage(options);
        }

        if (cmd.hasOption("d")) {
            db_dir = cmd.getOptionValue("d");
        }

        if (cmd.hasOption("s")) {
            if (cmd.getOptionValue("s") != null && cmd.getOptionValue("s").equals("true")) {
                protocol = "https";
            }
        }

        if (cmd.hasOption("c")) {
            String nick = cmd.getOptionValue("c");

            if (nick != null && protocol.equals("https")) {
                clientCertNickname = nick;
            }
        }

    } catch (ParseException e) {
        System.err.println("Error in parsing command line options: " + e.getMessage());
        usage(options);
    }

    // used for crypto operations
    byte iv[] = { 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1 };

    try {
        iv = genIV(8);
    } catch (Exception e) {
        log("Can't generate initialization vector use default: " + e.toString());
    }

    // used for wrapping to send data to DRM
    String transportCert = null;

    // Data to be archived
    SymmetricKey vek = null;
    String passphrase = null;

    // Session keys and passphrases for recovery
    SymmetricKey sessionKey = null;
    byte[] wrappedRecoveryKey = null;
    String recoveryPassphrase = null;
    byte[] wrappedRecoveryPassphrase = null;

    // retrieved data (should match archived data)
    byte[] encryptedData = null;
    String recoveredKey = null;

    // various ids used in recovery/archival operations
    KeyId keyId = null;
    String clientKeyId = null;
    RequestId recoveryRequestId = null;

    // Variables for data structures from calls
    KeyRequestResponse requestResponse = null;
    Key keyData = null;
    KeyInfo keyInfo = null;

    // Initialize token
    try {
        CryptoManager.initialize(db_dir);
    } catch (AlreadyInitializedException e) {
        // it is ok if it is already initialized
    } catch (Exception e) {
        log("INITIALIZATION ERROR: " + e.toString());
        System.exit(1);
    }

    // Set base URI and get client

    KRAClient client;
    SystemCertClient systemCertClient;
    KeyClient keyClient;
    NSSCryptoProvider nssCrypto;
    try {
        ClientConfig config = new ClientConfig();
        config.setServerURI(protocol + "://" + host + ":" + port + "/kra");
        config.setCertNickname(clientCertNickname);
        config.setCertDatabase(db_dir);
        config.setCertPassword(token_pwd);
        nssCrypto = new NSSCryptoProvider(config);

        client = new KRAClient(new PKIClient(config, nssCrypto));
        systemCertClient = (SystemCertClient) client.getClient("systemcert");
        keyClient = (KeyClient) client.getClient("key");

    } catch (Exception e) {
        e.printStackTrace();
        return;
    }

    // Test 1: Get transport certificate from DRM
    transportCert = systemCertClient.getTransportCert().getEncoded();
    transportCert = transportCert.substring(CertData.HEADER.length(), transportCert.indexOf(CertData.FOOTER));
    keyClient.setTransportCert(transportCert);

    log("Transport Cert retrieved from DRM: " + transportCert);

    // Test 2: Get list of completed key archival requests
    log("\n\nList of completed archival requests");
    KeyRequestInfoCollection list = keyClient.listRequests("complete", "securityDataEnrollment");
    if (list.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter = list.getEntries().iterator();
        while (iter.hasNext()) {
            KeyRequestInfo info = iter.next();
            printRequestInfo(info);
        }
    }

    // Test 3: Get list of key recovery requests
    log("\n\nList of completed recovery requests");
    KeyRequestInfoCollection list2 = keyClient.listRequests("complete", "securityDataRecovery");
    if (list2.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter2 = list2.getEntries().iterator();
        while (iter2.hasNext()) {
            KeyRequestInfo info = iter2.next();
            printRequestInfo(info);
        }
    }

    // Test 4: Generate and archive a symmetric key
    log("Archiving symmetric key");
    clientKeyId = "UUID: 123-45-6789 VEK " + Calendar.getInstance().getTime().toString();
    try {
        vek = nssCrypto.generateSessionKey();
        byte[] encoded = nssCrypto.createPKIArchiveOptions(transportCert, vek, null,
                KeyRequestResource.DES3_ALGORITHM, 0, iv);

        KeyRequestResponse info = keyClient.archivePKIOptions(clientKeyId,
                KeyRequestResource.SYMMETRIC_KEY_TYPE, KeyRequestResource.DES3_ALGORITHM, 0, encoded);
        log("Archival Results:");
        printRequestInfo(info.getRequestInfo());
        keyId = info.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving symmetric key:" + e.getMessage());
        e.printStackTrace();
    }

    //Test 5: Get keyId for active key with client ID

    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    KeyId keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }

    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: keyids from search and archival match.");
    }

    // Test 6: Submit a recovery request for the symmetric key using a session key
    log("Submitting a recovery request for the  symmetric key using session key");
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = CryptoUtil.wrapSymmetricKey(nssCrypto.getManager(), nssCrypto.getToken(),
                transportCert, sessionKey);
        keyData = keyClient.retrieveKey(keyId, wrappedRecoveryKey);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using session key: " + e.getMessage());
    }

    encryptedData = keyData.getEncryptedData();

    try {
        recoveredKey = Utils.base64encode(nssCrypto.unwrapWithSessionKey(encryptedData, sessionKey,
                KeyRequestResource.DES3_ALGORITHM, keyData.getNonceData()));
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recovered and archived keys match!");
    }

    // Test 7: Submit a recovery request for the symmetric key using a passphrase
    log("Submitting a recovery request for the  symmetric key using a passphrase");
    recoveryPassphrase = "Gimme me keys please";

    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryPassphrase = nssCrypto.wrapWithSessionKey(recoveryPassphrase, iv, sessionKey,
                KeyRequestResource.DES3_ALGORITHM);
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);

        keyData = keyClient.retrieveKeyUsingWrappedPassphrase(keyId, wrappedRecoveryKey,
                wrappedRecoveryPassphrase, iv);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using passphrase" + e.toString());
        e.printStackTrace();
    }

    encryptedData = keyData.getEncryptedData();

    try {
        recoveredKey = Utils.base64encode(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase));
    } catch (Exception e) {
        log("Error: unable to unwrap key using passphrase");
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recovered and archived keys do match!");
    }

    passphrase = "secret12345";
    // Test 8: Generate and archive a passphrase
    clientKeyId = "UUID: 123-45-6789 RKEK " + Calendar.getInstance().getTime().toString();
    try {
        requestResponse = keyClient.archivePassphrase(clientKeyId, passphrase);

        log("Archival Results:");
        printRequestInfo(requestResponse.getRequestInfo());
        keyId = requestResponse.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving symmetric key:" + e.toString());
        e.printStackTrace();
    }

    //Test 9: Get keyId for active passphrase with client ID
    log("Getting key ID for passphrase");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }

    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: key ids from search and archival do match!");
    }

    // Test 10: Submit a recovery request for the passphrase using a session key
    log("Submitting a recovery request for the passphrase using session key");
    sessionKey = null;
    wrappedRecoveryKey = null;
    try {
        keyData = keyClient.retrieveKeyByPassphrase(keyId, recoveryPassphrase);
    } catch (Exception e) {
        log("Exception in recovering passphrase using session key: " + e.getMessage());
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 11: Submit a recovery request for the passphrase using a passphrase
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);
        wrappedRecoveryPassphrase = nssCrypto.wrapWithSessionKey(recoveryPassphrase, iv, sessionKey,
                KeyRequestResource.DES3_ALGORITHM);
        keyData = keyClient.retrieveKeyUsingWrappedPassphrase(keyId, wrappedRecoveryKey,
                wrappedRecoveryPassphrase, iv);
    } catch (Exception e1) {
        e1.printStackTrace();
        System.out.println("Test 17: " + e1.getMessage());
        System.exit(-1);
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Error: cannot unwrap key using passphrase");
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 12: Get key
    log("Getting passphrase: " + keyId);
    try {
        keyData = keyClient.retrieveKeyByPassphrase(keyId, recoveryPassphrase);
    } catch (Exception e1) {
        e1.printStackTrace();
    }
    encryptedData = keyData.getEncryptedData();
    try {
        recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8");
    } catch (Exception e) {
        log("Error: Can't unwrap recovered key using passphrase");
        e.printStackTrace();
    }

    if (recoveredKey == null || !recoveredKey.equals(passphrase)) {
        log("Error: recovered and archived passphrases do not match!");
    } else {
        log("Success: recovered and archived passphrases do match!");
    }

    // Test 13: Get non-existent request
    RequestId requestId = new RequestId("0xabcdef");
    log("Getting non-existent request: " + requestId.toHexString());
    try {
        keyClient.getRequestInfo(requestId);
        log("Error: getting non-existent request does not throw an exception");
    } catch (RequestNotFoundException e) {
        log("Success: getting non-existent request throws an exception: " + e.getMessage() + " ("
                + e.getRequestId().toHexString() + ")");
    }

    // Test 14: Request x509 key recovery
    // This test requires to retrieve keyId and matching certificate
    // from installed instances of CA and DRM
    String keyID = "1";
    String b64Certificate = "MIIC+TCCAeGgAwIBAgIBDDANBgkqhkiG9w0BAQsFADBOMSswKQYDVQQKDCJ1c2Vy"
            + "c3lzLnJlZGhhdC5jb20gU2VjdXJpdHkgRG9tYWluMR8wHQYDVQQDDBZDQSBTaWdu"
            + "aW5nIENlcnRpZmljYXRlMB4XDTEzMTAyNTE5MzQwM1oXDTE0MDQyMzE5MzQwM1ow"
            + "EzERMA8GCgmSJomT8ixkAQEMAXgwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB"
            + "ALhLfGmKvxFsKXPh49q1QsluXU3WlyS1XnpDLgOAhgTNgO4sG6CpPdv6hZYIvQBb"
            + "ZQ5bhuML+NXK+Q+EIiNk1cUTxgL3a30sPzy6QaFWxwM8i4uXm4nCBYv7T+n4V6/O"
            + "xHIM2Ch/dviAb3vz+M9trErv9t+d2H8jNXT3sHuDb/kvAgMBAAGjgaAwgZ0wHwYD"
            + "VR0jBBgwFoAUh1cxWFRY+nMsx4odQQI1GqyFxP8wSwYIKwYBBQUHAQEEPzA9MDsG"
            + "CCsGAQUFBzABhi9odHRwOi8vZG9ndGFnMjAudXNlcnN5cy5yZWRoYXQuY29tOjgw"
            + "ODAvY2Evb2NzcDAOBgNVHQ8BAf8EBAMCBSAwHQYDVR0lBBYwFAYIKwYBBQUHAwIG"
            + "CCsGAQUFBwMEMA0GCSqGSIb3DQEBCwUAA4IBAQCvmbUzQOouE2LgQQcKfmgwwJMJ"
            + "9tMrPwDUtyFdaIFoPL4uZaujSscaN4IWK2r5vIMJ65jwYCI7sI9En2ZfO28J9dQj"
            + "lpqu6TaJ+xtaMk7OvXpVB7lJk73HAttMGjETlkoq/6EjxcugmJsDqVD0b2tO7Vd0"
            + "hroBe2uPDHM2ASewZF415lUcRh0URtmxSazTInbyxpmy1wgSJQ0C6fMCeT+hUFlA"
            + "0P4k1TIprapGVq7FpKcqlhK2gTBfTSnoO7gmXG/9MxJiYpb/Aph8ptXq6quHz1Mj"
            + "greWr3xTsy6gF2yphUEkGHh4v22XvK+FLx9Jb6zloMWA2GG9gpUpvMnl1fH4";

    log("Requesting X509 key recovery.");
    recoveryRequestId = keyClient.recoverKey(new KeyId(keyID), null, null, null, b64Certificate)
            .getRequestInfo().getRequestId();
    log("Requesting X509 key recovery request: " + recoveryRequestId);

    // Test 15: Approve x509 key recovery
    log("Approving X509 key recovery request: " + recoveryRequestId);
    keyClient.approveRequest(recoveryRequestId);

    // Test 16: Recover x509 key
    log("Recovering X509 key based on request: " + recoveryRequestId);
    try {
        // KeyData recoveredX509Key = client.recoverKey(recoveryRequestId, "netscape");
        // log("Success: X509Key recovered: "+ recoveredX509Key.getP12Data());
    } catch (RequestNotFoundException e) {
        log("Error: recovering X509Key");
    }

    // Test 1: Get transport certificate from DRM
    transportCert = systemCertClient.getTransportCert().getEncoded();
    transportCert = transportCert.substring(CertData.HEADER.length(), transportCert.indexOf(CertData.FOOTER));

    log("Transport Cert retrieved from DRM: " + transportCert);

    // Test 17: Get list of completed key archival requests
    log("\n\nList of completed archival requests");
    list = keyClient.listRequests("complete", IRequest.SYMKEY_GENERATION_REQUEST);
    if (list.getTotal() == 0) {
        log("No requests found");
    } else {
        Iterator<KeyRequestInfo> iter = list.getEntries().iterator();
        while (iter.hasNext()) {
            KeyRequestInfo info = iter.next();
            printRequestInfo(info);
        }
    }

    // test 18: Generate symmetric key
    clientKeyId = "Symmetric Key #1234f " + Calendar.getInstance().getTime().toString();
    List<String> usages = new ArrayList<String>();
    usages.add(SymKeyGenerationRequest.DECRYPT_USAGE);
    usages.add(SymKeyGenerationRequest.ENCRYPT_USAGE);
    KeyRequestResponse genKeyResponse = keyClient.generateSymmetricKey(clientKeyId,
            KeyRequestResource.AES_ALGORITHM, 128, usages, null);
    printRequestInfo(genKeyResponse.getRequestInfo());
    keyId = genKeyResponse.getKeyId();

    // test 19: Get keyId for active key with client ID
    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }

    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: keyids from search and archival match.");
    }

    // Test 20: Submit a recovery request for the symmetric key using a session key
    log("Submitting a recovery request for the  symmetric key using session key");
    try {
        sessionKey = nssCrypto.generateSessionKey();
        wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert);
        keyData = keyClient.retrieveKey(keyId, wrappedRecoveryKey);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using session key: " + e.getMessage());
    }

    encryptedData = keyData.getEncryptedData();

    try {
        recoveredKey = new String(nssCrypto.unwrapWithSessionKey(encryptedData, sessionKey,
                KeyRequestResource.DES3_ALGORITHM, keyData.getNonceData()));
    } catch (Exception e) {
        log("Exception in unwrapping key: " + e.toString());
        e.printStackTrace();
    }

    // test 21: Generate symmetric key - invalid algorithm
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1235", "AFS", 128, usages, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // test 22: Generate symmetric key - invalid key size
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1236", "AES", 0, usages, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // test 23: Generate symmetric key - usages not defined
    try {
        genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1236", "DES", 56, null, null);
    } catch (Exception e) {
        log("Exception: " + e);
    }

    // Test 24: Generate and archive a symmetric key of type AES
    log("Archiving symmetric key");
    clientKeyId = "UUID: 123-45-6789 VEK " + Calendar.getInstance().getTime().toString();
    try {
        vek = nssCrypto.generateSymmetricKey(KeyRequestResource.AES_ALGORITHM, 128);

        byte[] encoded = CryptoUtil.createPKIArchiveOptions(nssCrypto.getManager(), nssCrypto.getToken(),
                transportCert, vek, null, KeyGenAlgorithm.DES3, 0, new IVParameterSpec(iv));

        KeyRequestResponse response = keyClient.archivePKIOptions(clientKeyId,
                KeyRequestResource.SYMMETRIC_KEY_TYPE, KeyRequestResource.AES_ALGORITHM, 128, encoded);
        log("Archival Results:");
        printRequestInfo(response.getRequestInfo());
        keyId = response.getKeyId();
    } catch (Exception e) {
        log("Exception in archiving symmetric key:" + e.getMessage());
        e.printStackTrace();
    }

    //Test 25: Get keyId for active key with client ID
    log("Getting key ID for symmetric key");
    keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
    printKeyInfo(keyInfo);
    keyId2 = keyInfo.getKeyId();
    if (keyId2 == null) {
        log("No archived key found");
    } else {
        log("Archived Key found: " + keyId);
    }

    if (!keyId.equals(keyId2)) {
        log("Error: key ids from search and archival do not match");
    } else {
        log("Success: keyids from search and archival match.");
    }

    // Test 26: Submit a recovery request for the symmetric key
    log("Submitting a recovery request for the  symmetric key without using session key");
    try {
        keyData = keyClient.retrieveKey(keyId);
    } catch (Exception e) {
        log("Exception in recovering symmetric key using session key: " + e.getMessage());
    }

    // Since no session key is provided externally, the retrieveKey method
    // generates a session key, wraps it with transport cert and completes the request.
    // The encrypted data is then unwrapped using the temporary session key and set to
    // the attribute privateData.
    recoveredKey = Utils.base64encode(keyData.getData());

    if (!recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) {
        log("Error: recovered and archived keys do not match!");
    } else {
        log("Success: recoverd and archived keys match!");
    }

    // Test 27: Get key info
    log("getting key info for existing key");
    printKeyInfo(keyClient.getKeyInfo(keyId));

    //Test 28: Modify status
    log("modify the key status");
    keyClient.modifyKeyStatus(keyId, KeyResource.KEY_STATUS_INACTIVE);
    keyInfo = keyClient.getKeyInfo(keyId);
    printKeyInfo(keyInfo);

    //Test 29:  Confirm no more active keys with this ID
    log("look for active keys with this id");
    clientKeyId = keyInfo.getClientKeyID();
    try {
        keyInfo = keyClient.getActiveKeyInfo(clientKeyId);
        printKeyInfo(keyInfo);
    } catch (ResourceNotFoundException e) {
        log("Success: ResourceNotFound exception thrown: " + e);
    }

    // Test asymmetric key generation.

    String[] algs = { "RSA", "DSA" };
    for (int i = 0; i < algs.length; i++) {
        // Test 30: Generate asymmetric keys for the current algorithm
        System.out.println("\nTesting asymmetric key generation for algorithm " + algs[i]);
        clientKeyId = "AsymKey #" + Calendar.getInstance().getTimeInMillis();
        usages.clear();
        usages.add(AsymKeyGenerationRequest.SIGN);
        usages.add(AsymKeyGenerationRequest.VERIFY);
        KeyRequestResponse response = keyClient.generateAsymmetricKey(clientKeyId, algs[i], 1024, usages, null);
        printRequestInfo(response.getRequestInfo());
        System.out.println();

        // Test 31: Get information of the newly generated asymmetric keys
        System.out.println("Fetch information of the newly generated asymmetric keys.");
        System.out.println();
        KeyInfo info = keyClient.getKeyInfo(response.getKeyId());
        printKeyInfo(info);
        System.out.println();

        // Test 32: Retrieve private key data
        System.out.println("Retrieving and verifying the generated private key.");
        try {
            keyData = keyClient.retrieveKey(response.getKeyId());
        } catch (Exception e) {
            log("Exception retrieving the private key data.");
            e.printStackTrace();
        }

        // Test 33: Verify the generated key pair.
        if (isKeyPairValid(algs[i], keyData.getData(), info.getPublicKey())) {
            log("The key pair generated using " + algs[i] + " algorithm is valid.");
        } else {
            log("The key pair generated using " + algs[i] + " algorithm is invalid.");
        }
        System.out.println();
    }

    // Test 34:
}
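
DRMTest parses its options with a PosixParser and falls back to a usage message when parsing fails. The sketch below isolates that pattern and, since this page documents ParseException.printStackTrace, prints the full backtrace before showing usage; the two options are a reduced, hypothetical subset of the listing's option set.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class ParseExceptionTraceDemo {

    public static void main(String[] args) {
        Options options = new Options();
        options.addOption("h", true, "Hostname of the DRM");
        options.addOption("p", true, "Port of the DRM");

        CommandLineParser parser = new PosixParser();
        try {
            CommandLine cmd = parser.parse(options, args);
            System.out.println("host = " + cmd.getOptionValue("h", "localhost"));
            System.out.println("port = " + cmd.getOptionValue("p", "8443"));
        } catch (ParseException e) {
            // Dump the full backtrace to standard error, then show usage.
            e.printStackTrace();
            new HelpFormatter().printHelp("ParseExceptionTraceDemo", options);
            System.exit(1);
        }
    }
}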

From source file:com.versusoft.packages.ooo.odt2daisy.gui.CommandLineGUI.java

public static void main(String args[]) throws IOException {

    Handler fh = new FileHandler(LOG_FILENAME_PATTERN);
    fh.setFormatter(new SimpleFormatter());

    //removeAllLoggersHandlers(Logger.getLogger(""));

    Logger.getLogger("").addHandler(fh);
    Logger.getLogger("").setLevel(Level.FINEST);

    Options options = new Options();

    Option option1 = new Option("in", "name of ODT file (required)");
    option1.setRequired(true);
    option1.setArgs(1);

    Option option2 = new Option("out", "name of DAISY DTB file (required)");
    option2.setRequired(true);
    option2.setArgs(1);

    Option option3 = new Option("h", "show this help");
    option3.setArgs(Option.UNLIMITED_VALUES);

    Option option4 = new Option("alt", "use alternate Level Markup");

    Option option5 = new Option("u", "UID of DAISY DTB (optional)");
    option5.setArgs(1);

    Option option6 = new Option("t", "Title of DAISY DTB");
    option6.setArgs(1);

    Option option7 = new Option("c", "Creator of DAISY DTB");
    option7.setArgs(1);

    Option option8 = new Option("p", "Publisher of DAISY DTB");
    option8.setArgs(1);

    Option option9 = new Option("pr", "Producer of DAISY DTB");
    option9.setArgs(1);

    Option option10 = new Option("pic", "set Picture directory");
    option10.setArgs(1);

    Option option11 = new Option("page", "enable pagination");
    option11.setArgs(0);

    Option option12 = new Option("css", "write CSS file");
    option12.setArgs(0);

    options.addOption(option1);
    options.addOption(option2);
    options.addOption(option3);
    options.addOption(option4);
    options.addOption(option5);
    options.addOption(option6);
    options.addOption(option7);
    options.addOption(option8);
    options.addOption(option9);
    options.addOption(option10);
    options.addOption(option11);
    options.addOption(option12);

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        //System.out.println("***ERROR: " + e.getClass() + ": " + e.getMessage());

        printHelp();
        return;
    }

    if (cmd.hasOption("help")) {
        printHelp();
        return;
    }

    try {

        Odt2Daisy odt2daisy = new Odt2Daisy(cmd.getOptionValue("in")); //@todo add initial output directory URL?
        odt2daisy.init();

        if (odt2daisy.isEmptyDocument()) {
            logger.info("Cannot convert empty documents. Export Aborted...");
            System.exit(1);
        }

        //System.out.println("Metadatas");
        //System.out.println("- title: " + odt2daisy.getTitleMeta());
        //System.out.println("- creator: " + odt2daisy.getCreatorMeta());

        if (!odt2daisy.isUsingHeadings()) {
            logger.info("You SHOULD use Heading styles in your document. Export in a single level.");
        }
        //@todo Warning for incompatible image formats should go here. See UnoGui.java.

        if (cmd.hasOption("u")) {
            //System.out.println("arg uid:"+cmd.getOptionValue("u"));
            odt2daisy.setUidParam(cmd.getOptionValue("u"));
        }

        if (cmd.hasOption("t")) {
            //System.out.println("arg title:"+cmd.getOptionValue("t"));
            odt2daisy.setTitleParam(cmd.getOptionValue("t"));
        }

        if (cmd.hasOption("c")) {
            //System.out.println("arg creator:"+cmd.getOptionValue("c"));
            odt2daisy.setCreatorParam(cmd.getOptionValue("c"));
        }

        if (cmd.hasOption("p")) {
            //System.out.println("arg publisher:"+cmd.getOptionValue("p"));
            odt2daisy.setPublisherParam(cmd.getOptionValue("p"));
        }

        if (cmd.hasOption("pr")) {
            //System.out.println("arg producer:"+cmd.getOptionValue("pr"));
            odt2daisy.setProducerParam(cmd.getOptionValue("pr"));
        }

        if (cmd.hasOption("alt")) {
            //System.out.println("arg alt:"+cmd.getOptionValue("alt"));
            odt2daisy.setUseAlternateLevelParam(true);
        }

        if (cmd.hasOption("css")) {
            odt2daisy.setWriteCSSParam(true);
        }

        if (cmd.hasOption("page")) {
            odt2daisy.paginationProcessing();
        }

        odt2daisy.correctionProcessing();

        if (cmd.hasOption("pic")) {

            odt2daisy.convertAsDTBook(cmd.getOptionValue("out"), cmd.getOptionValue("pic"));

        } else {

            logger.info("Language detected: " + odt2daisy.getLangParam());
            odt2daisy.convertAsDTBook(cmd.getOptionValue("out"), Configuration.DEFAULT_IMAGE_DIR);
        }

        boolean valid = odt2daisy.validateDTD(cmd.getOptionValue("out"));

        if (valid) {

            logger.info("DAISY DTBook produced is valid against DTD - Congratulations !");

        } else {

            logger.info("DAISY Book produced isn't valid against DTD - You SHOULD NOT use this DAISY Book !");
            logger.info("Error at line: " + odt2daisy.getErrorHandler().getLineNumber());
            logger.info("Error Message: " + odt2daisy.getErrorHandler().getMessage());
        }

    } catch (Exception e) {

        e.printStackTrace();

    } finally {

        if (fh != null) {
            fh.flush();
            fh.close();
        }
    }

}
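
The odt2daisy front end above marks -in and -out as required, so a missing option surfaces as a MissingOptionException (a ParseException subclass) from BasicParser. A minimal sketch of that required-option behaviour, using a single hypothetical -in option:

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.MissingOptionException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class RequiredOptionDemo {

    public static void main(String[] args) {
        Option in = new Option("in", "name of input file (required)");
        in.setRequired(true);
        in.setArgs(1);

        Options options = new Options();
        options.addOption(in);

        CommandLineParser parser = new BasicParser();
        try {
            CommandLine cmd = parser.parse(options, args);
            System.out.println("input = " + cmd.getOptionValue("in"));
        } catch (MissingOptionException e) {
            // Thrown (as a ParseException subclass) when -in is absent.
            e.printStackTrace();
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }
}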

From source file:com.emc.ecs.smart.SmartUploader.java

/**
 * Use commons-cli to parse command line arguments and start the upload.
 */
public static void main(String[] args) {
    Options opts = new Options();

    // Optional params
    opts.addOption(Option.builder().longOpt(THREADS_OPT).hasArg().argName("thread-count")
            .desc("Sets the number of threads to upload concurrently.  Default is " + DEFAULT_THREADS).build());
    opts.addOption(Option.builder().longOpt(SIMPLE_OPT)
            .desc("For comparison purposes, do a simple single-stream upload instead of a concurrent upload.")
            .build());
    opts.addOption(Option.builder().longOpt(SEGMENT_SIZE_OPT).hasArg().argName("bytes")
            .desc("Size of each upload segment in bytes.  Defaults to 2MB for uploads less than 128MB and "
                    + "128MB for objects greater than or equal to 128MB.")
            .build());
    opts.addOption(Option.builder().longOpt(FILE_OPT).hasArg().argName("path-to-file").required()
            .desc("(Required) The path to the file to upload.").build());
    opts.addOption(Option.builder().longOpt(URL_OPT).hasArg().argName("presigned-upload-url").required()
            .desc("URL used to PUT the file to create an object").build());
    opts.addOption(Option.builder().longOpt(CONTENT_TYPE_OPT).hasArg().argName("mimetype")
            .desc("Value to use for the Content-Type header.  Defaults to application/octet-stream.").build());
    opts.addOption(Option.builder().longOpt(VERIFY_URL_OPT).hasArg().argName("url")
            .desc("If specified, read back the object from this URL and compare with the local file.").build());
    opts.addOption(Option.builder().longOpt(LOG_LEVEL_OPT).hasArg().argName("level").type(Level.class)
            .desc("Sets the log level: DEBUG, INFO, WARN, ERROR, or FATAL.  Default is ERROR.").build());
    opts.addOption(Option.builder().longOpt(LOG_FILE_OPT).hasArg().argName("filename")
            .desc("Filename to write log messages.  Setting to STDOUT or STDERR will write log messages to the "
                    + "appropriate process stream.  Default is STDERR.")
            .build());
    opts.addOption(Option.builder().longOpt(LOG_PATTERN_OPT).hasArg().argName("log4j-pattern")
            .desc("Sets the " + "Log4J pattern to use when writing log messages.  Defaults to "
                    + "'%d{yyyy-MM-dd HH:mm:ss} %-5p [%t] %c{1}:%L - %m%n'")
            .build());
    opts.addOption(Option.builder().longOpt(STANDARD_MD5)
            .desc("Use standard Java IO for local checksum " + "instead of NIO").build());
    opts.addOption(Option.builder().longOpt(RETRY_DELAY_OPT).hasArg().argName("milliseconds")
            .desc("Initial retry delay in ms.  Subsequent retries will continue to double this exponentially.  "
                    + "The default value is " + RETRY_DELAY)
            .build());
    opts.addOption(Option.builder().longOpt(RETRY_COUNT_OPT).hasArg().argName("count")
            .desc("Number of times to "
                    + "retry failed transactions.  Set to zero to disable retries.  Defaults to " + RETRY_COUNT)
            .build());
    opts.addOption(Option.builder().longOpt(SAVE_MD5).hasArg().argName("path-to-file")
            .desc("Sets the file location where the local MD5 hash will be written.").build());

    DefaultParser dp = new DefaultParser();

    CommandLine cmd = null;
    try {
        cmd = dp.parse(opts, args);
    } catch (ParseException e) {
        System.err.println("Error: " + e.getMessage());
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("java -jar SmartUploader.jar", opts, true);
        System.exit(255);
    }

    // Required options.
    String url = cmd.getOptionValue(URL_OPT);
    String file = cmd.getOptionValue(FILE_OPT);

    SmartUploader uploader = null;
    try {
        uploader = new SmartUploader(new URL(url), new File(file).toPath());
    } catch (MalformedURLException e) {
        System.err.println("Could not parse upload URL: " + e.getMessage());
        System.exit(2);
    }

    if (cmd.hasOption(SEGMENT_SIZE_OPT)) {
        uploader.setSegmentSize(Integer.parseInt(cmd.getOptionValue(SEGMENT_SIZE_OPT)));
    }
    if (cmd.hasOption(THREADS_OPT)) {
        uploader.setThreadCount(Integer.parseInt(cmd.getOptionValue(THREADS_OPT)));
    }
    if (cmd.hasOption(SIMPLE_OPT)) {
        uploader.setSimpleMode(true);
    }
    if (cmd.hasOption(CONTENT_TYPE_OPT)) {
        uploader.setContentType(cmd.getOptionValue(CONTENT_TYPE_OPT));
    }

    if (cmd.hasOption(VERIFY_URL_OPT)) {
        try {
            uploader.setVerifyUrl(new URL(cmd.getOptionValue(VERIFY_URL_OPT)));
        } catch (MalformedURLException e) {
            System.err.println("Could not parse verify URL: " + e.getMessage());
            System.exit(3);
        }
    }

    if (cmd.hasOption(STANDARD_MD5)) {
        uploader.setStandardChecksum(true);
    }

    if (cmd.hasOption(RETRY_COUNT_OPT)) {
        uploader.setRetryCount(Integer.parseInt(cmd.getOptionValue(RETRY_COUNT_OPT)));
    }

    if (cmd.hasOption(RETRY_DELAY_OPT)) {
        uploader.setRetryDelay(Integer.parseInt(cmd.getOptionValue(RETRY_DELAY_OPT)));
    }

    if (cmd.hasOption(SAVE_MD5)) {
        uploader.setSaveMD5(cmd.getOptionValue(SAVE_MD5));
    }

    //
    // Configure Logging
    //

    // Pattern
    String layoutString = "%d{yyyy-MM-dd HH:mm:ss} %-5p [%t] %c{1}:%L - %m%n";
    if (cmd.hasOption(LOG_PATTERN_OPT)) {
        layoutString = cmd.getOptionValue(LOG_PATTERN_OPT);
    }
    PatternLayout layout = new PatternLayout(layoutString);

    // Appender
    String logFileName = "STDERR";
    if (cmd.hasOption(LOG_FILE_OPT)) {
        logFileName = cmd.getOptionValue(LOG_FILE_OPT);
    }
    Appender appender = null;
    if (logFileName.equals("STDERR")) {
        appender = new ConsoleAppender(layout, "System.err");
    } else if (logFileName.equals("STDOUT")) {
        appender = new ConsoleAppender(layout, "System.out");
    } else {
        // Just a regular file.
        try {
            appender = new FileAppender(layout, logFileName);
        } catch (IOException e) {
            System.err.println("FATAL: Could not configure appender");
            e.printStackTrace();
            System.exit(8);
        }
    }
    LogManager.getRootLogger().addAppender(appender);

    // Log level
    String logLevel = "ERROR";
    if (cmd.hasOption(LOG_LEVEL_OPT)) {
        logLevel = cmd.getOptionValue(LOG_LEVEL_OPT);
    }
    LogManager.getRootLogger().setLevel(Level.toLevel(logLevel));

    //
    // Start the upload
    //
    uploader.doUpload();

    System.exit(0);
}
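
SmartUploader builds its options with Option.builder() (commons-cli 1.3+) and exits with status 255 when DefaultParser throws a ParseException. The fragment below is a trimmed sketch of that builder/DefaultParser pattern with two hypothetical options; it is not the uploader's real option set.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class BuilderOptionsDemo {

    public static void main(String[] args) {
        Options opts = new Options();
        opts.addOption(Option.builder().longOpt("file").hasArg().argName("path-to-file").required()
                .desc("(Required) The path to the file to upload.").build());
        opts.addOption(Option.builder().longOpt("threads").hasArg().argName("thread-count")
                .desc("Number of concurrent upload threads.").build());

        CommandLine cmd = null;
        try {
            cmd = new DefaultParser().parse(opts, args);
        } catch (ParseException e) {
            System.err.println("Error: " + e.getMessage());
            new HelpFormatter().printHelp("java -jar BuilderOptionsDemo.jar", opts, true);
            System.exit(255);
        }
        System.out.println("file = " + cmd.getOptionValue("file"));
    }
}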

From source file:main.java.RMDupper.java

public static void main(String[] args) throws IOException {
    System.err.println("DeDup v" + VERSION);
    // the command line parameters
    Options helpOptions = new Options();
    helpOptions.addOption("h", "help", false, "show this help page");
    Options options = new Options();
    options.addOption("h", "help", false, "show this help page");
    options.addOption("i", "input", true,
            "the input file if this option is not specified,\nthe input is expected to be piped in");
    options.addOption("o", "output", true, "the output folder. Has to be specified if input is set.");
    options.addOption("m", "merged", false,
            "the input only contains merged reads.\n If this option is specified read names are not examined for prefixes.\n Both the start and end of the aligment are considered for all reads.");
    options.addOption("v", "version", false, "the version of DeDup.");
    HelpFormatter helpformatter = new HelpFormatter();
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(helpOptions, args);
        if (cmd.hasOption('h')) {
            helpformatter.printHelp(CLASS_NAME, options);
            System.exit(0);
        }
    } catch (ParseException e1) {
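        // ignored on purpose: this pass only checks for -h; the full option parse below reports errors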
    }

    String input = "";
    String outputpath = "";
    Boolean merged = Boolean.FALSE;
    try {
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption('i')) {
            input = cmd.getOptionValue('i');
            piped = false;
        }
        if (cmd.hasOption('o')) {
            outputpath = cmd.getOptionValue('o');
        }
        if (cmd.hasOption('m')) {
            merged = Boolean.TRUE;
        }
        if (cmd.hasOption('v')) {
            System.out.println("DeDup v" + VERSION);
            System.exit(0);
        }
    } catch (ParseException e) {
        helpformatter.printHelp(CLASS_NAME, options);
        System.err.println(e.getMessage());
        System.exit(0);
    }
    DecimalFormat df = new DecimalFormat("##.##");

    if (piped) {
        RMDupper rmdup = new RMDupper(System.in, System.out, merged);
        rmdup.readSAMFile();

        System.err.println("We are in piping mode!");
        System.err.println("Total reads: " + rmdup.dupStats.total + "\n");
        System.err.println("Reverse removed: " + rmdup.dupStats.removed_reverse + "\n");
        System.err.println("Forward removed: " + rmdup.dupStats.removed_forward + "\n");
        System.err.println("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
        System.err.println("Total removed: " + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_merged
                + rmdup.dupStats.removed_reverse) + "\n");
        if (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_forward
                + rmdup.dupStats.removed_reverse == 0) {
            System.err.println("Duplication Rate: " + df.format(0.00));
        } else {
            System.err.println("Duplication Rate: "
                    + df.format((double) (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                            + rmdup.dupStats.removed_forward) / (double) rmdup.dupStats.total));
        }

    } else {
        if (outputpath.length() == 0) {
            System.err.println("The output folder has to be specified");
            helpformatter.printHelp(CLASS_NAME, options);
            System.exit(0);
        }

        //Check whether we have a directory as output path, else produce error message and quit!

        File f = new File(outputpath);
        if (!f.isDirectory()) {
            System.err.println("The output folder should be a folder and not a file!");
            System.exit(0);
        }

        File inputFile = new File(input);
        File outputFile = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + "_rmdup.bam");
        File outputlog = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + ".log");
        File outputhist = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + ".hist");

        try {
            FileWriter fw = new FileWriter(outputlog);
            FileWriter histfw = new FileWriter(outputhist);
            BufferedWriter bfw = new BufferedWriter(fw);
            BufferedWriter histbfw = new BufferedWriter(histfw);

            RMDupper rmdup = new RMDupper(inputFile, outputFile, merged);
            rmdup.readSAMFile();
            rmdup.inputSam.close();
            rmdup.outputSam.close();

            bfw.write("Total reads: " + rmdup.dupStats.total + "\n");
            bfw.write("Reverse removed: " + rmdup.dupStats.removed_reverse + "\n");
            bfw.write("Forward removed: " + rmdup.dupStats.removed_forward + "\n");
            bfw.write("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
            bfw.write("Total removed: " + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_merged
                    + rmdup.dupStats.removed_reverse) + "\n");
            bfw.write("Duplication Rate: "
                    + df.format((double) (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                            + rmdup.dupStats.removed_forward) / (double) rmdup.dupStats.total));
            bfw.flush();
            bfw.close();

            histbfw.write(rmdup.oc.getHistogram());
            histbfw.flush();
            histbfw.close();

            System.out.println("Total reads: " + rmdup.dupStats.total + "\n");
            System.out.println("Unmerged removed: "
                    + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_reverse) + "\n");
            System.out.println("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
            System.out.println("Total removed: " + (rmdup.dupStats.removed_forward
                    + rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse) + "\n");
            if (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_forward
                    + rmdup.dupStats.removed_reverse == 0) {
                System.out.println("Duplication Rate: " + df.format(0.00));
            } else {
                System.out.println("Duplication Rate: "
                        + df.format((double) (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                                + rmdup.dupStats.removed_forward) / (double) rmdup.dupStats.total));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
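
DeDup parses twice: first against a help-only option set so -h wins even when other arguments are malformed, then against the full set. A compact sketch of that two-pass idea, with a single hypothetical -i option standing in for the real ones:

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class TwoPassHelpDemo {

    public static void main(String[] args) {
        Options helpOptions = new Options();
        helpOptions.addOption("h", "help", false, "show this help page");

        Options options = new Options();
        options.addOption("h", "help", false, "show this help page");
        options.addOption("i", "input", true, "the input file (reads stdin when omitted)");

        HelpFormatter formatter = new HelpFormatter();
        CommandLineParser parser = new BasicParser();

        // Pass 1: only look for -h; errors caused by other options are ignored here.
        try {
            CommandLine cmd = parser.parse(helpOptions, args);
            if (cmd.hasOption('h')) {
                formatter.printHelp("TwoPassHelpDemo", options);
                System.exit(0);
            }
        } catch (ParseException ignored) {
            // the full parse below reports any real problem
        }

        // Pass 2: parse the full option set.
        try {
            CommandLine cmd = parser.parse(options, args);
            System.out.println("input = " + cmd.getOptionValue('i', "<stdin>"));
        } catch (ParseException e) {
            formatter.printHelp("TwoPassHelpDemo", options);
            System.err.println(e.getMessage());
            System.exit(1);
        }
    }
}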