Example usage for org.apache.commons.cli ParseException getMessage

Introduction

This page lists example usages of org.apache.commons.cli ParseException.getMessage(), collected from the source files below.

Prototype

public String getMessage() 

Document

Returns the detail message string of this throwable.
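
Before the project listings under Usage, here is a minimal, self-contained sketch of the pattern they all share: build the Options, parse the arguments, and on a ParseException report e.getMessage() to the user, typically alongside generated help text. The class name ParseExceptionDemo and its options are illustrative only; the sketch assumes Commons CLI 1.3 or later for DefaultParser.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ParseExceptionDemo {
    public static void main(String[] args) {
        Options options = new Options();
        options.addOption("i", "input", true, "input file path");
        options.addOption("h", "help", false, "print this message");

        CommandLineParser parser = new DefaultParser();
        try {
            CommandLine cmd = parser.parse(options, args);
            System.out.println("input = " + cmd.getOptionValue("i", "<none>"));
        } catch (ParseException e) {
            // getMessage() carries the parser's diagnostic, e.g. "Unrecognized option: -z"
            System.err.println("Error parsing command line: " + e.getMessage());
            new HelpFormatter().printHelp("ParseExceptionDemo", options);
            System.exit(1);
        }
    }
}

The examples that follow use the same shape; some rely on the older GnuParser or BasicParser, which are deprecated in favor of DefaultParser as of Commons CLI 1.3.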

Usage

From source file:io.anserini.index.UpdateIndex.java

@SuppressWarnings("static-access")
public static void main(String[] args) throws Exception {
    Options options = new Options();

    options.addOption(new Option(HELP_OPTION, "show help"));
    options.addOption(new Option(OPTIMIZE_OPTION, "merge indexes into a single segment"));
    options.addOption(new Option(STORE_TERM_VECTORS_OPTION, "store term vectors"));

    options.addOption(
            OptionBuilder.withArgName("dir").hasArg().withDescription("index location").create(INDEX_OPTION));
    options.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("file with deleted tweetids")
            .create(DELETES_OPTION));
    options.addOption(OptionBuilder.withArgName("id").hasArg().withDescription("max id").create(MAX_ID_OPTION));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (cmdline.hasOption(HELP_OPTION) || !cmdline.hasOption(INDEX_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(UpdateIndex.class.getName(), options);
        System.exit(-1);
    }

    String indexPath = cmdline.getOptionValue(INDEX_OPTION);

    final FieldType textOptions = new FieldType();
    textOptions.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    textOptions.setStored(true);
    textOptions.setTokenized(true);
    textOptions.setStoreTermVectors(true);

    LOG.info("index: " + indexPath);

    File file = new File("PittsburghUserTimeline");
    if (!file.exists()) {
        System.err.println("Error: " + file + " does not exist!");
        System.exit(-1);
    }

    final StatusStream stream = new JsonStatusCorpusReader(file);

    Status status;
    String s;
    HashMap<Long, String> hm = new HashMap<Long, String>();
    try {
        while ((s = stream.nextRaw()) != null) {
            try {
                status = DataObjectFactory.createStatus(s);

                if (status.getText() == null) {
                    continue;
                }

                // append to any text already collected for this user (avoids a leading "null")
                hm.put(status.getUser().getId(), hm.getOrDefault(status.getUser().getId(), "")
                        + status.getText().replaceAll("[\\r\\n]+", " "));

            } catch (Exception e) {
                // skip statuses that fail to parse
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
    } finally {

        stream.close();
    }

    ArrayList<String> userIDList = new ArrayList<String>();
    try (BufferedReader br = new BufferedReader(new FileReader(new File("userID")))) {
        String line;
        while ((line = br.readLine()) != null) {
            userIDList.add(line.replaceAll("[\\r\\n]+", ""));

            // process the line.
        }
    }

    try {
        reader = DirectoryReader
                .open(FSDirectory.open(new File(cmdline.getOptionValue(INDEX_OPTION)).toPath()));
    } catch (IOException e) {
        e.printStackTrace();
    }

    final Directory dir = new SimpleFSDirectory(Paths.get(cmdline.getOptionValue(INDEX_OPTION)));
    final IndexWriterConfig config = new IndexWriterConfig(ANALYZER);

    config.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);

    final IndexWriter writer = new IndexWriter(dir, config);

    IndexSearcher searcher = new IndexSearcher(reader);
    System.out.println("The total number of docs indexed "
            + searcher.collectionStatistics(TweetStreamReader.StatusField.TEXT.name).docCount());

    for (int city = 0; city < cityName.length; city++) {

        // Pittsburgh's coordinate -79.976389, 40.439722

        Query q_long = NumericRangeQuery.newDoubleRange(TweetStreamReader.StatusField.LONGITUDE.name,
                new Double(longitude[city] - 0.05), new Double(longitude[city] + 0.05), true, true);
        Query q_lat = NumericRangeQuery.newDoubleRange(TweetStreamReader.StatusField.LATITUDE.name,
                new Double(latitude[city] - 0.05), new Double(latitude[city] + 0.05), true, true);

        BooleanQuery bqCityName = new BooleanQuery();

        Term t = new Term("place", cityName[city]);
        TermQuery query = new TermQuery(t);
        bqCityName.add(query, BooleanClause.Occur.SHOULD);
        System.out.println(query.toString());

        for (int i = 0; i < cityNameAlias[city].length; i++) {
            t = new Term("place", cityNameAlias[city][i]);
            query = new TermQuery(t);
            bqCityName.add(query, BooleanClause.Occur.SHOULD);
            System.out.println(query.toString());
        }

        BooleanQuery bq = new BooleanQuery();

        BooleanQuery finalQuery = new BooleanQuery();

        // either a coordinate match
        bq.add(q_long, BooleanClause.Occur.MUST);
        bq.add(q_lat, BooleanClause.Occur.MUST);

        finalQuery.add(bq, BooleanClause.Occur.SHOULD);
        // or a place city name match
        finalQuery.add(bqCityName, BooleanClause.Occur.SHOULD);

        TotalHitCountCollector totalHitCollector = new TotalHitCountCollector();

        // Query hasFieldQuery = new ConstantScoreQuery(new
        // FieldValueFilter("timeline"));
        //
        // searcher.search(hasFieldQuery, totalHitCollector);
        //
        // if (totalHitCollector.getTotalHits() > 0) {
        // TopScoreDocCollector collector =
        // TopScoreDocCollector.create(Math.max(0,
        // totalHitCollector.getTotalHits()));
        // searcher.search(finalQuery, collector);
        // ScoreDoc[] hits = collector.topDocs().scoreDocs;
        //
        //
        // HashMap<String, Integer> hasHit = new HashMap<String, Integer>();
        // int dupcount = 0;
        // for (int i = 0; i < hits.length; ++i) {
        // int docId = hits[i].doc;
        // Document d;
        //
        // d = searcher.doc(docId);
        //
        // System.out.println(d.getFields());
        // }
        // }

        // totalHitCollector = new TotalHitCountCollector();
        searcher.search(finalQuery, totalHitCollector);

        if (totalHitCollector.getTotalHits() > 0) {
            TopScoreDocCollector collector = TopScoreDocCollector
                    .create(Math.max(0, totalHitCollector.getTotalHits()));
            searcher.search(finalQuery, collector);
            ScoreDoc[] hits = collector.topDocs().scoreDocs;

            System.out.println("City " + cityName[city] + " " + collector.getTotalHits() + " hits.");

            HashMap<String, Integer> hasHit = new HashMap<String, Integer>();
            int dupcount = 0;
            for (int i = 0; i < hits.length; ++i) {
                int docId = hits[i].doc;
                Document d;

                d = searcher.doc(docId);

                if (userIDList.contains(d.get(IndexTweets.StatusField.USER_ID.name))
                        && hm.containsKey(Long.parseLong(d.get(IndexTweets.StatusField.USER_ID.name)))) {
                    //            System.out.println("Has timeline field?" + (d.get("timeline") != null));
                    //            System.out.println(reader.getDocCount("timeline"));
                    //            d.add(new Field("timeline", hm.get(Long.parseLong(d.get(IndexTweets.StatusField.USER_ID.name))),
                    //                textOptions));
                    System.out.println("Found a user hit");
                    BytesRefBuilder brb = new BytesRefBuilder();
                    NumericUtils.longToPrefixCodedBytes(Long.parseLong(d.get(IndexTweets.StatusField.ID.name)),
                            0, brb);
                    Term term = new Term(IndexTweets.StatusField.ID.name, brb.get());
                    //            System.out.println(reader.getDocCount("timeline"));

                    Document d_new = new Document();
                    //            for (IndexableField field : d.getFields()) {
                    //              d_new.add(field);
                    //            }
                    // System.out.println(d_new.getFields());
                    d_new.add(new StringField("userBackground", d.get(IndexTweets.StatusField.USER_ID.name),
                            Store.YES));
                    d_new.add(new Field("timeline",
                            hm.get(Long.parseLong(d.get(IndexTweets.StatusField.USER_ID.name))), textOptions));
                    // System.out.println(d_new.get());
                    writer.addDocument(d_new);
                    writer.commit();

                    //            t = new Term("label", "why");
                    //            TermQuery tqnew = new TermQuery(t);
                    //
                    //            totalHitCollector = new TotalHitCountCollector();
                    //
                    //            searcher.search(tqnew, totalHitCollector);
                    //
                    //            if (totalHitCollector.getTotalHits() > 0) {
                    //              collector = TopScoreDocCollector.create(Math.max(0, totalHitCollector.getTotalHits()));
                    //              searcher.search(tqnew, collector);
                    //              hits = collector.topDocs().scoreDocs;
                    //
                    //              System.out.println("City " + cityName[city] + " " + collector.getTotalHits() + " hits.");
                    //
                    //              for (int k = 0; k < hits.length; k++) {
                    //                docId = hits[k].doc;
                    //                d = searcher.doc(docId);
                    //                System.out.println(d.get(IndexTweets.StatusField.ID.name));
                    //                System.out.println(d.get(IndexTweets.StatusField.PLACE.name));
                    //              }
                    //            }

                    // writer.deleteDocuments(term);
                    // writer.commit();
                    // writer.addDocument(d);
                    // writer.commit();

                    //            System.out.println(reader.getDocCount("timeline"));
                    // writer.updateDocument(term, d);
                    // writer.commit();

                }

            }
        }
    }
    reader.close();
    writer.close();

}

From source file:LineageSimulator.java

public static void main(String[] args) {
    Options options = new Options();
    // commands
    //options.addOption("simulate", false, "Simulate lineage trees");
    //options.addOption("sample", false, "Sample from the simulated trees");
    //options.addOption("evaluate", false, "Evaluate trees");

    // tree simulation
    options.addOption("t", "nTrees", true, "Number of trees to simulate (default: 100)");
    options.addOption("i", "nIter", true, "Number of tree growth iterations (default: 50)");
    options.addOption("snv", "probSNV", true,
            "Per node probablity of generating a descendant cell population with an acquired SNV during a tree growth iteration (default: 0.15)");
    options.addOption("cnv", "probCNV", true,
            "Per node probablity of generating a descendant cell population with an acquired CNV during a tree growth iteration (default: 0.02)");
    options.addOption("probDeath", true,
            "Probablity of a cell population death in each tree growth iteration (default: 0.06)");
    options.addOption("maxPopulationSize", true, "Max size of a cell population (default: 1000000)");
    options.addOption("minNodes", true,
            "Minimum number of undead cell population nodes in a valid tree, tree growth will continue beyond the defined number of iterations until this value is reached (default: 10)");
    options.addOption("maxNodes", true,
            "Maximum number of undead cell population nodes in a tree, tree growth will stop after the iteration in which this value is reached/first surpassed (default: 1000)");

    // sampling
    Option samplesOption = new Option("s", "nSamples", true,
            "Number of samples to collect, accepts multiple values, e.g. 5 10 15 (default: 5)");
    samplesOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(samplesOption);
    Option covOption = new Option("c", "coverage", true,
            "Simulated coverage to generate the VAFs, accepts multiple values, e.g. 500 1000 (default: 1000)");
    covOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(covOption);
    options.addOption("maxSubclones", true, "Max number of subclones per sample (default: 5)");
    options.addOption("sampleSize", true, "Number of cells per sample (default: 100000)");
    options.addOption("e", true, "Sequencing error (default: 0.001)");
    options.addOption("minNC", true,
            "Minimum percentage of normal contamination per sample; the percentage will be randomly generated from the range [minNC maxNC] for each sample (default: 0)");
    options.addOption("maxNC", true,
            "Maximum percentage of normal contamination per sample; if maxNC < minNC, maxNC will be automatically set to minNC; the percentage will be randomly generated from the range [minNC maxNC] for each sample (default: 20)");
    //options.addOption("localized", false, "Enable localized sampling (default: random sampling)");
    //options.addOption("mixSubclone", false, "With localized sampling, add an additional subclone from a different subtree to each sample; by default, the sample is localized to a single disjoint subtree");

    // input/output/display
    options.addOption("dir", "outputDir", true,
            "Directory where the output files should be created [required]");
    options.addOption("dot", false, "Produce DOT files for the simulated trees");
    options.addOption("sdot", "sampledDot", false,
            "Produce DOT files for the simulated trees with indicated samples");
    options.addOption("sampleProfile", false,
            "Output VAF file includes an additional column with the binary sample profile for each SNV");

    // other
    options.addOption("v", "verbose", false, "Verbose mode");
    options.addOption("h", "help", false, "Print usage");

    // display order
    ArrayList<Option> optionsList = new ArrayList<Option>();
    optionsList.add(options.getOption("dir"));
    optionsList.add(options.getOption("t"));
    optionsList.add(options.getOption("i"));
    optionsList.add(options.getOption("snv"));
    optionsList.add(options.getOption("cnv"));
    optionsList.add(options.getOption("probDeath"));
    optionsList.add(options.getOption("maxPopulationSize"));
    optionsList.add(options.getOption("minNodes"));
    optionsList.add(options.getOption("maxNodes"));
    optionsList.add(options.getOption("s"));
    optionsList.add(options.getOption("c"));
    optionsList.add(options.getOption("maxSubclones"));
    optionsList.add(options.getOption("sampleSize"));
    optionsList.add(options.getOption("e"));
    optionsList.add(options.getOption("minNC"));
    optionsList.add(options.getOption("maxNC"));
    optionsList.add(options.getOption("dot"));
    optionsList.add(options.getOption("sdot"));
    optionsList.add(options.getOption("sampleProfile"));
    optionsList.add(options.getOption("v"));
    optionsList.add(options.getOption("h"));

    CommandLineParser parser = new BasicParser();
    CommandLine cmdLine = null;
    HelpFormatter hf = new HelpFormatter();
    hf.setOptionComparator(new OptionComarator<Option>(optionsList));
    try {
        cmdLine = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        hf.printHelp(PROG_NAME, options);
        System.exit(-1);
    }
    Args params = new Args();
    if (cmdLine.hasOption("dir")) {
        params.simPath = cmdLine.getOptionValue("dir") + "/" + SIMULATION_DATA_DIR;
    } else {
        System.err.println("Required parameter: output directory path [-dir]");
        hf.printHelp(PROG_NAME, options);
        System.exit(-1);
    }
    if (cmdLine.hasOption("t")) {
        Parameters.NUM_TREES = Integer.parseInt(cmdLine.getOptionValue("t"));
    }
    if (cmdLine.hasOption("i")) {
        Parameters.NUM_ITERATIONS = Integer.parseInt(cmdLine.getOptionValue("i"));
    }
    if (cmdLine.hasOption("snv")) {
        Parameters.PROB_SNV = Double.parseDouble(cmdLine.getOptionValue("snv"));
    }
    if (cmdLine.hasOption("cnv")) {
        Parameters.PROB_CNV = Double.parseDouble(cmdLine.getOptionValue("cnv"));
    }
    if (cmdLine.hasOption("probDeath")) {
        Parameters.PROB_DEATH = Double.parseDouble(cmdLine.getOptionValue("probDeath"));
    }
    if (cmdLine.hasOption("maxPopulationSize")) {
        Parameters.MAX_POPULATION_SIZE = Integer.parseInt(cmdLine.getOptionValue("maxPopulationSize"));
    }
    if (cmdLine.hasOption("minNodes")) {
        Parameters.MIN_NUM_NODES = Integer.parseInt(cmdLine.getOptionValue("minNodes"));
        if (Parameters.MIN_NUM_NODES < 1) {
            System.err.println("Minimum number of nodes [-minNodes] must be at least 1");
            System.exit(-1);
        }
    }
    if (cmdLine.hasOption("maxNodes")) {
        Parameters.MAX_NUM_NODES = Integer.parseInt(cmdLine.getOptionValue("maxNodes"));
        if (Parameters.MAX_NUM_NODES < 1 || Parameters.MAX_NUM_NODES < Parameters.MIN_NUM_NODES) {
            System.err.println(
                    "Maximum number of nodes [-maxNodes] must be at least 1 and not less than [-minNodes]");
            System.exit(-1);
        }
    }
    if (cmdLine.hasOption("s")) {
        String[] samples = cmdLine.getOptionValues("s");
        Parameters.NUM_SAMPLES_ARRAY = new int[samples.length];
        for (int i = 0; i < samples.length; i++) {
            Parameters.NUM_SAMPLES_ARRAY[i] = Integer.parseInt(samples[i]);
        }
    }
    if (cmdLine.hasOption("c")) {
        String[] cov = cmdLine.getOptionValues("c");
        Parameters.COVERAGE_ARRAY = new int[cov.length];
        for (int i = 0; i < cov.length; i++) {
            Parameters.COVERAGE_ARRAY[i] = Integer.parseInt(cov[i]);
        }
    }
    if (cmdLine.hasOption("maxSubclones")) {
        Parameters.MAX_NUM_SUBCLONES = Integer.parseInt(cmdLine.getOptionValue("maxSubclones"));
    }
    if (cmdLine.hasOption("sampleSize")) {
        Parameters.NUM_CELLS_PER_SAMPLE = Integer.parseInt(cmdLine.getOptionValue("sampleSize"));
    }
    if (cmdLine.hasOption("e")) {
        Parameters.SEQUENCING_ERROR = Double.parseDouble(cmdLine.getOptionValue("e"));
    }
    if (cmdLine.hasOption("minNC")) {
        Parameters.MIN_PERCENT_NORMAL_CONTAMINATION = Double.parseDouble(cmdLine.getOptionValue("minNC"));
    }
    if (cmdLine.hasOption("maxNC")) {
        Parameters.MAX_PERCENT_NORMAL_CONTAMINATION = Double.parseDouble(cmdLine.getOptionValue("maxNC"));
    }
    if (Parameters.MAX_PERCENT_NORMAL_CONTAMINATION < Parameters.MIN_PERCENT_NORMAL_CONTAMINATION) {
        Parameters.MAX_PERCENT_NORMAL_CONTAMINATION = Parameters.MIN_PERCENT_NORMAL_CONTAMINATION;
    }

    /*if(cmdLine.hasOption("localized")) {
       Parameters.LOCALIZED_SAMPLING = true;
    }
    if(cmdLine.hasOption("mixSubclone")) {
       Parameters.MIX_NBR_SUBTREE_SUBCLONE = true;
    }*/

    if (cmdLine.hasOption("dot")) {
        params.generateDOT = true;
    }
    if (cmdLine.hasOption("sampledDot")) {
        params.generateSampledDOT = true;
    }
    if (cmdLine.hasOption("sampleProfile")) {
        params.outputSampleProfile = true;
    }
    if (cmdLine.hasOption("h")) {
        new HelpFormatter().printHelp(" ", options);
    }
    // logger
    ConsoleHandler h = new ConsoleHandler();
    h.setFormatter(new LogFormatter());
    h.setLevel(Level.INFO);
    logger.setLevel(Level.INFO);
    if (cmdLine.hasOption("v")) {
        h.setLevel(Level.FINEST);
        logger.setLevel(Level.FINEST);
    }
    logger.addHandler(h);
    logger.setUseParentHandlers(false);

    // validate settings
    if (Parameters.PROB_SNV + Parameters.PROB_CNV + Parameters.PROB_DEATH > 1) {
        System.err.println("The sum of SSNV, CNV, and cell death probabilities cannot exceed 1");
        hf.printHelp(PROG_NAME, options);
        System.exit(-1);
    }
    simulateLineageTrees(params);
}

From source file:com.act.lcms.v2.TraceIndexExtractor.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(TraceIndexExtractor.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(TraceIndexExtractor.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    // Not enough memory available?  We're gonna need a bigger heap.
    long maxMemory = Runtime.getRuntime().maxMemory();
    if (maxMemory < 1L << 34) { // 16 GiB (long literal so the shift does not wrap)
        String msg = StringUtils.join(
                String.format(
                        "You have run this class with a maximum heap size of less than 16GB (%d to be exact). ",
                        maxMemory),
                "There is no way this process will complete with that much space available. ",
                "Crank up your heap allocation with -Xmx and try again.", "");
        throw new RuntimeException(msg);
    }

    File inputFile = new File(cl.getOptionValue(OPTION_SCAN_FILE));
    if (!inputFile.exists()) {
        System.err.format("Cannot find input scan file at %s\n", inputFile.getAbsolutePath());
        HELP_FORMATTER.printHelp(TraceIndexExtractor.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    File rocksDBFile = new File(cl.getOptionValue(OPTION_INDEX_PATH));
    if (rocksDBFile.exists()) {
        System.err.format("Index file at %s already exists--remove and retry\n", rocksDBFile.getAbsolutePath());
        HELP_FORMATTER.printHelp(TraceIndexExtractor.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    List<Double> targetMZs = new ArrayList<>();
    try (BufferedReader reader = new BufferedReader(new FileReader(cl.getOptionValue(OPTION_TARGET_MASSES)))) {
        String line;
        while ((line = reader.readLine()) != null) {
            targetMZs.add(Double.valueOf(line));
        }
    }

    TraceIndexExtractor extractor = new TraceIndexExtractor();
    extractor.processScan(targetMZs, inputFile, rocksDBFile);
}

From source file:com.github.braully.graph.UtilResultMerge.java

public static void main(String... args) throws Exception {
    Options options = new Options();

    Option input = new Option("i", "input", true, "input file path");
    input.setRequired(false);
    options.addOption(input);

    Option output = new Option("o", "output", true, "output file");
    output.setRequired(false);
    options.addOption(output);

    Option verb = new Option("v", "verbose", false, "verbose");
    verb.setRequired(false);
    options.addOption(verb);

    Option excludeOpt = new Option("x", "exclude", true, "exclude operations");
    excludeOpt.setRequired(false);
    options.addOption(excludeOpt);

    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.out.println(e.getMessage());
        formatter.printHelp("UtilResult", options);

        System.exit(1);
        return;
    }

    String[] excludes = cmd.getOptionValues("exclude");
    String[] inputs = cmd.getOptionValues("input");
    if (inputs == null) {
        inputs = new String[] {
                "/home/strike/Dropbox/workspace/graph-caratheodory-np3/grafos-processamento/Almost_hypohamiltonian"
                //                "/media/dados/documentos/grafos-processamento/Almost_hypohamiltonian",
                //                "/home/strike/Documentos/grafos-processamento/Cubic",
                //                "/home/strike/Documentos/grafos-processamento/Critical_H-free",
                //                "/home/strike/Documentos/grafos-processamento/Highly_irregular",
                //                "/home/strike/Documentos/grafos-processamento/Hypohamiltonian_graphs",
                //                "/home/strike/Documentos/grafos-processamento/Maximal_triangle-free",
                //                "/home/strike/Documentos/grafos-processamento/Minimal_Ramsey",
                //                "/home/strike/Documentos/grafos-processamento/Strongly_regular",
                //                "/home/strike/Documentos/grafos-processamento/Vertex-transitive",
                //                "/home/strike/Documentos/grafos-processamento/Trees"
        };
        excludes = new String[] { "carathe" };
        verbose = true;
    }

    if (cmd.hasOption(verb.getOpt())) {
        verbose = true;
    }

    if (inputs != null) {
        processInputs(inputs, excludes);
    }
}

From source file:com.act.lcms.db.io.ExportStandardIonResultsFromDB.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                null, true);
        return;
    }

    try (DB db = DB.openDBFromCLI(cl)) {
        List<String> chemicalNames = new ArrayList<>();
        if (cl.hasOption(OPTION_CONSTRUCT)) {
            // Extract the chemicals in the pathway and their product masses, then look up info on those chemicals
            List<Pair<ChemicalAssociatedWithPathway, Double>> productMasses = Utils
                    .extractMassesForChemicalsAssociatedWithConstruct(db, cl.getOptionValue(OPTION_CONSTRUCT));

            for (Pair<ChemicalAssociatedWithPathway, Double> pair : productMasses) {
                chemicalNames.add(pair.getLeft().getChemical());
            }
        }

        if (cl.hasOption(OPTION_CHEMICALS)) {
            chemicalNames.addAll(Arrays.asList(cl.getOptionValues(OPTION_CHEMICALS)));
        }

        if (chemicalNames.size() == 0) {
            System.err.format("No chemicals can be found from the input query.\n");
            System.exit(-1);
        }

        List<String> standardIonHeaderFields = new ArrayList<String>() {
            {
                add(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name());
                add(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name());
                add(STANDARD_ION_HEADER_FIELDS.MANUAL_PICK.name());
                add(STANDARD_ION_HEADER_FIELDS.AUTHOR.name());
                add(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name());
                add(STANDARD_ION_HEADER_FIELDS.NOTE.name());
            }
        };

        String outAnalysis;
        if (cl.hasOption(OPTION_OUTPUT_PREFIX)) {
            outAnalysis = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + "." + TSV_FORMAT;
        } else {
            outAnalysis = String.join("-", chemicalNames) + "." + TSV_FORMAT;
        }

        File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY));
        if (!lcmsDir.isDirectory()) {
            System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath());
            HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                    null, true);
            System.exit(1);
        }

        String plottingDirectory = cl.getOptionValue(OPTION_PLOTTING_DIR);

        TSVWriter<String, String> resultsWriter = new TSVWriter<>(standardIonHeaderFields);
        resultsWriter.open(new File(outAnalysis));

        // For each chemical, create a TSV row and a corresponding diagnostic plot
        for (String chemicalName : chemicalNames) {
            List<String> graphLabels = new ArrayList<>();
            List<Double> yMaxList = new ArrayList<>();

            String outData = plottingDirectory + "/" + chemicalName + ".data";
            String outImg = plottingDirectory + "/" + chemicalName + ".pdf";

            // For each diagnostic plot, open a new file stream.
            try (FileOutputStream fos = new FileOutputStream(outData)) {

                List<StandardIonResult> getResultByChemicalName = StandardIonResult.getByChemicalName(db,
                        chemicalName);

                if (getResultByChemicalName != null && getResultByChemicalName.size() > 0) {

                    // PART 1: Get the best metlin ion across all standard ion results for a given chemical
                    String bestGlobalMetlinIon = AnalysisHelper
                            .scoreAndReturnBestMetlinIonFromStandardIonResults(getResultByChemicalName,
                                    new HashMap<>(), true, true);

                    // PART 2: Plot all the graphs related to the chemical. The plots are structured as follows:
                    //
                    // Page 1: All graphs (water, MeOH, Yeast) for Global ion picked (best ion among ALL standard ion runs for
                    // the given chemical) by the algorithm
                    // Page 2: All graphs for M+H
                    // Page 3: All graphs for Local ions picked (best ion within a SINGLE standard ion run) + negative controls
                    // for Yeast.
                    //
                    // Each page is demarcated by a blank graph.

                    // Arrange results based on media
                    Map<String, List<StandardIonResult>> categories = StandardIonResult
                            .categorizeListOfStandardWellsByMedia(db, getResultByChemicalName);

                    // This set contains all the best metlin ions corresponding to all the standard ion runs.
                    Set<String> bestLocalIons = new HashSet<>();
                    bestLocalIons.add(bestGlobalMetlinIon);
                    bestLocalIons.add(DEFAULT_ION);

                    for (StandardIonResult result : getResultByChemicalName) {
                        bestLocalIons.add(result.getBestMetlinIon());
                    }

                    // We sort the best local ions as follows:
                    // 1) Global best ion spectra 2) M+H spectra 3) Local best ion spectra
                    List<String> bestLocalIonsArray = new ArrayList<>(bestLocalIons);
                    Collections.sort(bestLocalIonsArray, new Comparator<String>() {
                        @Override
                        public int compare(String o1, String o2) {
                            if (o1.equals(bestGlobalMetlinIon) && !o2.equals(bestGlobalMetlinIon)) {
                                return -1;
                            } else if (o1.equals(DEFAULT_ION) && !o2.equals(bestGlobalMetlinIon)) {
                                return -1;
                            } else {
                                return 1;
                            }
                        }
                    });

                    // This variable stores the index of the array at which all the remaining spectra are contained in one
                    // page. This happens right after the M+H ion spectra.
                    Integer combineAllSpectraIntoPageThreeFromIndex = 0;
                    for (int i = 0; i < bestLocalIonsArray.size(); i++) {
                        if (bestLocalIonsArray.get(i).equals(DEFAULT_ION)) {
                            combineAllSpectraIntoPageThreeFromIndex = i + 1;
                        }
                    }

                    for (int i = 0; i < bestLocalIonsArray.size(); i++) {

                        String ion = bestLocalIonsArray.get(i);
                        for (Map.Entry<String, List<StandardIonResult>> mediaToListOfIonResults : categories
                                .entrySet()) {

                            for (StandardIonResult result : mediaToListOfIonResults.getValue()) {

                                // For every standard ion result, we plot the best global metlin ion and M+H; those plots
                                // go on pages 1 and 2. On page 3 (the miscellaneous spectra) we only plot the best local
                                // ion corresponding to its own spectra, not some other graph's spectra. So if we reach
                                // the page 3 case with an ion that is not this spectra's best ion, we simply continue
                                // and draw nothing on the page.
                                if (i >= combineAllSpectraIntoPageThreeFromIndex
                                        && !(result.getBestMetlinIon().equals(ion))) {
                                    continue;
                                }

                                StandardWell positiveWell = StandardWell.getInstance().getById(db,
                                        result.getStandardWellId());
                                String positiveControlChemical = positiveWell.getChemical();

                                ScanData<StandardWell> encapsulatedDataForPositiveControl = AnalysisHelper
                                        .getScanDataForWell(db, lcmsDir, positiveWell, positiveControlChemical,
                                                positiveControlChemical);

                                Set<String> singletonSet = Collections.singleton(ion);
                                String additionalInfo = generateAdditionalLabelInformation(positiveWell, result,
                                        ion);

                                List<String> labels = AnalysisHelper
                                        .writeScanData(fos, lcmsDir, MAX_INTENSITY,
                                                encapsulatedDataForPositiveControl, false, false, singletonSet)
                                        .stream().map(label -> label + additionalInfo)
                                        .collect(Collectors.toList());

                                yMaxList.add(encapsulatedDataForPositiveControl.getMs1ScanResults()
                                        .getMaxIntensityForIon(ion));

                                List<String> negativeLabels = null;
                                // Only do the negative control in the miscellaneous page (page 3) and if the well is in yeast media.
                                if (mediaToListOfIonResults.getKey()
                                        .equals(StandardWell.MEDIA_TYPE.YEAST.name())
                                        && (i >= combineAllSpectraIntoPageThreeFromIndex
                                                && (result.getBestMetlinIon().equals(ion)))) {
                                    //TODO: Change the representative negative well to one that displays the highest noise in the future.
                                    // For now, we just use the first index among the negative wells.
                                    int representativeIndex = 0;
                                    StandardWell representativeNegativeControlWell = StandardWell.getInstance()
                                            .getById(db, result.getNegativeWellIds().get(representativeIndex));

                                    ScanData encapsulatedDataForNegativeControl = AnalysisHelper
                                            .getScanDataForWell(db, lcmsDir, representativeNegativeControlWell,
                                                    positiveWell.getChemical(),
                                                    representativeNegativeControlWell.getChemical());

                                    String negativePlateAdditionalInfo = generateAdditionalLabelInformation(
                                            representativeNegativeControlWell, null, null);

                                    negativeLabels = AnalysisHelper.writeScanData(fos, lcmsDir, MAX_INTENSITY,
                                            encapsulatedDataForNegativeControl, false, false, singletonSet)
                                            .stream().map(label -> label + negativePlateAdditionalInfo)
                                            .collect(Collectors.toList());

                                    yMaxList.add(encapsulatedDataForNegativeControl.getMs1ScanResults()
                                            .getMaxIntensityForIon(ion));
                                }

                                graphLabels.addAll(labels);

                                if (negativeLabels != null) {
                                    graphLabels.addAll(negativeLabels);
                                }
                            }
                        }

                        // Add a blank graph to demarcate pages.
                        if (i < combineAllSpectraIntoPageThreeFromIndex) {
                            graphLabels.addAll(AnalysisHelper.writeScanData(fos, lcmsDir, 0.0, BLANK_SCAN,
                                    false, false, new HashSet<>()));
                            yMaxList.add(0.0d);
                        }
                    }

                    // We need to pass the yMax values as an array to the Gnuplotter.
                    Double fontScale = null;
                    if (cl.hasOption(FONT_SCALE)) {
                        try {
                            fontScale = Double.parseDouble(cl.getOptionValue(FONT_SCALE));
                        } catch (IllegalArgumentException e) {
                            System.err.format("Argument for font-scale must be a floating point number.\n");
                            System.exit(1);
                        }
                    }

                    Double[] yMaxes = yMaxList.toArray(new Double[yMaxList.size()]);
                    Gnuplotter plotter = fontScale == null ? new Gnuplotter() : new Gnuplotter(fontScale);
                    plotter.plot2D(outData, outImg, graphLabels.toArray(new String[graphLabels.size()]), "time",
                            null, "intensity", "pdf", null, null, yMaxes, outImg + ".gnuplot");

                    Map<String, String> row = new HashMap<>();
                    row.put(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name(), chemicalName);
                    row.put(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name(), bestGlobalMetlinIon);
                    row.put(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name(), outImg);

                    resultsWriter.append(row);
                    resultsWriter.flush();
                }
            }
        }

        resultsWriter.flush();
        resultsWriter.close();
    }
}

From source file:CTmousetrack.java

public static void main(String[] args) {

    String outLoc = new String("." + File.separator + "CTdata"); // Location of the base output data folder; only used when writing out CT data to a local folder
    String srcName = "CTmousetrack"; // name of the output CT source
    long blockPts = 10; // points per block flush
    long sampInterval = 10; // time between sampling updates, msec
    double trimTime = 0.0; // amount of data to keep (trim time), sec
    boolean debug = false; // turn on debug?

    // Specify the CT output connection
    CTWriteMode writeMode = CTWriteMode.LOCAL; // The selected mode for writing out CT data
    String serverHost = ""; // Server (FTP or HTTP/S) host:port
    String serverUser = ""; // Server (FTP or HTTPS) username
    String serverPassword = ""; // Server (FTP or HTTPS) password

    // For UDP output mode
    DatagramSocket udpServerSocket = null;
    InetAddress udpServerAddress = null;
    String udpHost = "";
    int udpPort = -1;

    // Concatenate all of the CTWriteMode types
    String possibleWriteModes = "";
    for (CTWriteMode wm : CTWriteMode.values()) {
        possibleWriteModes = possibleWriteModes + ", " + wm.name();
    }
    // Remove ", " from start of string
    possibleWriteModes = possibleWriteModes.substring(2);

    //
    // Argument processing using Apache Commons CLI
    //
    // 1. Setup command line options
    Options options = new Options();
    options.addOption("h", "help", false, "Print this message.");
    options.addOption(Option.builder("o").argName("base output dir").hasArg().desc(
            "Base output directory when writing data to local folder (i.e., this is the location of CTdata folder); default = \""
                    + outLoc + "\".")
            .build());
    options.addOption(Option.builder("s").argName("source name").hasArg()
            .desc("Name of source to write data to; default = \"" + srcName + "\".").build());
    options.addOption(Option.builder("b").argName("points per block").hasArg()
            .desc("Number of points per block; UDP output mode will use 1 point/block; default = "
                    + Long.toString(blockPts) + ".")
            .build());
    options.addOption(Option.builder("dt").argName("samp interval msec").hasArg()
            .desc("Sampling period in msec; default = " + Long.toString(sampInterval) + ".").build());
    options.addOption(Option.builder("t").argName("trim time sec").hasArg().desc(
            "Trim (ring-buffer loop) time (sec); this is only used when writing data to local folder; specify 0 for indefinite; default = "
                    + Double.toString(trimTime) + ".")
            .build());
    options.addOption(
            Option.builder("w").argName("write mode").hasArg()
                    .desc("Type of write connection; one of " + possibleWriteModes
                            + "; all but UDP mode write out to CT; default = " + writeMode.name() + ".")
                    .build());
    options.addOption(Option.builder("host").argName("host[:port]").hasArg()
            .desc("Host:port when writing via FTP, HTTP, HTTPS, UDP.").build());
    options.addOption(Option.builder("u").argName("username,password").hasArg()
            .desc("Comma-delimited username and password when writing to CT via FTP or HTTPS.").build());
    options.addOption("x", "debug", false, "Enable CloudTurbine debug output.");

    // 2. Parse command line options
    CommandLineParser parser = new DefaultParser();
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
    } catch (ParseException exp) { // oops, something went wrong
        System.err.println("Command line argument parsing failed: " + exp.getMessage());
        return;
    }

    // 3. Retrieve the command line values
    if (line.hasOption("help")) { // Display help message and quit
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp("CTmousetrack", "", options,
                "NOTE: UDP output is a special non-CT output mode where single x,y points are sent via UDP to the specified host:port.");
        return;
    }

    outLoc = line.getOptionValue("o", outLoc);
    if (!outLoc.endsWith("\\") && !outLoc.endsWith("/")) {
        outLoc = outLoc + File.separator;
    }
    // Make sure the base output folder location ends in "CTdata"
    if (!outLoc.endsWith("CTdata\\") && !outLoc.endsWith("CTdata/")) {
        outLoc = outLoc + "CTdata" + File.separator;
    }

    srcName = line.getOptionValue("s", srcName);

    blockPts = Long.parseLong(line.getOptionValue("b", Long.toString(blockPts)));

    sampInterval = Long.parseLong(line.getOptionValue("dt", Long.toString(sampInterval)));

    trimTime = Double.parseDouble(line.getOptionValue("t", Double.toString(trimTime)));

    // Type of output connection
    String writeModeStr = line.getOptionValue("w", writeMode.name());
    boolean bMatch = false;
    for (CTWriteMode wm : CTWriteMode.values()) {
        if (wm.name().toLowerCase().equals(writeModeStr.toLowerCase())) {
            writeMode = wm;
            bMatch = true;
        }
    }
    if (!bMatch) {
        System.err.println("Unrecognized write mode, \"" + writeModeStr + "\"; write mode must be one of "
                + possibleWriteModes);
        System.exit(0);
    }
    if (writeMode != CTWriteMode.LOCAL) {
        // User must have specified the host
        // If FTP or HTTPS, they may also specify username/password
        serverHost = line.getOptionValue("host", serverHost);
        if (serverHost.isEmpty()) {
            System.err.println(
                    "When using write mode \"" + writeModeStr + "\", you must specify the server host.");
            System.exit(0);
        }
        if (writeMode == CTWriteMode.UDP) {
            // Force blockPts to be 1
            blockPts = 1;
            // User must have specified both host and port
            int colonIdx = serverHost.indexOf(':');
            if ((colonIdx == -1) || (colonIdx >= serverHost.length() - 1)) {
                System.err.println(
                        "For UDP output mode, both the host and port (<host>:<port>)) must be specified.");
                System.exit(0);
            }
            udpHost = serverHost.substring(0, colonIdx);
            String udpPortStr = serverHost.substring(colonIdx + 1);
            try {
                udpPort = Integer.parseInt(udpPortStr);
            } catch (NumberFormatException nfe) {
                System.err.println("The UDP port must be a positive integer.");
                System.exit(0);
            }
        }
        if ((writeMode == CTWriteMode.FTP) || (writeMode == CTWriteMode.HTTPS)) {
            String userpassStr = line.getOptionValue("u", "");
            if (!userpassStr.isEmpty()) {
                // This string should be comma-delimited username and password
                String[] userpassCSV = userpassStr.split(",");
                if (userpassCSV.length != 2) {
                    System.err.println("When specifying a username and password for write mode \""
                            + writeModeStr + "\", separate the username and password by a comma.");
                    System.exit(0);
                }
                serverUser = userpassCSV[0];
                serverPassword = userpassCSV[1];
            }
        }
    }

    debug = line.hasOption("debug");

    System.err.println("CTmousetrack parameters:");
    System.err.println("\toutput mode = " + writeMode.name());
    if (writeMode == CTWriteMode.UDP) {
        System.err.println("\twrite to " + udpHost + ":" + udpPort);
    } else {
        System.err.println("\tsource = " + srcName);
        System.err.println("\ttrim time = " + trimTime + " sec");
    }
    System.err.println("\tpoints per block = " + blockPts);
    System.err.println("\tsample interval = " + sampInterval + " msec");

    try {
        //
        // Setup CTwriter or UDP output
        //
        CTwriter ctw = null;
        CTinfo.setDebug(debug);
        if (writeMode == CTWriteMode.LOCAL) {
            ctw = new CTwriter(outLoc + srcName, trimTime);
            System.err.println("\tdata will be written to local folder \"" + outLoc + "\"");
        } else if (writeMode == CTWriteMode.FTP) {
            CTftp ctftp = new CTftp(srcName);
            try {
                ctftp.login(serverHost, serverUser, serverPassword);
            } catch (Exception e) {
                throw new IOException(
                        new String("Error logging into FTP server \"" + serverHost + "\":\n" + e.getMessage()));
            }
            ctw = ctftp; // upcast to CTWriter
            System.err.println("\tdata will be written to FTP server at " + serverHost);
        } else if (writeMode == CTWriteMode.HTTP) {
            // Don't send username/pw in HTTP mode since they will be unencrypted
            CThttp cthttp = new CThttp(srcName, "http://" + serverHost);
            ctw = cthttp; // upcast to CTWriter
            System.err.println("\tdata will be written to HTTP server at " + serverHost);
        } else if (writeMode == CTWriteMode.HTTPS) {
            CThttp cthttp = new CThttp(srcName, "https://" + serverHost);
            // Username/pw are optional for HTTPS mode; only use them if username is not empty
            if (!serverUser.isEmpty()) {
                try {
                    cthttp.login(serverUser, serverPassword);
                } catch (Exception e) {
                    throw new IOException(new String(
                            "Error logging into HTTP server \"" + serverHost + "\":\n" + e.getMessage()));
                }
            }
            ctw = cthttp; // upcast to CTWriter
            System.err.println("\tdata will be written to HTTPS server at " + serverHost);
        } else if (writeMode == CTWriteMode.UDP) {
            try {
                udpServerSocket = new DatagramSocket();
            } catch (SocketException se) {
                System.err.println("Error creating socket for UDP:\n" + se);
                System.exit(0);
            }
            try {
                udpServerAddress = InetAddress.getByName(udpHost);
            } catch (UnknownHostException uhe) {
                System.err.println("Error getting UDP server host address:\n" + uhe);
                System.exit(0);
            }
        }
        if (writeMode != CTWriteMode.UDP) {
            ctw.setBlockMode(blockPts > 1, blockPts > 1);
            ctw.autoFlush(0); // no autoflush
            ctw.autoSegment(1000);
        }

        // screen dims
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        double width = screenSize.getWidth();
        double height = screenSize.getHeight();

        // use Map for consolidated putData
        Map<String, Object> cmap = new LinkedHashMap<String, Object>();

        // loop and write some output
        for (int i = 0; i < 1000000; i++) { // go until killed
            long currentTime = System.currentTimeMillis();
            Point mousePos = MouseInfo.getPointerInfo().getLocation();
            float x_pt = (float) (mousePos.getX() / width); // normalize
            float y_pt = (float) ((height - mousePos.getY()) / height); // flip Y (so bottom=0)
            if (writeMode != CTWriteMode.UDP) {
                // CT output mode
                ctw.setTime(currentTime);
                cmap.clear();
                cmap.put("x", x_pt);
                cmap.put("y", y_pt);
                ctw.putData(cmap);
                if (((i + 1) % blockPts) == 0) {
                    ctw.flush();
                    System.err.print(".");
                }
            } else {
                // UDP output mode
                // We force blockPts to be 1 for UDP output mode, i.e. we "flush" the data every time
                // Write the following data (21 bytes total):
                //     header = "MOUSE", 5 bytes
                //     current time, long, 8 bytes
                //     2 floats (x,y) 4 bytes each, 8 bytes
                int len = 21;
                ByteBuffer bb = ByteBuffer.allocate(len);
                String headerStr = "MOUSE";
                bb.put(headerStr.getBytes("UTF-8"));
                bb.putLong(currentTime);
                bb.putFloat(x_pt);
                bb.putFloat(y_pt);
                // Might be able to use the following, but not sure:
                //     byte[] sendData = bb.array();
                byte[] sendData = new byte[len];
                bb.position(0);
                bb.get(sendData, 0, len);
                DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, udpServerAddress,
                        udpPort);
                try {
                    udpServerSocket.send(sendPacket);
                } catch (IOException e) {
                    System.err.println("Test server caught exception trying to send data to UDP client:\n" + e);
                }
                System.err.print(".");
            }
            try {
                Thread.sleep(sampInterval);
            } catch (Exception e) {
                // ignore interrupted sleep and keep sampling
            }
        }
        if (writeMode != CTWriteMode.UDP) {
            ctw.flush(); // wrap up
        }
    } catch (Exception e) {
        System.err.println("CTmousetrack exception: " + e);
        e.printStackTrace();
    }
}

From source file:com.ingby.socbox.bischeck.Execute.java

public static void main(String[] args) {

    // create the command line parser
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;

    // create the Options
    Options options = new Options();
    options.addOption("u", "usage", false, "show usage.");
    options.addOption("d", "deamon", false, "start as a deamon");

    try {
        // parse the command line arguments
        line = parser.parse(options, args);

    } catch (org.apache.commons.cli.ParseException e) {
        System.out.println("Command parse error:" + e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("Bischeck", options);
        Util.ShellExit(Util.FAILED);
    }

    if (line.hasOption("usage")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("Bischeck", options);
        Util.ShellExit(Util.OKAY);
    }

    dumpthread = new Thread() {
        public void run() {
            try {
                CacheFactory.destroy();
            } catch (CacheException e) {
                LOGGER.warn("Cache could not be destoryed", e);
            }
        }
    };

    dumpthread.setName("dumpcache");

    int retStat = Util.OKAY;
    do {
        try {
            if (line.hasOption("deamon")) {
                ConfigurationManager.init();
            } else {
                ConfigurationManager.initonce();
            }
        } catch (Exception e) {
            LOGGER.error("Creating bischeck Configuration Manager failed with: {}", e.getMessage(), e);
            Util.ShellExit(Util.FAILED);
        }

        retStat = Execute.getInstance().deamon();
        LOGGER.debug("Method Execute returned {}", retStat);
    } while (retStat == RESTART);

    dumpthread.start();

    LOGGER.info("******************* Shutdown ********************");

    Util.ShellExit(retStat);
}

From source file:main.java.RMDupper.java

public static void main(String[] args) throws IOException {
    System.err.println("DeDup v" + VERSION);
    // the command line parameters
    Options helpOptions = new Options();
    helpOptions.addOption("h", "help", false, "show this help page");
    Options options = new Options();
    options.addOption("h", "help", false, "show this help page");
    options.addOption("i", "input", true,
            "the input file if this option is not specified,\nthe input is expected to be piped in");
    options.addOption("o", "output", true, "the output folder. Has to be specified if input is set.");
    options.addOption("m", "merged", false,
            "the input only contains merged reads.\n If this option is specified read names are not examined for prefixes.\n Both the start and end of the aligment are considered for all reads.");
    options.addOption("v", "version", false, "the version of DeDup.");
    HelpFormatter helpformatter = new HelpFormatter();
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(helpOptions, args);
        if (cmd.hasOption('h')) {
            helpformatter.printHelp(CLASS_NAME, options);
            System.exit(0);
        }
    } catch (ParseException e1) {
        // ignored here: the full option set is parsed, and errors reported, below
    }

    String input = "";
    String outputpath = "";
    Boolean merged = Boolean.FALSE;
    try {
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption('i')) {
            input = cmd.getOptionValue('i');
            piped = false;
        }
        if (cmd.hasOption('o')) {
            outputpath = cmd.getOptionValue('o');
        }
        if (cmd.hasOption('m')) {
            merged = Boolean.TRUE;
        }
        if (cmd.hasOption('v')) {
            System.out.println("DeDup v" + VERSION);
            System.exit(0);
        }
    } catch (ParseException e) {
        helpformatter.printHelp(CLASS_NAME, options);
        System.err.println(e.getMessage());
        System.exit(0);
    }
    DecimalFormat df = new DecimalFormat("##.##");

    if (piped) {
        RMDupper rmdup = new RMDupper(System.in, System.out, merged);
        rmdup.readSAMFile();

        System.err.println("We are in piping mode!");
        System.err.println("Total reads: " + rmdup.dupStats.total + "\n");
        System.err.println("Reverse removed: " + rmdup.dupStats.removed_reverse + "\n");
        System.err.println("Forward removed: " + rmdup.dupStats.removed_forward + "\n");
        System.err.println("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
        System.err.println("Total removed: " + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_merged
                + rmdup.dupStats.removed_reverse) + "\n");
        if (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_forward
                + rmdup.dupStats.removed_reverse == 0) {
            System.err.println("Duplication Rate: " + df.format(0.00));
        } else {
            System.err.println("Duplication Rate: "
                    + df.format((double) (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                            + rmdup.dupStats.removed_forward) / (double) rmdup.dupStats.total));
        }

    } else {
        if (outputpath.length() == 0) {
            System.err.println("The output folder has to be specified");
            helpformatter.printHelp(CLASS_NAME, options);
            System.exit(0);
        }

        //Check whether we have a directory as output path, else produce error message and quit!

        File f = new File(outputpath);
        if (!f.isDirectory()) {
            System.err.println("The output folder should be a folder and not a file!");
            System.exit(0);
        }

        File inputFile = new File(input);
        File outputFile = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + "_rmdup.bam");
        File outputlog = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + ".log");
        File outputhist = new File(
                outputpath + "/" + Files.getNameWithoutExtension(inputFile.getAbsolutePath()) + ".hist");

        try {
            FileWriter fw = new FileWriter(outputlog);
            FileWriter histfw = new FileWriter(outputhist);
            BufferedWriter bfw = new BufferedWriter(fw);
            BufferedWriter histbfw = new BufferedWriter(histfw);

            RMDupper rmdup = new RMDupper(inputFile, outputFile, merged);
            rmdup.readSAMFile();
            rmdup.inputSam.close();
            rmdup.outputSam.close();

            bfw.write("Total reads: " + rmdup.dupStats.total + "\n");
            bfw.write("Reverse removed: " + rmdup.dupStats.removed_reverse + "\n");
            bfw.write("Forward removed: " + rmdup.dupStats.removed_forward + "\n");
            bfw.write("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
            bfw.write("Total removed: " + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_merged
                    + rmdup.dupStats.removed_reverse) + "\n");
            bfw.write("Duplication Rate: "
                    + df.format((double) (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                            + rmdup.dupStats.removed_forward) / (double) rmdup.dupStats.total));
            bfw.flush();
            bfw.close();

            histbfw.write(rmdup.oc.getHistogram());
            histbfw.flush();
            histbfw.close();

            System.out.println("Total reads: " + rmdup.dupStats.total + "\n");
            System.out.println("Unmerged removed: "
                    + (rmdup.dupStats.removed_forward + rmdup.dupStats.removed_reverse) + "\n");
            System.out.println("Merged removed: " + rmdup.dupStats.removed_merged + "\n");
            System.out.println("Total removed: " + (rmdup.dupStats.removed_forward
                    + rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse) + "\n");
            if (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_forward
                    + rmdup.dupStats.removed_reverse == 0) {
                System.out.println("Duplication Rate: " + df.format(0.00));
            } else {
                System.out.println("Duplication Rate: "
                        + df.format((double) (rmdup.dupStats.removed_merged + rmdup.dupStats.removed_reverse
                                + rmdup.dupStats.removed_forward) / (double) rmdup.dupStats.total));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
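
RMDupper parses twice: a help-only Options set first, so -h works even when required arguments are missing, and the full set afterwards, where ParseException.getMessage() is what the user actually sees. A minimal sketch of that two-pass idiom is below; the option names and the "DeDup" banner echo the example, the rest is illustrative:

import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class TwoPassParse {

    public static void main(String[] args) {
        // Pass 1: help only, so "-h" is honoured even if other options are wrong.
        Options helpOptions = new Options();
        helpOptions.addOption("h", "help", false, "show this help page");

        // Pass 2: the full option set used for the real run.
        Options options = new Options();
        options.addOption("h", "help", false, "show this help page");
        options.addOption("i", "input", true, "the input file");
        options.addOption("o", "output", true, "the output folder");

        HelpFormatter helpformatter = new HelpFormatter();
        CommandLineParser parser = new BasicParser();

        try {
            if (parser.parse(helpOptions, args).hasOption('h')) {
                helpformatter.printHelp("DeDup", options);
                System.exit(0);
            }
        } catch (ParseException ignored) {
            // any "real" option makes the help-only parse fail; fall through to the full parse
        }

        try {
            CommandLine cmd = parser.parse(options, args);
            System.out.println("input  = " + cmd.getOptionValue('i'));
            System.out.println("output = " + cmd.getOptionValue('o'));
        } catch (ParseException e) {
            // this is the message that should reach the user
            System.err.println(e.getMessage());
            helpformatter.printHelp("DeDup", options);
            System.exit(1);
        }
    }
}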

From source file:com.bericotech.clavin.index.IndexDirectoryBuilder.java

/**
 * Turns a GeoNames gazetteer file into a Lucene index, and adds
 * some supplementary gazetteer records at the end.
 *
 * @param args              not used
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    Options options = getOptions();
    CommandLine cmd = null;
    CommandLineParser parser = new GnuParser();
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException pe) {
        LOG.error(pe.getMessage());
        printHelp(options);
        System.exit(-1);
    }

    if (cmd.hasOption(HELP_OPTION)) {
        printHelp(options);
        System.exit(0);
    }

    String indexPath = cmd.getOptionValue(INDEX_PATH_OPTION, DEFAULT_INDEX_DIRECTORY);
    String[] gazetteerPaths = cmd.getOptionValues(GAZETTEER_FILES_OPTION);
    if (gazetteerPaths == null || gazetteerPaths.length == 0) {
        gazetteerPaths = DEFAULT_GAZETTEER_FILES;
    }
    boolean replaceIndex = cmd.hasOption(REPLACE_INDEX_OPTION);
    boolean fullAncestry = cmd.hasOption(FULL_ANCESTRY_OPTION);

    File idir = new File(indexPath);
    // if the index directory exists, delete it if we are replacing, otherwise
    // exit gracefully
    if (idir.exists()) {
        if (replaceIndex) {
            LOG.info("Replacing index: {}", idir.getAbsolutePath());
            FileUtils.deleteDirectory(idir);
        } else {
            LOG.info("{} exists. Remove the directory and try again.", idir.getAbsolutePath());
            System.exit(-1);
        }
    }

    List<File> gazetteerFiles = new ArrayList<File>();
    for (String gp : gazetteerPaths) {
        File gf = new File(gp);
        if (gf.isFile() && gf.canRead()) {
            gazetteerFiles.add(gf);
        } else {
            LOG.info("Unable to read Gazetteer file: {}", gf.getAbsolutePath());
        }
    }
    if (gazetteerFiles.isEmpty()) {
        LOG.error("No Gazetteer files found.");
        System.exit(-1);
    }

    String altNamesPath = cmd.getOptionValue(ALTERNATE_NAMES_OPTION);
    File altNamesFile = altNamesPath != null ? new File(altNamesPath) : null;
    if (altNamesFile != null && !(altNamesFile.isFile() && altNamesFile.canRead())) {
        LOG.error("Unable to read alternate names file: {}", altNamesPath);
        System.exit(-1);
    }

    new IndexDirectoryBuilder(fullAncestry).buildIndex(idir, gazetteerFiles, altNamesFile);
}

From source file:de.unisb.cs.st.javaslicer.jung.ShowJungGraph.java

public static void main(String[] args) throws InterruptedException {
    Options options = createOptions();
    CommandLineParser parser = new GnuParser();
    CommandLine cmdLine;

    try {
        cmdLine = parser.parse(options, args, true);
    } catch (ParseException e) {
        System.err.println("Error parsing the command line arguments: " + e.getMessage());
        return;
    }

    if (cmdLine.hasOption('h')) {
        printHelp(options, System.out);
        System.exit(0);
    }

    String[] additionalArgs = cmdLine.getArgs();
    if (additionalArgs.length != 2) {
        printHelp(options, System.err);
        System.exit(-1);
    }
    File traceFile = new File(additionalArgs[0]);
    String slicingCriterionString = additionalArgs[1];

    Long threadId = null;
    if (cmdLine.hasOption('t')) {
        try {
            threadId = Long.parseLong(cmdLine.getOptionValue('t'));
        } catch (NumberFormatException e) {
            System.err.println("Illegal thread id: " + cmdLine.getOptionValue('t'));
            System.exit(-1);
        }
    }

    TraceResult trace;
    try {
        trace = TraceResult.readFrom(traceFile);
    } catch (IOException e) {
        System.err.format("Could not read the trace file \"%s\": %s%n", traceFile, e);
        System.exit(-1);
        return;
    }

    List<SlicingCriterion> sc = null;
    try {
        sc = StaticSlicingCriterion.parseAll(slicingCriterionString, trace.getReadClasses());
    } catch (IllegalArgumentException e) {
        System.err.println("Error parsing slicing criterion: " + e.getMessage());
        System.exit(-1);
        return;
    }

    List<ThreadId> threads = trace.getThreads();
    if (threads.size() == 0) {
        System.err.println("The trace file contains no tracing information.");
        System.exit(-1);
    }

    ThreadId tracing = null;
    for (ThreadId t : threads) {
        if (threadId == null) {
            if ("main".equals(t.getThreadName())
                    && (tracing == null || t.getJavaThreadId() < tracing.getJavaThreadId()))
                tracing = t;
        } else if (t.getJavaThreadId() == threadId.longValue()) {
            tracing = t;
        }
    }

    if (tracing == null) {
        System.err.println(threadId == null ? "Couldn't find the main thread."
                : "The thread you specified was not found.");
        System.exit(-1);
        return;
    }

    Transformer<InstructionInstance, Object> transformer;
    Transformer<Object, String> vertexLabelTransformer;
    Transformer<Object, String> vertexTooltipTransformer;

    String granularity = cmdLine.getOptionValue("granularity");
    if (granularity == null || "instance".equals(granularity)) {
        transformer = new Transformer<InstructionInstance, Object>() {
            @Override
            public InstructionInstance transform(InstructionInstance inst) {
                return inst;
            }
        };
        vertexLabelTransformer = new Transformer<Object, String>() {
            @Override
            public String transform(Object inst) {
                return getShortInstructionText(((InstructionInstance) inst).getInstruction());
            }
        };
        vertexTooltipTransformer = new Transformer<Object, String>() {
            @Override
            public String transform(Object inst) {
                return getInstructionTooltip(((InstructionInstance) inst).getInstruction());
            }
        };
    } else if ("instruction".equals(granularity)) {
        transformer = new Transformer<InstructionInstance, Object>() {
            @Override
            public Instruction transform(InstructionInstance inst) {
                return inst.getInstruction();
            }
        };
        vertexLabelTransformer = new Transformer<Object, String>() {
            @Override
            public String transform(Object inst) {
                return getShortInstructionText(((Instruction) inst));
            }
        };
        vertexTooltipTransformer = new Transformer<Object, String>() {
            @Override
            public String transform(Object inst) {
                return getInstructionTooltip(((Instruction) inst));
            }
        };
    } else if ("line".equals(granularity)) {
        transformer = new Transformer<InstructionInstance, Object>() {
            @Override
            public Line transform(InstructionInstance inst) {
                return new Line(inst.getInstruction().getMethod(), inst.getInstruction().getLineNumber());
            }
        };
        vertexLabelTransformer = new Transformer<Object, String>() {
            @Override
            public String transform(Object inst) {
                Line line = (Line) inst;
                return line.getMethod().getName() + ":" + line.getLineNr();
            }
        };
        vertexTooltipTransformer = new Transformer<Object, String>() {
            @Override
            public String transform(Object inst) {
                Line line = (Line) inst;
                return "Line " + line.getLineNr() + " in method " + line.getMethod().getReadClass().getName()
                        + "." + line.getMethod();
            }
        };
    } else {
        System.err.println("Illegal granularity specification: " + granularity);
        System.exit(-1);
        return;
    }

    int maxLevel = Integer.MAX_VALUE;
    if (cmdLine.hasOption("maxlevel")) {
        try {
            maxLevel = Integer.parseInt(cmdLine.getOptionValue("maxlevel"));
        } catch (NumberFormatException e) {
            System.err.println("Argument to \"maxlevel\" must be an integer.");
            System.exit(-1);
            return;
        }
    }

    long startTime = System.nanoTime();
    ShowJungGraph<Object> showGraph = new ShowJungGraph<Object>(trace, transformer);
    showGraph.setMaxLevel(maxLevel);
    showGraph.setVertexLabelTransformer(vertexLabelTransformer);
    showGraph.setVertexTooltipTransformer(vertexTooltipTransformer);
    if (cmdLine.hasOption("progress"))
        showGraph.addProgressMonitor(new ConsoleProgressMonitor());
    boolean multithreaded;
    if (cmdLine.hasOption("multithreaded")) {
        String multithreadedStr = cmdLine.getOptionValue("multithreaded");
        multithreaded = ("1".equals(multithreadedStr) || "true".equals(multithreadedStr));
    } else {
        multithreaded = Runtime.getRuntime().availableProcessors() > 1;
    }

    DirectedGraph<Object, SliceEdge<Object>> graph = showGraph.getGraph(tracing, sc, multithreaded);
    long endTime = System.nanoTime();

    System.out.format((Locale) null, "%nSlice graph consists of %d nodes.%n", graph.getVertexCount());
    System.out.format((Locale) null, "Computation took %.2f seconds.%n", 1e-9 * (endTime - startTime));

    showGraph.displayGraph(graph);
}