Example usage for java.io File getAbsolutePath

List of usage examples for java.io File getAbsolutePath

Introduction

On this page you can find example usage for java.io.File.getAbsolutePath().

Prototype

public String getAbsolutePath() 

Document

Returns the absolute pathname string of this abstract pathname.
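
As a quick orientation before the longer, project-level examples below, the following minimal sketch shows the call in isolation. The class name and the relative path "example.txt" are only illustrative placeholders; the file does not need to exist for getAbsolutePath() to return a value.

import java.io.File;

public class GetAbsolutePathDemo {
    public static void main(String[] args) {
        // A relative abstract pathname; "example.txt" is just a placeholder.
        File file = new File("example.txt");

        // getPath() returns the pathname exactly as passed to the constructor.
        System.out.println("path         : " + file.getPath());

        // For a relative pathname, getAbsolutePath() resolves it against the
        // current working directory (the "user.dir" system property) and
        // returns the resulting absolute pathname string.
        System.out.println("absolute path: " + file.getAbsolutePath());
    }
}

Note that getAbsolutePath() only makes the path absolute; it does not normalize "." or ".." segments or resolve symbolic links. Use getCanonicalPath() (which may throw an IOException) when a normalized, unique path is required.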

Usage

From source file:de.unileipzig.ub.indexer.App.java

public static void main(String[] args) throws IOException {

    // create Options object
    Options options = new Options();

    options.addOption("h", "help", false, "display this help");

    options.addOption("f", "filename", true, "name of the JSON file whose content should be indexed");
    options.addOption("i", "index", true, "the name of the target index");
    options.addOption("d", "doctype", true, "the name of the doctype (title, local, ...)");

    options.addOption("t", "host", true, "elasticsearch hostname (default: 0.0.0.0)");
    options.addOption("p", "port", true, "transport port (that's NOT the http port, default: 9300)");
    options.addOption("c", "cluster", true, "cluster name (default: elasticsearch_mdma)");

    options.addOption("b", "bulksize", true, "number of docs sent in one request (default: 3000)");
    options.addOption("v", "verbose", false, "show processing speed while indexing");
    options.addOption("s", "status", false, "only show status of index for file");

    options.addOption("r", "repair", false, "attempt to repair recoverable inconsistencies on the go");
    options.addOption("e", "debug", false, "set logging level to debug");
    options.addOption("l", "logfile", true, "logfile - if not specified, only log to stdout");

    options.addOption("m", "memcached", true, "host and port of memcached (default: localhost:11211)");
    options.addOption("z", "latest-flag-on", true,
            "enable latest flag according to field (within content, e.g. 001)");
    options.addOption("a", "flat", false, "flat-mode: do not check for inconsistencies");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException ex) {
        logger.error(ex);
        System.exit(1);
    }

    // setup logging
    Properties systemProperties = System.getProperties();
    systemProperties.put("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.Log4JLogger");
    System.setProperties(systemProperties);
    Logger.getLogger("net.spy.memcached").setLevel(Level.ERROR);

    Properties props = new Properties();
    props.load(props.getClass().getResourceAsStream("/log4j.properties"));

    if (cmd.hasOption("debug")) {
        props.setProperty("log4j.logger.de.unileipzig", "DEBUG");
    }

    if (cmd.hasOption("logfile")) {
        props.setProperty("log4j.rootLogger", "INFO, stdout, F");
        props.setProperty("log4j.appender.F", "org.apache.log4j.FileAppender");
        props.setProperty("log4j.appender.F.File", cmd.getOptionValue("logfile"));
        props.setProperty("log4j.appender.F.layout", "org.apache.log4j.PatternLayout");
        props.setProperty("log4j.appender.F.layout.ConversionPattern", "%5p | %d | %F | %L | %m%n");
    }

    PropertyConfigurator.configure(props);

    InetAddress addr = InetAddress.getLocalHost();
    String memcachedHostAndPort = addr.getHostAddress() + ":11211";
    if (cmd.hasOption("m")) {
        memcachedHostAndPort = cmd.getOptionValue("m");
    }

    // setup caching
    try {
        if (memcachedClient == null) {
            memcachedClient = new MemcachedClient(
                    new ConnectionFactoryBuilder().setFailureMode(FailureMode.Cancel).build(),
                    AddrUtil.getAddresses(memcachedHostAndPort));
            try {
                // give client and server a moment to establish the connection
                Thread.sleep(300);
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();
            }

            Collection<SocketAddress> availableServers = memcachedClient.getAvailableServers();
            logger.info(availableServers);
            if (availableServers.size() == 0) {
                logger.info("no memcached servers found");
                memcachedClient.shutdown();
                memcachedClient = null;
            } else {
                logger.info(availableServers.size() + " memcached server(s) detected, fine.");
            }
        }
    } catch (IOException ex) {
        logger.warn("couldn't create a connection, bailing out: " + ex.getMessage());
    }

    // process options

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("indexer", options, true);
        quit(0);
    }

    boolean verbose = cmd.hasOption("verbose");

    // ES options
    String[] hosts = new String[] { "0.0.0.0" };
    int port = 9300;
    String clusterName = "elasticsearch_mdma";
    int bulkSize = 3000;

    if (cmd.hasOption("host")) {
        hosts = cmd.getOptionValues("host");
    }
    if (cmd.hasOption("port")) {
        port = Integer.parseInt(cmd.getOptionValue("port"));
    }
    if (cmd.hasOption("cluster")) {
        clusterName = cmd.getOptionValue("cluster");
    }
    if (cmd.hasOption("bulksize")) {
        bulkSize = Integer.parseInt(cmd.getOptionValue("bulksize"));
        if (bulkSize < 1 || bulkSize > 100000) {
            logger.error("bulksize must be between 1 and 100,000");
            quit(1);
        }
    }

    // ES Client
    final Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", clusterName)
            .build();
    final TransportClient client = new TransportClient(settings);
    for (String host : hosts) {
        client.addTransportAddress(new InetSocketTransportAddress(host, port));
    }

    if (cmd.hasOption("filename") && cmd.hasOption("index") && cmd.hasOption("doctype")) {

        final String filename = cmd.getOptionValue("filename");

        final File _file = new File(filename);
        if (_file.length() == 0) {
            logger.info(_file.getAbsolutePath() + " is empty, skipping");
            quit(0); // file is empty
        }

        // for flat mode: leave a stampfile beside the json to 
        // indicate previous successful processing
        File directory = new File(filename).getParentFile();
        File stampfile = new File(directory, DigestUtils.shaHex(filename) + ".indexed");

        long start = System.currentTimeMillis();
        long lineCount = 0;

        final String indexName = cmd.getOptionValue("index");
        final String docType = cmd.getOptionValue("doctype");
        BulkRequestBuilder bulkRequest = client.prepareBulk();

        try {
            if (cmd.hasOption("flat")) {
                // flat mode
                // .........
                if (stampfile.exists()) {
                    logger.info("SKIPPING, since it seems this file has already " + "been imported (found: "
                            + stampfile.getAbsolutePath() + ")");
                    quit(0);
                }
            } else {

                final String srcSHA1 = extractSrcSHA1(filename);

                logger.debug(filename + " srcsha1: " + srcSHA1);

                long docsInIndex = getIndexedRecordCount(client, indexName, srcSHA1);
                logger.debug(filename + " indexed: " + docsInIndex);

                long docsInFile = getLineCount(filename);
                logger.debug(filename + " lines: " + docsInFile);

                // in non-flat-mode, indexing would take care
                // of inconsistencies
                if (docsInIndex == docsInFile) {
                    logger.info("UP-TO-DATE: " + filename + " (" + docsInIndex + ", " + srcSHA1 + ")");
                    client.close();
                    quit(0);
                }

                if (docsInIndex > 0) {
                    logger.warn("INCONSISTENCY DETECTED: " + filename + ": indexed:" + docsInIndex + " lines:"
                            + docsInFile);

                    if (!cmd.hasOption("r")) {
                        logger.warn(
                                "Please re-run indexer with --repair flag or delete residues first with: $ curl -XDELETE "
                                        + hosts[0] + ":9200/" + indexName
                                        + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1
                                        + "\" }}'");
                        client.close();
                        quit(1);
                    } else {
                        logger.info("Attempting to clear residues...");
                        // attempt to repair once
                        DeleteByQueryResponse dbqr = client.prepareDeleteByQuery(indexName)
                                .setQuery(termQuery("meta.srcsha1", srcSHA1)).execute().actionGet();

                        Iterator<IndexDeleteByQueryResponse> it = dbqr.iterator();
                        long deletions = 0;
                        while (it.hasNext()) {
                            IndexDeleteByQueryResponse response = it.next();
                            deletions += 1;
                        }
                        logger.info("Deleted residues of " + filename);
                        logger.info("Refreshing [" + indexName + "]");
                        RefreshResponse refreshResponse = client.admin().indices()
                                .refresh(new RefreshRequest(indexName)).actionGet();

                        long indexedAfterDelete = getIndexedRecordCount(client, indexName, srcSHA1);
                        logger.info(indexedAfterDelete + " docs remained");
                        if (indexedAfterDelete > 0) {
                            logger.warn("Not all residues cleaned. Try to fix this manually: $ curl -XDELETE "
                                    + hosts[0] + ":9200/" + indexName
                                    + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'");
                            quit(1);
                        } else {
                            logger.info("Residues are gone. Now trying to reindex: " + filename);
                        }
                    }
                }
            }

            logger.info("INDEXING-REQUIRED: " + filename);
            if (cmd.hasOption("status")) {
                quit(0);
            }

            HashSet<String> idsInBatch = new HashSet<String>();

            String idField = null;
            if (cmd.hasOption("z")) {
                idField = cmd.getOptionValue("z");
            }

            final FileReader fr = new FileReader(filename);
            final BufferedReader br = new BufferedReader(fr);

            String line;
            // one line is one document
            while ((line = br.readLine()) != null) {

                // "Latest-Flag" machine
                // This gets obsolete with a "flat" index
                if (cmd.hasOption("z")) {
                    // flag that indicates, whether the document
                    // about to be indexed will be the latest
                    boolean willBeLatest = true;

                    // check if there is a previous (lower meta.timestamp) document with 
                    // the same identifier (whatever that may be - queried under "content")
                    final String contentIdentifier = getContentIdentifier(line, idField);
                    idsInBatch.add(contentIdentifier);

                    // assumed in meta.timestamp
                    final Long timestamp = Long.parseLong(getTimestamp(line));

                    logger.debug("Checking whether record is latest (line: " + lineCount + ")");
                    logger.debug(contentIdentifier + ", " + timestamp);

                    // get all docs, which match the contentIdentifier
                    // by filter, which doesn't score
                    final TermFilterBuilder idFilter = new TermFilterBuilder("content." + idField,
                            contentIdentifier);
                    final TermFilterBuilder kindFilter = new TermFilterBuilder("meta.kind", docType);
                    final AndFilterBuilder afb = new AndFilterBuilder();
                    afb.add(idFilter).add(kindFilter);
                    final FilteredQueryBuilder fb = filteredQuery(matchAllQuery(), afb);

                    final SearchResponse searchResponse = client.prepareSearch(indexName)
                            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(fb).setFrom(0)
                            .setSize(1200) // 3 years and 105 days assuming daily updates at the most
                            .setExplain(false).execute().actionGet();

                    final SearchHits searchHits = searchResponse.getHits();

                    logger.debug("docs with this id in the index: " + searchHits.getTotalHits());

                    for (final SearchHit hit : searchHits.getHits()) {
                        final String docId = hit.id();
                        final Map<String, Object> source = hit.sourceAsMap();
                        final Map meta = (Map) source.get("meta");
                        final Long docTimestamp = Long.parseLong(meta.get("timestamp").toString());
                        // if the indexed doc's timestamp is lower than or equal to the
                        // current one, remove any latest flag
                        if (timestamp >= docTimestamp) {
                            source.remove("latest");
                            final ObjectMapper mapper = new ObjectMapper();
                            // put the updated doc back
                            // IndexResponse response = 
                            client.prepareIndex(indexName, docType).setCreate(false).setId(docId)
                                    .setSource(mapper.writeValueAsBytes(source))
                                    .execute(new ActionListener<IndexResponse>() {
                                        public void onResponse(IndexResponse rspns) {
                                            logger.debug("Removed latest flag from " + contentIdentifier + ", "
                                                    + docTimestamp + ", " + hit.id() + " since (" + timestamp
                                                    + " > " + docTimestamp + ")");
                                        }

                                        public void onFailure(Throwable thrwbl) {
                                            logger.error("Could not remove flag from " + hit.id() + ", "
                                                    + contentIdentifier);
                                        }
                                    });
                            // .execute()
                            //.actionGet();
                        } else {
                            logger.debug("Doc " + hit.id() + " is newer (" + docTimestamp + ")");
                            willBeLatest = false;
                        }
                    }

                    if (willBeLatest) {
                        line = setLatestFlag(line);
                        logger.info("Setting latest flag on " + contentIdentifier + ", " + timestamp);
                    }

                    // end of latest-flag machine
                    // beware - this will be correct as long as there
                    // are no dups within one bulk!
                }

                bulkRequest.add(client.prepareIndex(indexName, docType).setSource(line));
                lineCount++;
                logger.debug("Added line " + lineCount + " to BULK");
                logger.debug(line);

                if (lineCount % bulkSize == 0) {

                    if (idsInBatch.size() != bulkSize && cmd.hasOption("z")) {
                        logger.error(
                                "This batch contains duplicate IDs. That is not harmful for the index, but it makes the latest flag fuzzy");
                        logger.error(
                                "Bulk size was: " + bulkSize + ", but only " + idsInBatch.size() + " distinct IDs");
                    }
                    idsInBatch.clear();

                    logger.debug("Issuing BULK request");

                    final long actionCount = bulkRequest.numberOfActions();
                    final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                    final long tookInMillis = bulkResponse.getTookInMillis();

                    if (bulkResponse.hasFailures()) {
                        logger.fatal("FAILED, bulk not indexed. exiting now.");
                        Iterator<BulkItemResponse> it = bulkResponse.iterator();
                        while (it.hasNext()) {
                            BulkItemResponse bir = it.next();
                            if (bir.isFailed()) {
                                Failure failure = bir.getFailure();
                                logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                        + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                            }
                        }
                        quit(1);
                    } else {
                        if (verbose) {
                            final double elapsed = System.currentTimeMillis() - start;
                            final double speed = (lineCount / elapsed * 1000);
                            logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount
                                    + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                        }
                    }
                    bulkRequest = client.prepareBulk();
                }
            }

            // handle the remaining items
            final long actionCount = bulkRequest.numberOfActions();
            if (actionCount > 0) {
                final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                final long tookInMillis = bulkResponse.getTookInMillis();

                if (bulkResponse.hasFailures()) {
                    logger.fatal("FAILED, bulk not indexed. exiting now.");
                    Iterator<BulkItemResponse> it = bulkResponse.iterator();
                    while (it.hasNext()) {
                        BulkItemResponse bir = it.next();
                        if (bir.isFailed()) {
                            Failure failure = bir.getFailure();
                            logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                    + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                        }
                    }
                    quit(1);
                } else {

                    // trigger update now
                    RefreshResponse refreshResponse = client.admin().indices()
                            .refresh(new RefreshRequest(indexName)).actionGet();

                    if (verbose) {
                        final double elapsed = System.currentTimeMillis() - start;
                        final double speed = (lineCount / elapsed * 1000);
                        logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/"
                                + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                    }

                }

            }

            br.close();
            client.close();
            final double elapsed = (System.currentTimeMillis() - start) / 1000.0;
            final double speed = (lineCount / elapsed);
            logger.info("indexing (" + filename + ") " + lineCount + " docs took " + elapsed + "s (speed: "
                    + String.format("%.2f", speed) + "r/s)");
            if (cmd.hasOption("flat")) {
                try {
                    FileUtils.touch(stampfile);
                } catch (IOException ioe) {
                    logger.warn(".indexed stamp file not created. Will reindex everything every time.");
                }
            }
        } catch (IOException e) {
            client.close();
            logger.error(e);
            quit(1);
        } finally {
            client.close();
        }
    }
    quit(0);
}

From source file:edu.isi.karma.research.modeling.ModelLearner_LOD.java

public static void main(String[] args) throws Exception {

    ServletContextParameterMap contextParameters = ContextParametersRegistry.getInstance().getDefault();
    contextParameters.setParameterValue(ContextParameter.USER_CONFIG_DIRECTORY, "/Users/mohsen/karma/config");

    OntologyManager ontologyManager = new OntologyManager(contextParameters.getId());
    File ff = new File(Params.ONTOLOGY_DIR);
    File[] files = ff.listFiles();
    if (files == null) {
        logger.error("no ontology to import at " + ff.getAbsolutePath());
        return;
    }

    for (File f : files) {
        if (f.getName().endsWith(".owl") || f.getName().endsWith(".rdf") || f.getName().endsWith(".n3")
                || f.getName().endsWith(".ttl") || f.getName().endsWith(".xml")) {
            logger.info("Loading ontology file: " + f.getAbsolutePath());
            ontologyManager.doImport(f, "UTF-8");
        }
    }
    ontologyManager.updateCache();

    String outputPath = Params.OUTPUT_DIR;
    String graphPath = Params.GRAPHS_DIR;

    FileUtils.cleanDirectory(new File(graphPath));

    List<SemanticModel> semanticModels = ModelReader.importSemanticModelsFromJsonFiles(Params.MODEL_DIR,
            Params.MODEL_MAIN_FILE_EXT);

    ModelLearner_LOD modelLearner = null;

    boolean onlyGenerateSemanticTypeStatistics = false;
    boolean onlyUseOntology = false;
    boolean useCorrectType = false;
    int numberOfCandidates = 4;
    boolean onlyEvaluateInternalLinks = false;
    int maxPatternSize = 3;

    if (onlyGenerateSemanticTypeStatistics) {
        getStatistics(semanticModels);
        return;
    }

    String filePath = Params.RESULTS_DIR + "temp/";
    String filename = "";

    filename += "lod-results";
    filename += useCorrectType ? "-correct" : "-k=" + numberOfCandidates;
    filename += onlyUseOntology ? "-ontology" : "-p" + maxPatternSize;
    filename += onlyEvaluateInternalLinks ? "-internal" : "-all";
    filename += ".csv";

    PrintWriter resultFile = new PrintWriter(new File(filePath + filename));

    resultFile.println("source \t p \t r \t t \n");

    for (int i = 0; i < semanticModels.size(); i++) {
        //      for (int i = 0; i <= 10; i++) {
        //      int i = 1; {

        int newSourceIndex = i;
        SemanticModel newSource = semanticModels.get(newSourceIndex);

        logger.info("======================================================");
        logger.info(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        System.out.println(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        logger.info("======================================================");

        SemanticModel correctModel = newSource;
        List<ColumnNode> columnNodes = correctModel.getColumnNodes();

        List<Node> steinerNodes = new LinkedList<Node>(columnNodes);

        String graphName = graphPath + "lod" + Params.GRAPH_FILE_EXT;

        if (onlyUseOntology) {
            modelLearner = new ModelLearner_LOD(new GraphBuilder(ontologyManager, false), steinerNodes);
        } else if (new File(graphName).exists()) {
            // read graph from file
            try {
                logger.info("loading the graph ...");
                DirectedWeightedMultigraph<Node, DefaultLink> graph = GraphUtil.importJson(graphName);
                modelLearner = new ModelLearner_LOD(new GraphBuilderTopK(ontologyManager, graph), steinerNodes);
            } catch (Exception e) {
                e.printStackTrace();
                resultFile.close();
                return;
            }
        } else {
            logger.info("building the graph ...");
            // create and save the graph to file
            //            GraphBuilder_Popularity b = new GraphBuilder_Popularity(ontologyManager, 
            //                  Params.LOD_OBJECT_PROPERIES_FILE, 
            //                  Params.LOD_DATA_PROPERIES_FILE);
            GraphBuilder_LOD_Pattern b = new GraphBuilder_LOD_Pattern(ontologyManager, Params.PATTERNS_DIR,
                    maxPatternSize);
            modelLearner = new ModelLearner_LOD(b.getGraphBuilder(), steinerNodes);
        }

        long start = System.currentTimeMillis();

        List<SortableSemanticModel> hypothesisList = modelLearner.hypothesize(useCorrectType,
                numberOfCandidates);

        long elapsedTimeMillis = System.currentTimeMillis() - start;
        float elapsedTimeSec = elapsedTimeMillis / 1000F;

        List<SortableSemanticModel> topHypotheses = null;
        if (hypothesisList != null) {

            //            for (SortableSemanticModel sss : hypothesisList) {
            //               ModelEvaluation mmm = sss.evaluate(correctModel);
            //               System.out.println(mmm.getPrecision() + ", " + mmm.getRecall());
            //            }
            topHypotheses = hypothesisList.size() > 10 ? hypothesisList.subList(0, 10) : hypothesisList;
        }

        Map<String, SemanticModel> models = new TreeMap<String, SemanticModel>();

        ModelEvaluation me;
        models.put("1-correct model", correctModel);
        if (topHypotheses != null)
            for (int k = 0; k < topHypotheses.size(); k++) {

                SortableSemanticModel m = topHypotheses.get(k);

                me = m.evaluate(correctModel, onlyEvaluateInternalLinks, false);

                String label = "candidate " + k + "\n" +
                //                     (m.getSteinerNodes() == null ? "" : m.getSteinerNodes().getScoreDetailsString()) +
                        "link coherence:"
                        + (m.getLinkCoherence() == null ? "" : m.getLinkCoherence().getCoherenceValue()) + "\n";
                label += (m.getSteinerNodes() == null || m.getSteinerNodes().getCoherence() == null) ? ""
                        : "node coherence:" + m.getSteinerNodes().getCoherence().getCoherenceValue() + "\n";
                label += "confidence:" + m.getConfidenceScore() + "\n";
                label += m.getSteinerNodes() == null ? ""
                        : "mapping score:" + m.getSteinerNodes().getScore() + "\n";
                label += "cost:" + roundDecimals(m.getCost(), 6) + "\n" +
                //                        "-distance:" + me.getDistance() + 
                        "-precision:" + me.getPrecision() + "-recall:" + me.getRecall();

                models.put(label, m);

                if (k == 0) { // first rank model
                    System.out.println("precision: " + me.getPrecision() + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                    logger.info("precision: " + me.getPrecision() + ", recall: " + me.getRecall() + ", time: "
                            + elapsedTimeSec);
                    String s = newSource.getName() + "\t" + me.getPrecision() + "\t" + me.getRecall() + "\t"
                            + elapsedTimeSec;
                    resultFile.println(s);

                }
            }

        String outName = outputPath + newSource.getName() + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT;

        GraphVizUtil.exportSemanticModelsToGraphviz(models, newSource.getName(), outName,
                GraphVizLabelType.LocalId, GraphVizLabelType.LocalUri, true, true);

    }

    resultFile.close();

}

From source file:de.prozesskraft.ptest.Fingerprint.java

public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {

    //      try
    //      {
    //         if (args.length != 3)
    //         {
    //            System.out.println("Please specify processdefinition file (xml) and an outputfilename");
    //         }
    //         
    //      }
    //      catch (ArrayIndexOutOfBoundsException e)
    //      {
    //         System.out.println("***ArrayIndexOutOfBoundsException: Please specify processdefinition.xml, openoffice_template.od*, newfile_for_processdefinitions.odt\n" + e.toString());
    //      }

    /*----------------------------
      get options from ini-file
    ----------------------------*/
    File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Fingerprint.class) + "/" + "../etc/ptest-fingerprint.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");
    Option ov = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option opath = OptionBuilder.withArgName("PATH").hasArg()
            .withDescription(
                    "[mandatory; default: .] the root path for the tree you want to make a fingerprint from.")
            //            .isRequired()
            .create("path");

    Option osizetol = OptionBuilder.withArgName("FLOAT").hasArg().withDescription(
            "[optional; default: 0.02] the sizeTolerance (as factor in percent) of all file entries will be set to this value. [0.0 < sizetol < 1.0]")
            //            .isRequired()
            .create("sizetol");

    Option omd5 = OptionBuilder.withArgName("no|yes").hasArg()
            .withDescription("[optional; default: yes] should be the md5sum of files determined? no|yes")
            //            .isRequired()
            .create("md5");

    Option oignore = OptionBuilder.withArgName("STRING").hasArgs()
            .withDescription("[optional] path-pattern that should be ignored when creating the fingerprint")
            //            .isRequired()
            .create("ignore");

    Option oignorefile = OptionBuilder.withArgName("FILE").hasArg().withDescription(
            "[optional] file with path-patterns (one per line) that should be ignored when creating the fingerprint")
            //            .isRequired()
            .create("ignorefile");

    Option ooutput = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory; default: <path>/fingerprint.xml] fingerprint file")
            //            .isRequired()
            .create("output");

    Option of = new Option("f", "[optional] force overwrite fingerprint file if it already exists");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();

    options.addOption(ohelp);
    options.addOption(ov);
    options.addOption(opath);
    options.addOption(osizetol);
    options.addOption(omd5);
    options.addOption(oignore);
    options.addOption(oignorefile);
    options.addOption(ooutput);
    options.addOption(of);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        commandline = parser.parse(options, args);

    } catch (Exception exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        exiter();
    }

    /*----------------------------
      usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("fingerprint", options);
        System.exit(0);
    }

    else if (commandline.hasOption("v")) {
        System.out.println("web:     " + web);
        System.out.println("author: " + author);
        System.out.println("version:" + version);
        System.out.println("date:     " + date);
        System.exit(0);
    }

    /*----------------------------
      check whether a bad combination of parameters was given
    ----------------------------*/
    String path = "";
    String sizetol = "";
    boolean md5 = false;
    Float sizetolFloat = null;
    String output = "";
    java.io.File ignorefile = null;
    ArrayList<String> ignore = new ArrayList<String>();

    if (!(commandline.hasOption("path"))) {
        System.err.println("setting default for -path=.");
        path = ".";
    } else {
        path = commandline.getOptionValue("path");
    }

    if (!(commandline.hasOption("sizetol"))) {
        System.err.println("setting default for -sizetol=0.02");
        sizetol = "0.02";
        sizetolFloat = 0.02F;
    } else {
        sizetol = commandline.getOptionValue("sizetol");
        sizetolFloat = Float.parseFloat(sizetol);

        if ((sizetolFloat > 1) || (sizetolFloat < 0)) {
            System.err.println("use only values >=0.0 and <1.0 for -sizetol");
            System.exit(1);
        }
    }

    if (!(commandline.hasOption("md5"))) {
        System.err.println("setting default for -md5=yes");
        md5 = true;
    } else if (commandline.getOptionValue("md5").equals("no")) {
        md5 = false;
    } else if (commandline.getOptionValue("md5").equals("yes")) {
        md5 = true;
    } else {
        System.err.println("use only values no|yes for -md5");
        System.exit(1);
    }

    if (commandline.hasOption("ignore")) {
        ignore.addAll(Arrays.asList(commandline.getOptionValues("ignore")));
    }

    if (commandline.hasOption("ignorefile")) {
        ignorefile = new java.io.File(commandline.getOptionValue("ignorefile"));
        if (!ignorefile.exists()) {
            System.err.println("warn: ignore file does not exist: " + ignorefile.getCanonicalPath());
        }
    }

    if (!(commandline.hasOption("output"))) {
        System.err.println("setting default for -output=" + path + "/fingerprint.xml");
        output = path + "/fingerprint.xml";
    } else {
        output = commandline.getOptionValue("output");
    }

    // if the output file already exists -> abort
    java.io.File outputFile = new File(output);
    if (outputFile.exists()) {
        if (commandline.hasOption("f")) {
            outputFile.delete();
        } else {
            System.err
                    .println("error: output file (" + output + ") already exists. use -f to force overwrite.");
            System.exit(1);
        }
    }

    //      if ( !( commandline.hasOption("output")) )
    //      {
    //         System.err.println("option -output is mandatory.");
    //         exiter();
    //      }

    /*----------------------------
      check the license and abort if it is not valid
    ----------------------------*/

    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      the actual business logic
    ----------------------------*/
    Dir dir = new Dir();
    dir.setBasepath(path);
    dir.setOutfilexml(output);

    // read the ignore file into a list
    if ((ignorefile != null) && (ignorefile.exists())) {
        Scanner sc = new Scanner(ignorefile);
        while (sc.hasNextLine()) {
            ignore.add(sc.nextLine());
        }
        sc.close();
    }

    //      // add autoignore patterns
    //      String autoIgnoreString = ini.get("autoignore", "autoignore");
    //      ignoreLines.addAll(Arrays.asList(autoIgnoreString.split(",")));

    //      // debug
    //      System.out.println("ignorefile content:");
    //      for(String actLine : ignore)
    //      {
    //         System.out.println("line: "+actLine);
    //      }

    try {
        dir.genFingerprint(sizetolFloat, md5, ignore);
    } catch (NullPointerException e) {
        System.err.println("file/dir does not exist " + path);
        e.printStackTrace();
        exiter();
    } catch (IOException e) {
        e.printStackTrace();
        exiter();
    }

    System.out.println("writing to file: " + dir.getOutfilexml());
    dir.writeXml();

}

From source file:de.prozesskraft.ptest.Compare.java

public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {

    //      try
    //      {
    //         if (args.length != 3)
    //         {
    //            System.out.println("Please specify processdefinition file (xml) and an outputfilename");
    //         }
    //         
    //      }
    //      catch (ArrayIndexOutOfBoundsException e)
    //      {
    //         System.out.println("***ArrayIndexOutOfBoundsException: Please specify processdefinition.xml, openoffice_template.od*, newfile_for_processdefinitions.odt\n" + e.toString());
    //      }

    /*----------------------------
      get options from ini-file
    ----------------------------*/
    File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Compare.class) + "/" + "../etc/ptest-compare.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");
    Option ov = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option oref = OptionBuilder.withArgName("PATH").hasArg()
            .withDescription("[mandatory] directory or fingerprint, that the --exam will be checked against")
            //            .isRequired()
            .create("ref");

    Option oexam = OptionBuilder.withArgName("PATH").hasArg().withDescription(
            "[optional; default: parent directory of -ref] directory or fingerprint, that will be checked against --ref")
            //            .isRequired()
            .create("exam");

    Option oresult = OptionBuilder.withArgName("FILE").hasArg().withDescription(
            "[mandatory; default: result.txt] the result (success|failed) of the comparison will be printed to this file")
            //            .isRequired()
            .create("result");

    Option osummary = OptionBuilder.withArgName("all|error|debug").hasArg().withDescription(
            "[optional] 'error' prints a summary reduced to failed matches. 'all' prints a full summary. 'debug' is like 'all' plus debug statements")
            //            .isRequired()
            .create("summary");

    Option omd5 = OptionBuilder.withArgName("no|yes").hasArg()
            .withDescription("[optional; default: yes] to ignore md5 information in comparison use -md5=no")
            //            .isRequired()
            .create("md5");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();

    options.addOption(ohelp);
    options.addOption(ov);
    options.addOption(oref);
    options.addOption(oexam);
    options.addOption(oresult);
    options.addOption(osummary);
    options.addOption(omd5);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        commandline = parser.parse(options, args);

    } catch (Exception exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        exiter();
    }

    /*----------------------------
      usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("compare", options);
        System.exit(0);
    }

    else if (commandline.hasOption("v")) {
        System.err.println("web:     " + web);
        System.err.println("author: " + author);
        System.err.println("version:" + version);
        System.err.println("date:     " + date);
        System.exit(0);
    }

    /*----------------------------
      check whether a bad combination of parameters was given
    ----------------------------*/
    boolean error = false;
    String result = "";
    boolean md5 = false;
    String ref = null;
    String exam = null;

    if (!(commandline.hasOption("ref"))) {
        System.err.println("option -ref is mandatory");
        error = true;
    } else {
        ref = commandline.getOptionValue("ref");
    }

    if (!(commandline.hasOption("exam"))) {
        java.io.File refFile = new java.io.File(ref).getCanonicalFile();
        java.io.File examFile = refFile.getParentFile();
        exam = examFile.getCanonicalPath();

        System.err.println("setting default: -exam=" + exam);
    } else {
        exam = commandline.getOptionValue("exam");
    }

    if (error) {
        exiter();
    }

    if (!(commandline.hasOption("result"))) {
        System.err.println("setting default: -result=result.txt");
        result = "result.txt";
    }

    if (!(commandline.hasOption("md5"))) {
        System.err.println("setting default: -md5=yes");
        md5 = true;
    } else if (commandline.getOptionValue("md5").equals("no")) {
        md5 = false;
    } else if (commandline.getOptionValue("md5").equals("yes")) {
        md5 = true;
    } else {
        System.err.println("use only values no|yes for -md5");
        System.exit(1);
    }

    /*----------------------------
      check the license and abort if it is not valid
    ----------------------------*/

    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      the actual business logic
    ----------------------------*/

    // read in the reference data
    java.io.File refPath = new java.io.File(ref);

    Dir refDir = new Dir();

    // if it is a directory, the fingerprint has to be generated
    if (refPath.exists() && refPath.isDirectory()) {
        refDir.setBasepath(refPath.getCanonicalPath());
        refDir.genFingerprint(0f, true, new ArrayList<String>());
        refDir.setRespectMd5Recursive(md5);
        System.err.println("-ref is a directory");
    }
    // if it is a fingerprint file, it has to be read in
    else if (refPath.exists()) {
        refDir.setInfilexml(refPath.getCanonicalPath());
        System.err.println("-ref is a fingerprint");
        try {
            refDir.readXml();
        } catch (JAXBException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        refDir.setRespectMd5Recursive(md5);
    } else if (!refPath.exists()) {
        System.err.println("-ref does not exist! " + refPath.getAbsolutePath());
        exiter();
    }

    // read in the examinee data
    java.io.File examPath = new java.io.File(exam);

    Dir examDir = new Dir();

    // if it is a directory, the fingerprint has to be generated
    if (examPath.exists() && examPath.isDirectory()) {
        examDir.setBasepath(examPath.getCanonicalPath());
        examDir.genFingerprint(0f, true, new ArrayList<String>());
        examDir.setRespectMd5Recursive(md5);
        System.err.println("-exam is a directory");
    }
    // if it is a fingerprint file, it has to be read in
    else if (examPath.exists()) {
        examDir.setInfilexml(examPath.getCanonicalPath());
        System.err.println("-exam is a fingerprint");
        try {
            examDir.readXml();
        } catch (JAXBException e) {
            System.err.println("error while reading xml");
            e.printStackTrace();
        }
        examDir.setRespectMd5Recursive(md5);
    } else if (!examPath.exists()) {
        System.err.println("-exam does not exist! " + examPath.getAbsolutePath());
        exiter();
    }

    // perform the comparison
    refDir.runCheck(examDir);

    //      if(examDir.isMatchSuccessfullRecursive() && refDir.isMatchSuccessfullRecursive())
    if (refDir.isMatchSuccessfullRecursive()) {
        System.out.println("SUCCESS");
    } else {
        System.out.println("FAILED");
    }

    // print the CSV result table
    if (commandline.hasOption("summary")) {
        if (commandline.getOptionValue("summary").equals("error")) {
            System.err.println("the results of the reference are crucial for result FAILED|SUCCESS");
            System.err.println(refDir.sprintSummaryAsCsv("error"));
            System.err.println(examDir.sprintSummaryAsCsv("error"));
        } else if (commandline.getOptionValue("summary").equals("all")) {
            System.err.println(refDir.sprintSummaryAsCsv("all"));
            System.err.println(examDir.sprintSummaryAsCsv("all"));
        } else if (commandline.getOptionValue("summary").equals("debug")) {
            System.err.println(refDir.sprintSummaryAsCsv("all"));
            System.err.println(examDir.sprintSummaryAsCsv("all"));
            // print the log output
            System.err.println("------ logging of reference --------");
            System.err.println(refDir.getLogAsStringRecursive());
            System.err.println("------ logging of examinee --------");
            System.err.println(examDir.getLogAsStringRecursive());
        } else {
            System.err.println("for option -summary you may only use all|error|debug");
            exiter();
        }
    }

}

From source file:de.huxhorn.lilith.Lilith.java

public static void main(String[] args) {
    {
        // initialize java.util.logging to use slf4j...
        Handler handler = new Slf4JHandler();
        java.util.logging.Logger rootLogger = java.util.logging.Logger.getLogger("");
        rootLogger.addHandler(handler);
        rootLogger.setLevel(java.util.logging.Level.WARNING);
    }

    StringBuilder appTitle = new StringBuilder();
    appTitle.append(APP_NAME).append(" V").append(APP_VERSION);
    if (APP_SNAPSHOT) {
        // always append timestamp for SNAPSHOT
        appTitle.append(" (").append(APP_TIMESTAMP_DATE).append(")");
    }

    CommandLineArgs cl = new CommandLineArgs();
    JCommander commander = new JCommander(cl);
    Cat cat = new Cat();
    commander.addCommand(Cat.NAME, cat);
    Tail tail = new Tail();
    commander.addCommand(Tail.NAME, tail);
    Filter filter = new Filter();
    commander.addCommand(Filter.NAME, filter);
    Index index = new Index();
    commander.addCommand(Index.NAME, index);
    Md5 md5 = new Md5();
    commander.addCommand(Md5.NAME, md5);
    Help help = new Help();
    commander.addCommand(Help.NAME, help);

    try {
        commander.parse(args);
    } catch (ParameterException ex) {
        printAppInfo(appTitle.toString(), false);
        System.out.println(ex.getMessage() + "\n");
        printHelp(commander);
        System.exit(-1);
    }
    if (cl.verbose) {
        if (!APP_SNAPSHOT) {
            // timestamp is always appended for SNAPSHOT
            // don't append it twice
            appTitle.append(" (").append(APP_TIMESTAMP_DATE).append(")");
        }
        appTitle.append(" - ").append(APP_REVISION);
    }

    String appTitleString = appTitle.toString();
    if (cl.showHelp) {
        printAppInfo(appTitleString, false);
        printHelp(commander);
        System.exit(0);
    }

    String command = commander.getParsedCommand();
    if (!Tail.NAME.equals(command) && !Cat.NAME.equals(command) && !Filter.NAME.equals(command)) // don't print info in case of cat, tail or filter
    {
        printAppInfo(appTitleString, true);
    }

    if (cl.logbackConfig != null) {
        File logbackFile = new File(cl.logbackConfig);
        if (!logbackFile.isFile()) {
            System.out.println(logbackFile.getAbsolutePath() + " is not a valid file.");
            System.exit(-1);
        }
        try {
            initLogbackConfig(logbackFile.toURI().toURL());
        } catch (MalformedURLException e) {
            System.out.println("Failed to convert " + logbackFile.getAbsolutePath() + " to URL. " + e);
            System.exit(-1);
        }
    } else if (cl.verbose) {
        initVerboseLogging();
    }

    if (cl.printBuildTimestamp) {
        System.out.println("Build-Date     : " + APP_TIMESTAMP_DATE);
        System.out.println("Build-Revision : " + APP_REVISION);
        System.out.println("Build-Timestamp: " + APP_TIMESTAMP);
        System.exit(0);
    }

    if (Help.NAME.equals(command)) {
        commander.usage();
        if (help.commands == null || help.commands.size() == 0) {
            commander.usage(Help.NAME);
        } else {
            Map<String, JCommander> commands = commander.getCommands();
            for (String current : help.commands) {
                if (commands.containsKey(current)) {
                    commander.usage(current);
                } else {
                    System.out.println("Unknown command '" + current + "'!");
                }
            }
        }
        System.exit(0);
    }

    if (Md5.NAME.equals(command)) {
        List<String> files = md5.files;
        if (files == null || files.isEmpty()) {
            printHelp(commander);
            System.out.println("Missing files!");
            System.exit(-1);
        }
        boolean error = false;
        for (String current : files) {
            if (!CreateMd5Command.createMd5(new File(current))) {
                error = true;
            }
        }
        if (error) {
            System.exit(-1);
        }
        System.exit(0);
    }

    if (Index.NAME.equals(command)) {
        if (!cl.verbose && cl.logbackConfig == null) {
            initCLILogging();
        }
        List<String> files = index.files;
        if (files == null || files.size() == 0) {
            printHelp(commander);
            System.exit(-1);
        }
        boolean error = false;
        for (String current : files) {
            if (!IndexCommand.indexLogFile(new File(current))) {
                error = true;
            }
        }
        if (error) {
            System.exit(-1);
        }
        System.exit(0);
    }

    if (Cat.NAME.equals(command)) {
        if (!cl.verbose && cl.logbackConfig == null) {
            initCLILogging();
        }
        List<String> files = cat.files;
        if (files == null || files.size() != 1) {
            printHelp(commander);
            System.exit(-1);
        }
        if (CatCommand.catFile(new File(files.get(0)), cat.pattern, cat.numberOfLines)) {
            System.exit(0);
        }
        System.exit(-1);
    }

    if (Tail.NAME.equals(command)) {
        if (!cl.verbose && cl.logbackConfig == null) {
            initCLILogging();
        }
        List<String> files = tail.files;
        if (files == null || files.size() != 1) {
            printHelp(commander);
            System.exit(-1);
        }
        if (TailCommand.tailFile(new File(files.get(0)), tail.pattern, tail.numberOfLines, tail.keepRunning)) {
            System.exit(0);
        }
        System.exit(-1);
    }

    if (Filter.NAME.equals(command)) {
        if (!cl.verbose && cl.logbackConfig == null) {
            initCLILogging();
        }
        if (FilterCommand.filterFile(new File(filter.input), new File(filter.output),
                new File(filter.condition), filter.searchString, filter.pattern, filter.overwrite,
                filter.keepRunning, filter.exclusive)) {
            System.exit(0);
        }
        System.exit(-1);
    }

    if (cl.flushPreferences) {
        flushPreferences();
    }

    if (cl.exportPreferencesFile != null) {
        exportPreferences(cl.exportPreferencesFile);
    }

    if (cl.importPreferencesFile != null) {
        importPreferences(cl.importPreferencesFile);
    }

    if (cl.exportPreferencesFile != null || cl.importPreferencesFile != null) {
        System.exit(0);
    }

    if (cl.flushLicensed) {
        flushLicensed();
    }

    startLilith(appTitleString);
}

From source file:edu.brown.benchmark.seats.util.GenerateHistograms.java

public static void main(String[] vargs) throws Exception {
    ArgumentsParser args = ArgumentsParser.load(vargs);

    File csv_path = new File(args.getOptParam(0));
    File output_path = new File(args.getOptParam(1));

    GenerateHistograms gh = GenerateHistograms.generate(csv_path);

    Map<String, Object> m = new ListOrderedMap<String, Object>();
    m.put("Airport Codes", gh.flights_per_airport.size());
    m.put("Airlines", gh.flights_per_airline.getValueCount());
    m.put("Departure Times", gh.flights_per_time.getValueCount());
    LOG.info(StringUtil.formatMaps(m));

    System.err.println(StringUtil.join("\n", gh.flights_per_airport.keySet()));

    Map<String, Histogram<?>> histograms = new HashMap<String, Histogram<?>>();
    histograms.put(SEATSConstants.HISTOGRAM_FLIGHTS_PER_DEPART_TIMES, gh.flights_per_time);
    // histograms.put(SEATSConstants.HISTOGRAM_FLIGHTS_PER_AIRLINE, gh.flights_per_airline);
    histograms.put(SEATSConstants.HISTOGRAM_FLIGHTS_PER_AIRPORT,
            SEATSHistogramUtil.collapseAirportFlights(gh.flights_per_airport));

    for (Entry<String, Histogram<?>> e : histograms.entrySet()) {
        File output_file = new File(output_path.getAbsolutePath() + "/" + e.getKey() + ".histogram");
        LOG.info(String.format("Writing out %s data to '%s' [samples=%d, values=%d]", e.getKey(), output_file,
                e.getValue().getSampleCount(), e.getValue().getValueCount()));
        e.getValue().save(output_file);
    } // FOR
}

From source file:DIA_Umpire_SE.DIA_Umpire_SE.java

/**
 * @param args the command line arguments: the spectra file (mzXML) and the DIA-Umpire parameter file
 */
public static void main(String[] args) throws InterruptedException, FileNotFoundException, ExecutionException,
        IOException, ParserConfigurationException, DataFormatException, SAXException, Exception {
    System.out.println(
            "=================================================================================================");
    System.out.println(
            "DIA-Umpire signal extraction analysis (version: " + UmpireInfo.GetInstance().Version + ")");
    if (args.length < 2 || args.length > 3) {
        System.out.println(
                "command format error, the correct format is: java -jar -Xmx8G DIA_Umpire_SE.jar mzXML_file diaumpire_se.params");
        System.out.println(
                "To fix DIA setting, use : java -jar -Xmx8G DIA_Umpire_SE.jar mzXML_file diaumpire_se.params -f");
        return;
    }
    try {
        //Define logger level for console
        ConsoleLogger.SetConsoleLogger(Level.INFO);
        //Define logger level and file path for text log file
        ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_se.log");
    } catch (Exception e) {
        // ignore logger setup failures and continue with the default console logging
    }

    boolean Fix = false;
    boolean Resume = false;

    if (args.length == 3 && args[2].equals("-f")) {
        Fix = true;
    }
    String parameterfile = args[1];
    String MSFilePath = args[0];
    Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version);
    Logger.getRootLogger().info("Parameter file:" + parameterfile);
    Logger.getRootLogger().info("Spectra file:" + MSFilePath);
    BufferedReader reader = new BufferedReader(new FileReader(parameterfile));

    String line = "";
    InstrumentParameter param = new InstrumentParameter(InstrumentParameter.InstrumentType.TOF5600);
    param.DetermineBGByID = false;
    param.EstimateBG = true;
    int NoCPUs = 2;

    SpectralDataType.DataType dataType = SpectralDataType.DataType.DIA_F_Window;
    String WindowType = "";
    int WindowSize = 25;

    ArrayList<XYData> WindowList = new ArrayList<>();

    boolean ExportPrecursorPeak = false;
    boolean ExportFragmentPeak = false;

    //<editor-fold defaultstate="collapsed" desc="Read parameter file">
    while ((line = reader.readLine()) != null) {
        Logger.getRootLogger().info(line);
        if (!"".equals(line) && !line.startsWith("#")) {
            //System.out.println(line);
            if (line.equals("==window setting begin")) {
                while (!(line = reader.readLine()).equals("==window setting end")) {
                    if (!"".equals(line)) {
                        WindowList.add(new XYData(Float.parseFloat(line.split("\t")[0]),
                                Float.parseFloat(line.split("\t")[1])));
                    }
                }
                continue;
            }
            if (line.split("=").length < 2) {
                continue;
            }
            String type = line.split("=")[0].trim();
            if (type.startsWith("para.")) {
                type = type.replace("para.", "SE.");
            }
            String value = line.split("=")[1].trim();
            switch (type) {
            case "Thread": {
                NoCPUs = Integer.parseInt(value);
                break;
            }
            case "ExportPrecursorPeak": {
                ExportPrecursorPeak = Boolean.parseBoolean(value);
                break;
            }
            case "ExportFragmentPeak": {
                ExportFragmentPeak = Boolean.parseBoolean(value);
                break;
            }

            //<editor-fold defaultstate="collapsed" desc="instrument parameters">
            case "RPmax": {
                param.PrecursorRank = Integer.parseInt(value);
                break;
            }
            case "RFmax": {
                param.FragmentRank = Integer.parseInt(value);
                break;
            }
            case "CorrThreshold": {
                param.CorrThreshold = Float.parseFloat(value);
                break;
            }
            case "DeltaApex": {
                param.ApexDelta = Float.parseFloat(value);
                break;
            }
            case "RTOverlap": {
                param.RTOverlapThreshold = Float.parseFloat(value);
                break;
            }
            case "BoostComplementaryIon": {
                param.BoostComplementaryIon = Boolean.parseBoolean(value);
                break;
            }
            case "AdjustFragIntensity": {
                param.AdjustFragIntensity = Boolean.parseBoolean(value);
                break;
            }
            case "SE.MS1PPM": {
                param.MS1PPM = Float.parseFloat(value);
                break;
            }
            case "SE.MS2PPM": {
                param.MS2PPM = Float.parseFloat(value);
                break;
            }
            case "SE.SN": {
                param.SNThreshold = Float.parseFloat(value);
                break;
            }
            case "SE.MS2SN": {
                param.MS2SNThreshold = Float.parseFloat(value);
                break;
            }
            case "SE.MinMSIntensity": {
                param.MinMSIntensity = Float.parseFloat(value);
                break;
            }
            case "SE.MinMSMSIntensity": {
                param.MinMSMSIntensity = Float.parseFloat(value);
                break;
            }
            case "SE.MinRTRange": {
                param.MinRTRange = Float.parseFloat(value);
                break;
            }
            case "SE.MaxNoPeakCluster": {
                param.MaxNoPeakCluster = Integer.parseInt(value);
                param.MaxMS2NoPeakCluster = Integer.parseInt(value);
                break;
            }
            case "SE.MinNoPeakCluster": {
                param.MinNoPeakCluster = Integer.parseInt(value);
                param.MinMS2NoPeakCluster = Integer.parseInt(value);
                break;
            }
            case "SE.MinMS2NoPeakCluster": {
                param.MinMS2NoPeakCluster = Integer.parseInt(value);
                break;
            }
            case "SE.MaxCurveRTRange": {
                param.MaxCurveRTRange = Float.parseFloat(value);
                break;
            }
            case "SE.Resolution": {
                param.Resolution = Integer.parseInt(value);
                break;
            }
            case "SE.RTtol": {
                param.RTtol = Float.parseFloat(value);
                break;
            }
            case "SE.NoPeakPerMin": {
                param.NoPeakPerMin = Integer.parseInt(value);
                break;
            }
            case "SE.StartCharge": {
                param.StartCharge = Integer.parseInt(value);
                break;
            }
            case "SE.EndCharge": {
                param.EndCharge = Integer.parseInt(value);
                break;
            }
            case "SE.MS2StartCharge": {
                param.MS2StartCharge = Integer.parseInt(value);
                break;
            }
            case "SE.MS2EndCharge": {
                param.MS2EndCharge = Integer.parseInt(value);
                break;
            }
            case "SE.NoMissedScan": {
                param.NoMissedScan = Integer.parseInt(value);
                break;
            }
            case "SE.Denoise": {
                param.Denoise = Boolean.valueOf(value);
                break;
            }
            case "SE.EstimateBG": {
                param.EstimateBG = Boolean.valueOf(value);
                break;
            }
            case "SE.RemoveGroupedPeaks": {
                param.RemoveGroupedPeaks = Boolean.valueOf(value);
                break;
            }
            case "SE.MinFrag": {
                param.MinFrag = Integer.parseInt(value);
                break;
            }
            case "SE.IsoPattern": {
                param.IsoPattern = Float.valueOf(value);
                break;
            }
            case "SE.StartRT": {
                param.startRT = Float.valueOf(value);
                break;
            }
            case "SE.EndRT": {
                param.endRT = Float.valueOf(value);
                break;
            }
            case "SE.RemoveGroupedPeaksRTOverlap": {
                param.RemoveGroupedPeaksRTOverlap = Float.valueOf(value);
                break;
            }
            case "SE.RemoveGroupedPeaksCorr": {
                param.RemoveGroupedPeaksCorr = Float.valueOf(value);
                break;
            }
            case "SE.MinMZ": {
                param.MinMZ = Float.valueOf(value);
                break;
            }
            case "SE.MinPrecursorMass": {
                param.MinPrecursorMass = Float.valueOf(value);
                break;
            }
            case "SE.MaxPrecursorMass": {
                param.MaxPrecursorMass = Float.valueOf(value);
                break;
            }
            case "SE.IsoCorrThreshold": {
                param.IsoCorrThreshold = Float.valueOf(value);
                break;
            }
            case "SE.MassDefectFilter": {
                param.MassDefectFilter = Boolean.parseBoolean(value);
                break;
            }
            case "SE.MassDefectOffset": {
                param.MassDefectOffset = Float.valueOf(value);
                break;
            }

            //</editor-fold>

            case "WindowType": {
                WindowType = value;
                switch (WindowType) {
                case "SWATH": {
                    dataType = SpectralDataType.DataType.DIA_F_Window;
                    break;
                }
                case "V_SWATH": {
                    dataType = SpectralDataType.DataType.DIA_V_Window;
                    break;
                }
                case "MSX": {
                    dataType = SpectralDataType.DataType.MSX;
                    break;
                }
                case "MSE": {
                    dataType = SpectralDataType.DataType.MSe;
                    break;
                }
                }
                break;
            }
            case "WindowSize": {
                WindowSize = Integer.parseInt(value);
                break;
            }
            }
        }
    }
    //</editor-fold>

    try {
        File MSFile = new File(MSFilePath);
        if (MSFile.exists()) {
            long time = System.currentTimeMillis();
            Logger.getRootLogger().info(
                    "=================================================================================================");
            Logger.getRootLogger().info("Processing " + MSFilePath + "....");

            //Initialize a DIA file data structure                
            DIAPack DiaFile = new DIAPack(MSFile.getAbsolutePath(), NoCPUs);
            DiaFile.Resume = Resume;
            DiaFile.SetDataType(dataType);
            DiaFile.SetParameter(param);

            //Set DIA isolation window setting
            if (dataType == SpectralDataType.DataType.DIA_F_Window) {
                DiaFile.SetWindowSize(WindowSize);
            } else if (dataType == SpectralDataType.DataType.DIA_V_Window) {
                for (XYData window : WindowList) {
                    DiaFile.AddVariableWindow(window);
                }
            }
            DiaFile.SaveDIASetting();
            DiaFile.SaveParams();

            if (Fix) {
                DiaFile.FixScanidx();
                return;
            }
            DiaFile.ExportPrecursorPeak = ExportPrecursorPeak;
            DiaFile.ExportFragmentPeak = ExportFragmentPeak;
            Logger.getRootLogger().info("Module A: Signal extraction");
            //Start DIA signal extraction process to generate pseudo MS/MS files
            DiaFile.process();
            time = System.currentTimeMillis() - time;
            Logger.getRootLogger().info(MSFilePath + " processed time:"
                    + String.format("%d hour, %d min, %d sec", TimeUnit.MILLISECONDS.toHours(time),
                            TimeUnit.MILLISECONDS.toMinutes(time)
                                    - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)),
                            TimeUnit.MILLISECONDS.toSeconds(time)
                                    - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time))));
        } else {
            throw new RuntimeException("file: " + MSFile + " does not exist!");
        }
        Logger.getRootLogger().info("Job complete");
        Logger.getRootLogger().info(
                "=================================================================================================");

    } catch (Exception e) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e));
        throw e;
    }
}
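
The parameter file read above is a simple line-based format: lines starting with "#" are comments, ordinary lines are key=value pairs, and an optional block between "==window setting begin" and "==window setting end" lists tab-separated isolation-window boundaries. The following is a minimal, self-contained sketch of just that parsing step, not DIA-Umpire's own parser; the file name diaumpire_se.params and the assumption that the end marker is always present are hypothetical simplifications.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ParamFileSketch {
    public static void main(String[] args) throws IOException {
        Map<String, String> params = new LinkedHashMap<>();
        List<float[]> windows = new ArrayList<>();
        try (BufferedReader reader = new BufferedReader(new FileReader("diaumpire_se.params"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (line.isEmpty() || line.startsWith("#")) {
                    continue;
                }
                if (line.equals("==window setting begin")) {
                    // assumes the matching "==window setting end" marker is present
                    while (!(line = reader.readLine()).equals("==window setting end")) {
                        if (!line.isEmpty()) {
                            String[] bounds = line.split("\t");
                            windows.add(new float[] { Float.parseFloat(bounds[0]), Float.parseFloat(bounds[1]) });
                        }
                    }
                    continue;
                }
                String[] kv = line.split("=", 2);
                if (kv.length == 2) {
                    params.put(kv[0].trim(), kv[1].trim());
                }
            }
        }
        System.out.println(params.size() + " parameters, " + windows.size() + " windows");
    }
}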

From source file:de.prozesskraft.pkraft.Merge.java

public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {

    /*----------------------------
      get options from ini-file
    ----------------------------*/
    java.io.File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Merge.class) + "/" + "../etc/pkraft-merge.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");
    Option ov = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option oinstance = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory] instance you want to merge another instance into.")
            //            .isRequired()
            .create("instance");

    Option oguest = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory] this instance will be merged into -instance.")
            //            .isRequired()
            .create("guest");

    Option obasedir = OptionBuilder.withArgName("DIR").hasArg().withDescription(
            "[optional] base directory in which the result instance (the merge of -instance and -guest) will be placed. This directory has to exist. Omit to use the base directory of -instance.")
            //            .isRequired()
            .create("basedir");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();

    options.addOption(ohelp);
    options.addOption(ov);
    options.addOption(oinstance);
    options.addOption(oguest);
    options.addOption(obasedir);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    // parse the command line arguments
    commandline = parser.parse(options, args);

    /*----------------------------
      usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("merge", options);
        System.exit(0);
    }

    if (commandline.hasOption("v")) {
        System.out.println("author:  alexander.vogel@caegroup.de");
        System.out.println("version: [% version %]");
        System.out.println("date:    [% date %]");
        System.exit(0);
    }
    /*----------------------------
      check whether a bad combination of parameters was given
    ----------------------------*/
    if (!(commandline.hasOption("instance"))) {
        System.err.println("option -instance is mandatory");
        exiter();
    }
    if (!(commandline.hasOption("guest"))) {
        System.err.println("at least one option -guest is mandatory");
        exiter();
    }

    /*----------------------------
      check the license and abort if it is not valid
    ----------------------------*/

    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log output
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      the actual business logic
    ----------------------------*/
    String pathToInstance = commandline.getOptionValue("instance");
    java.io.File fileInstance = new java.io.File(pathToInstance);

    String[] pathToGuest = commandline.getOptionValues("guest");

    String baseDir = null;
    if (commandline.hasOption("basedir")) {
        java.io.File fileBaseDir = new java.io.File(commandline.getOptionValue("basedir"));
        if (!fileBaseDir.exists()) {
            System.err.println("basedir does not exist: " + fileBaseDir.getAbsolutePath());
            exiter();
        } else if (!fileBaseDir.isDirectory()) {
            System.err.println("basedir is not a directory: " + fileBaseDir.getAbsolutePath());
            exiter();
        }
        baseDir = commandline.getOptionValue("basedir");
    }

    // check whether the process.pmb files exist
    // if a file is missing, abort with an error message
    if (!fileInstance.exists()) {
        System.err.println("instance file does not exist: " + fileInstance.getAbsolutePath());
        exiter();
    }
    for (String pathGuest : pathToGuest) {
        java.io.File fileGuest = new java.io.File(pathGuest);

        // if the guest file is missing, abort with an error message
        if (!fileGuest.exists()) {
            System.err.println("guest file does not exist: " + fileGuest.getAbsolutePath());
            exiter();
        }
    }

    // read the base instance
    Process p1 = new Process();
    p1.setInfilebinary(pathToInstance);
    p1.setOutfilebinary(pathToInstance);
    Process p2 = p1.readBinary();

    // read all guest instances
    ArrayList<Process> alleGuests = new ArrayList<Process>();
    for (String actPathGuest : pathToGuest) {
        Process p30 = new Process();
        p30.setInfilebinary(actPathGuest);
        Process pGuest = p30.readBinary();

        // check that the base instance and the current guest instance are of the same type
        if (!p2.getName().equals(pGuest.getName())) {
            System.err.println("error: instances are not from the same type (-instance=" + p2.getName()
                    + " != -guest=" + pGuest.getName());
            exiter();
        }

        // check that the base instance and the current guest instance have the same version
        if (!p2.getVersion().equals(pGuest.getVersion())) {
            System.err.println("error: instances are not from the same version (" + p2.getVersion() + "!="
                    + pGuest.getVersion());
            exiter();
        }

        alleGuests.add(pGuest);
    }

    // read the main process again so that its subprocesses can be extracted
    Process p3 = new Process();
    p3.setInfilebinary(pathToInstance);
    Process process = p3.readBinary();

    // clone the main process via the static function
    // registration with pradar happens only at the very end, because cloning resets subsequent steps that are still intact at this point
    Process clonedProcess = cloneProcess(process, null);

    // walk through all steps and, where subprocesses exist, clone them as well
    for (Step actStep : process.getStep()) {
        if (actStep.getSubprocess() != null) {
            Process pDummy = new Process();
            pDummy.setInfilebinary(actStep.getAbsdir() + "/process.pmb");
            Process processInSubprocess = pDummy.readBinary();
            //            System.err.println("info: reading process freshly from file: " + actStep.getAbsdir() + "/process.pmb");
            if (processInSubprocess != null) {
                Process clonedSubprocess = cloneProcess(processInSubprocess, clonedProcess);
                // register the process with pradar by calling the tool: pradar-attend
                String call2 = ini.get("apps", "pradar-attend") + " -instance " + clonedSubprocess.getRootdir()
                        + "/process.pmb";
                System.err.println("info: calling: " + call2);

                try {
                    java.lang.Process sysproc = Runtime.getRuntime().exec(call2);
                } catch (IOException e) {
                    System.err.println("error: " + e.getMessage());
                }
            }
        }
    }

    // collect all dependent steps of the target instance
    // this is needed for resetting, so that steps are not reset twice
    Map<Step, String> dependentSteps = new HashMap<Step, String>();

    // perform the merge for all guest processes
    for (Process actGuestProcess : alleGuests) {
        System.err.println("info: merging guest process " + actGuestProcess.getInfilebinary());

        // integrate all fanned steps (former multisteps) of the process being merged into the fanned multisteps of the existing process
        for (Step actStep : actGuestProcess.getStep()) {
            if (actStep.isAFannedMultistep()) {
                System.err.println("info: merging from guest instance step " + actStep.getName());
                Step clonedStepForIntegrationInClonedProcess = actStep.clone();
                if (clonedProcess.integrateStep(clonedStepForIntegrationInClonedProcess)) {
                    System.err.println("info: merging step successfully.");
                    // remember the downstream steps from the merge point
                    for (Step actStepToResetBecauseOfDependency : clonedProcess
                            .getStepDependent(actStep.getName())) {
                        dependentSteps.put(actStepToResetBecauseOfDependency, "dummy");
                    }

                    // if the step contains a subprocess, it must be registered with pradar after the integration
                    // register the process with pradar by calling the tool: pradar-attend
                    if (clonedStepForIntegrationInClonedProcess.getSubprocess() != null
                            && clonedStepForIntegrationInClonedProcess.getSubprocess().getProcess() != null) {
                        String call5 = ini.get("apps", "pradar-attend") + " -instance "
                                + clonedStepForIntegrationInClonedProcess.getAbsdir() + "/process.pmb";
                        System.err.println("info: calling: " + call5);
                        try {
                            java.lang.Process sysproc = Runtime.getRuntime().exec(call5);
                        } catch (IOException e) {
                            System.err.println("error: " + e.getMessage());
                        }
                    }
                } else {
                    System.err.println("error: merging step failed.");
                }
            } else {
                System.err.println("debug: because it's not a multistep, ignoring from guest instance step "
                        + actStep.getName());
            }
        }
    }

    // reset all steps downstream of the merge positions
    for (Step actStep : dependentSteps.keySet()) {
        actStep.resetBecauseOfDependency();
    }

    // save the result instance
    clonedProcess.writeBinary();

    // register the process with pradar by calling the tool: pradar-attend
    String call2 = ini.get("apps", "pradar-attend") + " -instance " + clonedProcess.getRootdir()
            + "/process.pmb";
    System.err.println("info: calling: " + call2);

    try {
        java.lang.Process sysproc = Runtime.getRuntime().exec(call2);
    } catch (IOException e) {
        System.err.println("error: " + e.getMessage());
    }

}
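
The -instance, -guest and -basedir handling above repeats the same exists/isDirectory check and prints the absolute path in its error messages, which is usually more helpful than echoing the possibly relative path the user typed. A small hedged sketch of that pattern as a reusable helper follows; the class and method names are hypothetical and not part of Merge.java.

import java.io.File;

public class PathCheckSketch {

    // verify that the given path exists and is a directory, otherwise report its absolute path and exit
    static File requireDirectory(String path) {
        File dir = new File(path);
        if (!dir.exists()) {
            System.err.println("directory does not exist: " + dir.getAbsolutePath());
            System.exit(1);
        } else if (!dir.isDirectory()) {
            System.err.println("not a directory: " + dir.getAbsolutePath());
            System.exit(1);
        }
        return dir;
    }

    public static void main(String[] args) {
        File baseDir = requireDirectory(args.length > 0 ? args[0] : ".");
        System.out.println("using base directory " + baseDir.getAbsolutePath());
    }
}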

From source file:malware_classification.Malware_Classification.java

/**
 * @param args the command line arguments. Order is malicious_filename,
 * benign_filename, (optional) bin_size
 */
public static void main(String[] args) {
    String malicious_file_path = args[0];
    String benign_file_path = args[1];
    int curr_bin_size;
    if (args.length > 2) {
        curr_bin_size = Integer.parseInt(args[2]);
    } else {
        curr_bin_size = -1;
    }
    String pid_str = ManagementFactory.getRuntimeMXBean().getName();
    logger.setLevel(Level.CONFIG);
    logger.log(Level.INFO, pid_str);
    boolean found_file = false;
    String output_base = "std_output";
    File output_file = null;
    for (int i = 0; !found_file; i++) {
        output_file = new File(output_base + i + ".txt");
        found_file = !output_file.exists();
    }

    FileHandler fh = null;
    try {
        fh = new FileHandler(output_file.getAbsolutePath());
    } catch (IOException ex) {
        Logger.getLogger(Malware_Classification.class.getName()).log(Level.SEVERE, null, ex);
    } catch (SecurityException ex) {
        Logger.getLogger(Malware_Classification.class.getName()).log(Level.SEVERE, null, ex);
    }
    logger.addHandler(fh);
    logger.info("Writing output in " + output_file.getAbsolutePath());

    Malware_Classification classifier = new Malware_Classification(malicious_file_path, benign_file_path,
            curr_bin_size);
    //        classifier.run_tests();
}
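
The loop above probes std_output0.txt, std_output1.txt, ... until it finds a name that does not exist yet and then logs that file's absolute path. A minimal sketch of the same pattern is shown below; the class and method names are hypothetical, and, like the original, the exists() check is not atomic, so another process could still create the chosen file before it is opened.

import java.io.File;

public class OutputFileSketch {

    // return the first base<i>extension that does not exist yet (not race-free)
    static File firstFreeFile(String base, String extension) {
        for (int i = 0;; i++) {
            File candidate = new File(base + i + extension);
            if (!candidate.exists()) {
                return candidate;
            }
        }
    }

    public static void main(String[] args) {
        File outputFile = firstFreeFile("std_output", ".txt");
        System.out.println("Writing output in " + outputFile.getAbsolutePath());
    }
}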

From source file:com.chaordicsystems.sstableconverter.SSTableConverter.java

public static void main(String[] args) throws Exception {
    LoaderOptions options = LoaderOptions.parseArgs(args);
    OutputHandler handler = new OutputHandler.SystemOutput(options.verbose, options.debug);

    File srcDir = options.sourceDir;
    File dstDir = options.destDir;
    IPartitioner srcPart = options.srcPartitioner;
    IPartitioner dstPart = options.dstPartitioner;
    String keyspace = options.ks;
    String cf = options.cf;

    if (keyspace == null) {
        keyspace = srcDir.getParentFile().getName();
    }
    if (cf == null) {
        cf = srcDir.getName();
    }

    CFMetaData metadata = new CFMetaData(keyspace, cf, ColumnFamilyType.Standard, BytesType.instance, null);
    Collection<SSTableReader> originalSstables = readSSTables(srcPart, srcDir, metadata, handler);

    handler.output(
            String.format("Converting sstables of ks[%s], cf[%s], from %s to %s. Src dir: %s. Dest dir: %s.",
                    keyspace, cf, srcPart.getClass().getName(), dstPart.getClass().getName(),
                    srcDir.getAbsolutePath(), dstDir.getAbsolutePath()));

    SSTableSimpleUnsortedWriter destWriter = new SSTableSimpleUnsortedWriter(dstDir, dstPart, keyspace, cf,
            AsciiType.instance, null, 64);

    for (SSTableReader reader : originalSstables) {
        handler.output("Reading: " + reader.getFilename());
        SSTableIdentityIterator row;
        SSTableScanner scanner = reader.getDirectScanner(null);

        // collecting keys to export
        while (scanner.hasNext()) {
            row = (SSTableIdentityIterator) scanner.next();

            destWriter.newRow(row.getKey().key);
            while (row.hasNext()) {
                IColumn col = (IColumn) row.next();
                destWriter.addColumn(col.name(), col.value(), col.timestamp());
            }
        }
        scanner.close();
    }

    // Don't forget to close!
    destWriter.close();

    System.exit(0);
}
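
When no keyspace or column family is passed in, the converter above falls back to the source directory layout: the column family is the directory's own name and the keyspace is the name of its parent. A short hedged sketch of that derivation follows, using a hypothetical path; it is not part of the converter itself.

import java.io.File;

public class SSTableDirSketch {
    public static void main(String[] args) {
        // hypothetical <data dir>/<keyspace>/<cf> layout
        File srcDir = new File("/var/lib/cassandra/data/my_keyspace/my_cf");
        String keyspace = srcDir.getParentFile().getName(); // "my_keyspace"
        String cf = srcDir.getName();                       // "my_cf"
        System.out.println("ks=" + keyspace + ", cf=" + cf + ", src=" + srcDir.getAbsolutePath());
    }
}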