Example usage for org.apache.commons.lang.time StopWatch getTime

Introduction

This page collects example usages of org.apache.commons.lang.time.StopWatch.getTime().

Prototype

public long getTime() 

Document

Get the time on the stopwatch, in milliseconds.

This is either the time between start and the moment this method is called, or, if the stopwatch has been stopped, the time between start and stop.
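
The difference matters in practice: on a running watch getTime() returns a live, increasing value, while on a stopped watch it returns the frozen total. A minimal, self-contained sketch of both behaviors (class name and sleep intervals are illustrative, not taken from the examples below):

import org.apache.commons.lang.time.StopWatch;

public class GetTimeDemo {
    public static void main(String[] args) throws InterruptedException {
        StopWatch sw = new StopWatch();
        sw.start();
        Thread.sleep(100);
        // Still running: a live elapsed-time reading.
        System.out.println("running: " + sw.getTime() + " ms"); // roughly 100
        Thread.sleep(100);
        sw.stop();
        // Stopped: the start-to-stop total, stable across repeated calls.
        System.out.println("stopped: " + sw.getTime() + " ms"); // roughly 200
    }
}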

Usage

From source file: fr.inria.edelweiss.kgdqp.core.CentralizedInferrencing.java

public static void main(String args[])
        throws ParseException, EngineException, InterruptedException, IOException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null;
    boolean rulesSelection = false;
    File rulesDir = null;
    File ontDir = null;

    /////////////////
    Graph graph = Graph.create();
    QueryProcess exec = QueryProcess.create(graph);

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    //        Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    //        Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules");
    Option ontOpt = new Option("o", "ontologiesDir", true,
            "directory containing the ontologies for rules selection");
    //        Option locOpt = new Option("c", "centralized", false, "performs centralized inferences");
    Option dataOpt = new Option("l", "load", true, "data file or directory to be loaded");
    //        Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run");
    //        options.addOption(queryOpt);
    //        options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(rulesOpt);
    options.addOption(ontOpt);
    //        options.addOption(selOpt);
    //        options.addOption(locOpt);
    options.addOption(dataOpt);

    String header = "Corese/KGRAM rule engine experiment command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (cmd.hasOption("o")) {
        rulesSelection = true;
        String ontDirPath = cmd.getOptionValue("o");
        ontDir = new File(ontDirPath);
        if (!ontDir.isDirectory()) {
            logger.warn(ontDirPath + " is not a valid directory path.");
            System.exit(0);
        }
    }
    if (!cmd.hasOption("r")) {
        logger.info("You must specify a path for inference rules directory !");
        System.exit(0);
    }

    if (cmd.hasOption("l")) {
        String[] dataPaths = cmd.getOptionValues("l");
        for (String path : dataPaths) {
            Load ld = Load.create(graph);
            ld.load(path);
            logger.info("Loaded " + path);
        }
    }

    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    String rulesDirPath = cmd.getOptionValue("r");
    rulesDir = new File(rulesDirPath);
    if (!rulesDir.isDirectory()) {
        logger.warn(rulesDirPath + " is not a valid directory path.");
        System.exit(0);
    }

    // Local rules graph initialization
    Graph rulesG = Graph.create();
    Load ld = Load.create(rulesG);

    if (rulesSelection) {
        // Ontology loading
        if (ontDir.isDirectory()) {
            for (File o : ontDir.listFiles()) {
                logger.info("Loading " + o.getAbsolutePath());
                ld.load(o.getAbsolutePath());
            }
        }
    }

    // Rules loading
    if (rulesDir.isDirectory()) {
        for (File r : rulesDir.listFiles()) {
            logger.info("Loading " + r.getAbsolutePath());
            ld.load(r.getAbsolutePath());
        }
    }

    // Rule engine initialization
    RuleEngine ruleEngine = RuleEngine.create(graph);
    ruleEngine.set(exec);
    ruleEngine.setOptimize(true);
    ruleEngine.setConstructResult(true);
    ruleEngine.setTrace(true);

    StopWatch sw = new StopWatch();
    logger.info("Federated graph size : " + graph.size());
    logger.info("Rules graph size : " + rulesG.size());

    // Rule selection
    logger.info("Rules selection");
    QueryProcess localKgram = QueryProcess.create(rulesG);
    ArrayList<String> applicableRules = new ArrayList<String>();
    sw.start();
    String rulesSelQuery = "";
    if (rulesSelection) {
        rulesSelQuery = pertinentRulesQuery;
    } else {
        rulesSelQuery = allRulesQuery;
    }
    Mappings maps = localKgram.query(rulesSelQuery);
    logger.info("Rules selected in " + sw.getTime() + " ms");
    logger.info("Applicable rules : " + maps.size());

    // Selected rule loading
    for (Mapping map : maps) {
        IDatatype dt = (IDatatype) map.getValue("?res");
        String rule = dt.getLabel();
        //loading rule in the rule engine
        //            logger.info("Adding rule : ");
        //            System.out.println("-------");
        //            System.out.println(rule);
        //            System.out.println("");
        //            if (! rule.toLowerCase().contains("sameas")) {
        applicableRules.add(rule);
        ruleEngine.addRule(rule);
        //            }
    }

    // Rules application on distributed sparql endpoints
    logger.info("Rules application (" + applicableRules.size() + " rules)");
    ExecutorService threadPool = Executors.newCachedThreadPool();
    RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine);
    sw.reset();
    sw.start();

    //        ruleEngine.process();
    threadPool.execute(ruleThread);
    threadPool.shutdown();

    //monitoring loop
    while (!threadPool.isTerminated()) {
        //            System.out.println("******************************");
        //            System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));
        //            System.out.println("Rule engine running for " + sw.getTime() + " ms");
        //            System.out.println("Federated graph size : " + graph.size());
        System.out.println(sw.getTime() + " , " + graph.size());
        Thread.sleep(5000);
    }

    logger.info("Federated graph size : " + graph.size());
    //        logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));

    //        TripleFormat f = TripleFormat.create(graph, true);
    //        f.write("/tmp/gAll.ttl");

}
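
Two details in the example above are worth calling out. First, getTime() is read inside the monitoring loop without stopping the watch, so each print shows the live elapsed time of the still-running rule engine. Second, the same StopWatch instance is reused: in commons-lang 2.x, calling start() a second time without an intervening reset() throws IllegalStateException, so the sw.reset(); sw.start(); pair is required rather than stylistic. A stripped-down sketch of this two-phase, poll-while-working idiom (the sleeping task and the 500 ms interval are placeholders):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.commons.lang.time.StopWatch;

public class PollElapsedSketch {
    public static void main(String[] args) throws InterruptedException {
        StopWatch sw = new StopWatch();

        sw.start();                        // phase 1, e.g. rule selection
        Thread.sleep(300);                 // stand-in for the selection query
        System.out.println("selection: " + sw.getTime() + " ms");

        sw.reset();                        // mandatory before restarting this instance
        sw.start();                        // phase 2, e.g. rule application
        ExecutorService pool = Executors.newCachedThreadPool();
        pool.execute(() -> {
            try { Thread.sleep(2000); } catch (InterruptedException ignored) { }
        });
        pool.shutdown();
        while (!pool.isTerminated()) {
            // Live reading; the watch keeps running across reads.
            System.out.println(sw.getTime() + " ms elapsed");
            Thread.sleep(500);
        }
    }
}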

From source file: fr.inria.edelweiss.kgdqp.core.CentralizedInferrencingNoSpin.java

public static void main(String args[])
        throws ParseException, EngineException, InterruptedException, IOException, LoadException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null;
    boolean rulesSelection = false;
    File rulesDir = null;
    File ontDir = null;

    /////////////////
    Graph graph = Graph.create();
    QueryProcess exec = QueryProcess.create(graph);

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    //        Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    //        Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules");
    Option ontOpt = new Option("o", "ontologiesDir", true,
            "directory containing the ontologies for rules selection");
    //        Option locOpt = new Option("c", "centralized", false, "performs centralized inferences");
    Option dataOpt = new Option("l", "load", true, "data file or directory to be loaded");
    //        Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run");
    //        options.addOption(queryOpt);
    //        options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(rulesOpt);
    options.addOption(ontOpt);
    //        options.addOption(selOpt);
    //        options.addOption(locOpt);
    options.addOption(dataOpt);

    String header = "Corese/KGRAM rule engine experiment command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (cmd.hasOption("o")) {
        rulesSelection = true;
        String ontDirPath = cmd.getOptionValue("o");
        ontDir = new File(ontDirPath);
        if (!ontDir.isDirectory()) {
            logger.warn(ontDirPath + " is not a valid directory path.");
            System.exit(0);
        }
    }
    if (!cmd.hasOption("r")) {
        logger.info("You must specify a path for inference rules directory !");
        System.exit(0);
    }

    if (cmd.hasOption("l")) {
        String[] dataPaths = cmd.getOptionValues("l");
        for (String path : dataPaths) {
            Load ld = Load.create(graph);
            ld.load(path);
            logger.info("Loaded " + path);
        }
    }

    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    String rulesDirPath = cmd.getOptionValue("r");
    rulesDir = new File(rulesDirPath);
    if (!rulesDir.isDirectory()) {
        logger.warn(rulesDirPath + " is not a valid directory path.");
        System.exit(0);
    }

    // Local rules graph initialization
    Graph rulesG = Graph.create();
    Load ld = Load.create(rulesG);

    if (rulesSelection) {
        // Ontology loading
        if (ontDir.isDirectory()) {
            for (File o : ontDir.listFiles()) {
                logger.info("Loading " + o.getAbsolutePath());
                ld.load(o.getAbsolutePath());
            }
        }
    }

    // Rules loading
    if (rulesDir.isDirectory()) {
        for (File r : rulesDir.listFiles()) {
            if (r.getAbsolutePath().endsWith(".rq")) {
                logger.info("Loading " + r.getAbsolutePath());
                //                ld.load(r.getAbsolutePath());

                //                    byte[] encoded = Files.readAllBytes(Paths.get(r.getAbsolutePath()));
                //                    String construct = new String(encoded, "UTF-8"); //StandardCharsets.UTF_8);

                FileInputStream f = new FileInputStream(r);
                QueryLoad ql = QueryLoad.create();
                String construct = ql.read(f);
                f.close();

                SPINProcess sp = SPINProcess.create();
                String spinConstruct = sp.toSpin(construct);

                ld.load(new ByteArrayInputStream(spinConstruct.getBytes()), Load.TURTLE_FORMAT);
                logger.info("Rules graph size : " + rulesG.size());

            }
        }
    }

    // Rule engine initialization
    RuleEngine ruleEngine = RuleEngine.create(graph);
    ruleEngine.set(exec);
    ruleEngine.setOptimize(true);
    ruleEngine.setConstructResult(true);
    ruleEngine.setTrace(true);

    StopWatch sw = new StopWatch();
    logger.info("Federated graph size : " + graph.size());
    logger.info("Rules graph size : " + rulesG.size());

    // Rule selection
    logger.info("Rules selection");
    QueryProcess localKgram = QueryProcess.create(rulesG);
    ArrayList<String> applicableRules = new ArrayList<String>();
    sw.start();
    String rulesSelQuery = "";
    if (rulesSelection) {
        rulesSelQuery = pertinentRulesQuery;
    } else {
        rulesSelQuery = allRulesQuery;
    }
    Mappings maps = localKgram.query(rulesSelQuery);
    logger.info("Rules selected in " + sw.getTime() + " ms");
    logger.info("Applicable rules : " + maps.size());

    // Selected rule loading
    for (Mapping map : maps) {
        IDatatype dt = (IDatatype) map.getValue("?res");
        String rule = dt.getLabel();
        //loading rule in the rule engine
        //            logger.info("Adding rule : ");
        //            System.out.println("-------");
        //            System.out.println(rule);
        //            System.out.println("");
        //            if (! rule.toLowerCase().contains("sameas")) {
        applicableRules.add(rule);
        ruleEngine.addRule(rule);
        //            }
    }

    // Rules application on distributed sparql endpoints
    logger.info("Rules application (" + applicableRules.size() + " rules)");
    ExecutorService threadPool = Executors.newCachedThreadPool();
    RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine);
    sw.reset();
    sw.start();

    //        ruleEngine.process();
    threadPool.execute(ruleThread);
    threadPool.shutdown();

    //monitoring loop
    while (!threadPool.isTerminated()) {
        //            System.out.println("******************************");
        //            System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));
        //            System.out.println("Rule engine running for " + sw.getTime() + " ms");
        //            System.out.println("Federated graph size : " + graph.size());
        System.out.println(sw.getTime() + " , " + graph.size());
        Thread.sleep(5000);
    }

    logger.info("Federated graph size : " + graph.size());
    //        logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));

    //        TripleFormat f = TripleFormat.create(graph, true);
    //        f.write("/tmp/gAll.ttl");
}

From source file: fr.inria.edelweiss.kgdqp.core.FedInferrencingCLI.java

public static void main(String args[]) throws ParseException, EngineException, InterruptedException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null;
    boolean rulesSelection = false;
    File rulesDir = null;
    File ontDir = null;

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules");
    Option ontOpt = new Option("o", "ontologiesDir", true,
            "directory containing the ontologies for rules selection");
    //        Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run");
    options.addOption(queryOpt);
    options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(rulesOpt);
    options.addOption(ontOpt);
    //        options.addOption(selOpt);

    String header = "Corese/KGRAM distributed rule engine command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (!cmd.hasOption("e")) {
        logger.info("You must specify at least the URL of one sparql endpoint !");
        System.exit(0);
    } else {
        endpoints = new ArrayList<String>(Arrays.asList(cmd.getOptionValues("e")));
    }
    if (cmd.hasOption("o")) {
        rulesSelection = true;
        String ontDirPath = cmd.getOptionValue("o");
        ontDir = new File(ontDirPath);
        if (!ontDir.isDirectory()) {
            logger.warn(ontDirPath + " is not a valid directory path.");
            System.exit(0);
        }
    }
    if (!cmd.hasOption("r")) {
        logger.info("You must specify a path for inference rules directory !");
        System.exit(0);
    }

    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    String rulesDirPath = cmd.getOptionValue("r");
    rulesDir = new File(rulesDirPath);
    if (!rulesDir.isDirectory()) {
        logger.warn(rulesDirPath + " is not a valid directory path.");
        System.exit(0);
    }

    /////////////////
    Graph graph = Graph.create();
    QueryProcessDQP execDQP = QueryProcessDQP.create(graph);
    for (String url : endpoints) {
        try {
            execDQP.addRemote(new URL(url), WSImplem.REST);
        } catch (MalformedURLException ex) {
            logger.error(url + " is not a well-formed URL");
            System.exit(1);
        }
    }

    // Local rules graph initialization
    Graph rulesG = Graph.create();
    Load ld = Load.create(rulesG);

    if (rulesSelection) {
        // Ontology loading
        if (ontDir.isDirectory()) {
            for (File o : ontDir.listFiles()) {
                logger.info("Loading " + o.getAbsolutePath());
                ld.load(o.getAbsolutePath());
            }
        }
    }

    // Rules loading
    if (rulesDir.isDirectory()) {
        for (File r : rulesDir.listFiles()) {
            logger.info("Loading " + r.getAbsolutePath());
            ld.load(r.getAbsolutePath());
        }
    }

    // Rule engine initialization
    RuleEngine ruleEngine = RuleEngine.create(graph);
    ruleEngine.set(execDQP);

    StopWatch sw = new StopWatch();
    logger.info("Federated graph size : " + graph.size());
    logger.info("Rules graph size : " + rulesG.size());

    // Rule selection
    logger.info("Rules selection");
    QueryProcess localKgram = QueryProcess.create(rulesG);
    ArrayList<String> applicableRules = new ArrayList<String>();
    sw.start();
    String rulesSelQuery = "";
    if (rulesSelection) {
        rulesSelQuery = pertinentRulesQuery;
    } else {
        rulesSelQuery = allRulesQuery;
    }
    Mappings maps = localKgram.query(rulesSelQuery);
    logger.info("Rules selected in " + sw.getTime() + " ms");
    logger.info("Applicable rules : " + maps.size());

    // Selected rule loading
    for (Mapping map : maps) {
        IDatatype dt = (IDatatype) map.getValue("?res");
        String rule = dt.getLabel();
        //loading rule in the rule engine
        //            logger.info("Adding rule : " + rule);
        applicableRules.add(rule);
        ruleEngine.addRule(rule);
    }

    // Rules application on distributed sparql endpoints
    logger.info("Rules application (" + applicableRules.size() + " rules)");
    ExecutorService threadPool = Executors.newCachedThreadPool();
    RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine);
    sw.reset();
    sw.start();

    //        ruleEngine.process();
    threadPool.execute(ruleThread);
    threadPool.shutdown();

    //monitoring loop
    while (!threadPool.isTerminated()) {
        System.out.println("******************************");
        System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter,
                QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));
        System.out.println("Rule engine running for " + sw.getTime() + " ms");
        System.out.println("Federated graph size : " + graph.size());
        Thread.sleep(10000);
    }

    logger.info("Federated graph size : " + graph.size());
    logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter,
            QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));

    ///////////// Query file processing
    //        StringBuffer fileData = new StringBuffer(1000);
    //        BufferedReader reader = null;
    //        try {
    //            reader = new BufferedReader(new FileReader(queryPath));
    //        } catch (FileNotFoundException ex) {
    //             logger.error("Query file "+queryPath+" not found !");
    //             System.exit(1);
    //        }
    //        char[] buf = new char[1024];
    //        int numRead = 0;
    //        try {
    //            while ((numRead = reader.read(buf)) != -1) {
    //                String readData = String.valueOf(buf, 0, numRead);
    //                fileData.append(readData);
    //                buf = new char[1024];
    //            }
    //            reader.close();
    //        } catch (IOException ex) {
    //           logger.error("Error while reading query file "+queryPath);
    //           System.exit(1);
    //        }
    //
    //        String sparqlQuery = fileData.toString();
    //
    //        Query q = exec.compile(sparqlQuery,null);
    //        System.out.println(q);
    //        
    //        StopWatch sw = new StopWatch();
    //        sw.start();
    //        Mappings map = exec.query(sparqlQuery);
    //        int dqpSize = map.size();
    //        System.out.println("--------");
    //        long time = sw.getTime();
    //        System.out.println(time + " " + dqpSize);
}

From source file: elaborate.editor.backend.Indexer.java

@SuppressWarnings("boxing")
public static void main(String[] args) {
    boolean wipeIndexFirst = args.length > 0 && "-w".equals(args[0]);
    StopWatch sw = new StopWatch();
    sw.start();
    ElaborateSolrIndexer solr = new ElaborateSolrIndexer();
    if (wipeIndexFirst) {
        Log.info("clearing index");
        solr.clear();
    }
    EntityManager entityManager = HibernateUtil.getEntityManager();
    try {
        ProjectEntryService projectEntryService = ProjectEntryService.instance();
        projectEntryService.setEntityManager(entityManager);
        List<ProjectEntry> projectentries = projectEntryService.getAll();
        int size = projectentries.size();
        Log.info("indexing {} projectEntries", size);
        int n = 1;
        for (ProjectEntry projectEntry : projectentries) {
            Log.info("indexing projectEntry {} ({}/{} = {}%) (est. time remaining: {})", //
                    new Object[] { //
                            projectEntry.getId(), n, size, //
                            percentage(n, size), //
                            time_remaining(n, size, sw.getTime()) //
                    } //
            );
            solr.index(projectEntry, autoCommit(n));
            n++;
        }
    } finally {
        entityManager.close();
    }
    solr.commit();
    sw.stop();
    Log.info("done in {}", convert(sw.getTime()));
}
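
In this example getTime() is read mid-run to drive a progress estimate. The time_remaining helper is project-specific and its source is not shown on this page; a plausible proportional version (a hypothetical stand-in, not the project's code) could look like:

// Hypothetical stand-in for the time_remaining helper used above:
// scale elapsed time by the ratio of remaining items to completed items.
static String timeRemaining(int done, int total, long elapsedMillis) {
    long remainingMillis = done == 0 ? 0 : (long) (elapsedMillis * ((double) (total - done) / done));
    return (remainingMillis / 1000) + " s";
}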

From source file: fr.inria.edelweiss.kgimport.RdfSplitter.java

/**
 * The application entrypoint, configured through the command line input
 * arguments.
 *
 * @param args the input command line arguments.
 */
public static void main(String args[]) {

    RdfSplitter rdfSplitter = new RdfSplitter();

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "Print usage information.");
    Option inDirOpt = new Option("i", "input-dir", true, "The directory containing RDF files to be loaded.");
    Option outDirOpt = new Option("o", "output-dir", true,
            "The directory containing the generated RDF fragments");
    Option predFiltOpt = new Option("p", "predicate-filter", true,
            "Predicate filter used to segment the dataset. "
                    + "You can use multiple filters, typically one per fragment.");
    Option fragNbOpt = new Option("n", "number-of-fragments", true,
            "Number of fragments generated for the whole input dataset.");
    Option fragRepOpt = new Option("f", "fractionning-percentage", true,
            "Percentage of the whole input dataset for this fragment.");
    Option tdbOpt = new Option("tdb", "tdb-storage", false,
            "RDF fragments are persisted into a Jena TDB backend.");
    Option versionOpt = new Option("v", "version", false, "Print the version information and exit.");
    options.addOption(inDirOpt);
    options.addOption(outDirOpt);
    options.addOption(predFiltOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(fragNbOpt);
    options.addOption(fragRepOpt);
    options.addOption(tdbOpt);

    String header = "RDF data fragmentation tool command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);

        if (cmd.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("java -jar [].jar", header, options, footer, true);
            System.exit(0);
        }

        if (!cmd.hasOption("i")) {
            logger.warn("You must specify a valid input directory !");
            System.exit(-1);
        } else {
            rdfSplitter.setInputDirPath(cmd.getOptionValue("i"));
        }
        if (!cmd.hasOption("o")) {
            logger.warn("You must specify a valid output directory !");
            System.exit(-1);
        } else {
            rdfSplitter.setOutputDirPath(cmd.getOptionValue("o"));
        }
        if (cmd.hasOption("p")) {
            rdfSplitter.setInputPredicates(new ArrayList<String>(Arrays.asList(cmd.getOptionValues("p"))));
        }
        if (cmd.hasOption("f")) {
            ArrayList<String> opts = new ArrayList<String>(Arrays.asList(cmd.getOptionValues("f")));
            for (String opt : opts) {
                try {
                    rdfSplitter.getFragList().add(Integer.parseInt(opt));
                } catch (NumberFormatException e) {
                    logger.error(opt + " cannot be parsed as a percentage value.");
                    System.exit(-1);
                }
            }
        }
        if (cmd.hasOption("n")) {
            try {
                rdfSplitter.setFragNb(Integer.parseInt(cmd.getOptionValue("n")));
            } catch (NumberFormatException e) {
                logger.error(cmd.getOptionValue("n") + " cannot be pased as an integer value.");
                System.exit(-1);
            }
        }

        File oDir = new File(rdfSplitter.getOutputDirPath());
        if (oDir.exists()) {
            logger.warn(rdfSplitter.getOutputDirPath() + " already exists!");
            oDir = Files.createTempDir();
            logger.warn(oDir.getAbsolutePath() + " created.");
            rdfSplitter.setOutputDirPath(oDir.getAbsolutePath());
        } else {
            if (oDir.mkdir()) {
                logger.info(rdfSplitter.getOutputDirPath() + " created.");
            }
        }

        if (!cmd.hasOption("n") && !cmd.hasOption("f") && !cmd.hasOption("p")) {
            logger.error("You must specify just one fragmentation type through '-n', '-f', or 'p' options");
            for (String arg : args) {
                logger.trace(arg);
            }
            System.exit(-1);
        }

        String fragName = rdfSplitter.getInputDirPath()
                .substring(rdfSplitter.getInputDirPath().lastIndexOf("/") + 1);

        //Input data loading
        Model model = ModelFactory.createDefaultModel();
        File inputDir = new File(rdfSplitter.getInputDirPath());
        if (inputDir.isDirectory()) {
            for (File f : inputDir.listFiles()) {
                logger.info("Loading " + f.getAbsolutePath());
                if (f.isDirectory()) {
                    String directory = f.getAbsolutePath();
                    Dataset dataset = TDBFactory.createDataset(directory);
                    dataset.begin(ReadWrite.READ);
                    // Get model inside the transaction
                    model.add(dataset.getDefaultModel());
                    dataset.end();
                } else {
                    InputStream iS;
                    try {
                        iS = new FileInputStream(f);
                        if (f.getAbsolutePath().endsWith(".n3")) {
                            model.read(iS, null, "N3");
                        } else if (f.getAbsolutePath().endsWith(".nt")) {
                            model.read(iS, null, "N-TRIPLES");
                        } else if (f.getAbsolutePath().endsWith(".rdf")) {
                            model.read(iS, null);
                        }
                    } catch (FileNotFoundException ex) {
                        LogManager.getLogger(RdfSplitter.class.getName()).log(Level.ERROR, "", ex);
                    }
                }
            }
            logger.info("Loaded " + model.size() + " triples");
        } else {
            System.exit(0);
        }

        StopWatch sw = new StopWatch();
        if (cmd.hasOption("n")) {
            sw.start();
            if (cmd.hasOption("tdb")) {
                rdfSplitter.saveFragmentsTDB(rdfSplitter.getFragHoriz(model, rdfSplitter.getFragNb()),
                        "Homog-" + fragName);
            } else {
                rdfSplitter.saveFragmentsRDF(rdfSplitter.getFragHoriz(model, rdfSplitter.getFragNb()),
                        "Homog-" + fragName);
            }
            logger.info("Homog horiz frag in " + sw.getTime() + "ms");
            sw.reset();
        } else if (cmd.hasOption("f")) {
            sw.start();
            if (cmd.hasOption("tdb")) {
                rdfSplitter.saveFragmentsTDB(rdfSplitter.getFragHoriz(model, rdfSplitter.getFragList()),
                        "Inhomog-" + fragName);
            } else {
                rdfSplitter.saveFragmentsRDF(rdfSplitter.getFragHoriz(model, rdfSplitter.getFragList()),
                        "Inhomog-" + fragName);
            }
            logger.info("Inhomog horiz frag in " + sw.getTime() + "ms");
            sw.reset();
        } else if (cmd.hasOption("p")) {
            sw.start();
            if (cmd.hasOption("tdb")) {
                rdfSplitter.saveFragmentsTDB(rdfSplitter.getFragVert(model, rdfSplitter.getInputPredicates()));
            } else {
                rdfSplitter.saveFragmentsRDF(rdfSplitter.getFragVert(model, rdfSplitter.getInputPredicates()));
            }
            logger.info("Vert frag in " + sw.getTime() + "ms");
            sw.reset();
        }

    } catch (ParseException ex) {
        logger.error("Impossible to parse the input command line " + cmd.toString());
    }
}

From source file: gov.nasa.ensemble.common.functional.ParTileExample.java

/**
 * @param args
 */
public static void main(String[] args) {

    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    final ExecutorService pool = Executors.newFixedThreadPool(NUM_THREADS);
    final ParModule pm = ParModule.parModule(Strategy.<Unit>executorStrategy(pool));

    final Actor<Tree<Tile>> tileWriter = pm.effect(new Effect<Tree<Tile>>() {
        @Override
        public void e(Tree<Tile> tree) {
            final List<Tile> nodes = tree.flatten().toList();
            final Actor<String> callback = pm.actor(new Effect<String>() {
                final int totalTiles = nodes.length();
                int counter = 0;

                @Override
                public void e(final String response) {
                    //                  System.err.println(response);
                    if (++counter >= totalTiles) {
                        final String msg = MessageFormat.format(
                                "All done! Made {0} tiles for a {1} pixel image in {2} seconds", counter,
                                IMAGE_WIDTH * IMAGE_HEIGHT, stopWatch.getTime() / 1000.0);
                        System.err.println(msg);
                        pool.shutdown();
                    }
                }
            }).asActor();

            nodes.foreach(Actors.act(pm.effect(new Effect<Tile>() {
                @Override
                public void e(final Tile tile) {
                    ThreadUtils.sleep(SAVE_TIME);
                    callback.act("done saving " + tile);
                }
            })));
        }
    });

    final Image inputImage = new Image(V.v(IMAGE_WIDTH, IMAGE_HEIGHT));

    process(inputImage, V.v(0.0, 0.0), 0, pm).to(tileWriter);
}

From source file: elaborate.editor.backend.AnnotationMarkerScrubber.java

@SuppressWarnings("boxing")
public static void main(String[] args) {
    StopWatch sw = new StopWatch();
    sw.start();
    EntityManager entityManager = HibernateUtil.beginTransaction();
    TranscriptionService ts = TranscriptionService.instance();
    ts.setEntityManager(entityManager);
    try {
        List<Transcription> resultList = entityManager //
                .createQuery("select t from Transcription t", Transcription.class)//
                .getResultList();
        int size = resultList.size();
        int n = 1;
        for (Transcription t : resultList) {
            Log.info("indexing transcription {} ({}/{} = {}%)",
                    new Object[] { t.getId(), n, size, percentage(n, size) });
            String bodyBefore = t.getBody();
            ts.cleanupAnnotations(t);
            String bodyAfter = t.getBody();
            if (!bodyAfter.equals(bodyBefore)) {
                ProjectEntry projectEntry = t.getProjectEntry();
                String projectname = projectEntry.getProject().getName();
                long entryId = projectEntry.getId();
                Log.info("url: http://test.elaborate.huygens.knaw.nl/projects/{}/entries/{}/transcriptions/{}",
                        projectname, entryId, t.getTextLayer());
                Log.info("body changed:\nbefore: {}\nafter:{}", bodyBefore, bodyAfter);
            }
            n++;
        }
    } finally {
        HibernateUtil.commitTransaction(entityManager);
    }
    sw.stop();
    Log.info("done in {}", convert(sw.getTime()));
}

From source file: com.icantrap.collections.dawg.Dawg.java

public static void main(String[] args) throws IOException {
    Dawg dawg = Dawg.load(Dawg.class.getResourceAsStream("/twl06.dat"));

    InputStreamReader isr = new InputStreamReader(System.in);
    BufferedReader reader = new BufferedReader(isr);

    StopWatch stopWatch = new StopWatch();

    while (true) {
        System.out.print("letters:  ");
        String letters = reader.readLine();
        System.out.print("pattern:  ");
        String pattern = reader.readLine();

        stopWatch.reset();
        stopWatch.start();
        Result[] results = dawg.subwords(letters.toUpperCase(), pattern.toUpperCase());
        stopWatch.stop();

        if (results != null) {
            System.out.println();

            for (Result result : results) {
                StringBuilder message = new StringBuilder(result.word);
                if (result.wildcardPositions != null) {
                    message.append(" with wildcards at");
                    for (int position : result.wildcardPositions)
                        message.append(" ").append(position);
                }
                System.out.println(message.toString());
                System.out.println();
            }

            System.out.println("Found " + results.length + " matches in " + stopWatch.getTime() + " ms.");
        }

        System.out.println();
    }
}

From source file: br.edu.ufcg.lsd.oursim.ui.CLI.java

/**
 * Example:
 * 
 * <pre>
 *   java -jar oursim.jar -w resources/trace_filtrado_primeiros_1000_jobs.txt -m resources/hostinfo_sdsc.dat -synthetic_av -o oursim_trace.txt
 *   -w resources/trace_filtrado_primeiros_1000_jobs.txt -s persistent -nr 20 -md resources/hostinfo_sdsc.dat -av resources/disponibilidade.txt -o oursim_trace.txt
 *   -w resources/new_iosup_workload.txt -s persistent -pd resources/iosup_site_description.txt -wt iosup -nr 1 -synthetic_av -o oursim_trace.txt
 *   -w resources/new_workload.txt -s persistent -pd resources/marcus_site_description.txt -wt marcus -nr 20 -d -o oursim_trace.txt
 *   1 month + 1 day = 2678400 seconds
 * </pre>
 * 
 * @param args
 * @throws FileNotFoundException
 */
public static void main(String[] args) throws IOException {

    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    List<Closeable> closeables = new ArrayList<Closeable>();

    CommandLine cmd = parseCommandLine(args, prepareOptions(), HELP, USAGE, EXECUTION_LINE);

    File outputFile = (File) cmd.getOptionObject(OUTPUT);
    PrintOutput printOutput = new PrintOutput(outputFile, false);
    JobEventDispatcher.getInstance().addListener(printOutput);
    closeables.add(printOutput);
    if (cmd.hasOption(EXTRACT_REMOTE_WORKLOAD)) {
        File remoteWorkloadFile = (File) cmd.getOptionObject(EXTRACT_REMOTE_WORKLOAD);
        Output remoteWorkloadExtractor = new RemoteTasksExtractorOutput(remoteWorkloadFile);
        closeables.add(remoteWorkloadExtractor);
        JobEventDispatcher.getInstance().addListener(remoteWorkloadExtractor);
    }
    Grid grid = prepareGrid(cmd);

    ComputingElementEventCounter computingElementEventCounter = prepareOutputAccounting(cmd,
            cmd.hasOption(VERBOSE));

    Input<? extends AvailabilityRecord> availability = defineAvailability(cmd, grid.getMapOfPeers());

    prepareOptionalOutputFiles(cmd, grid, (SyntheticAvailabilityCharacterizationAbstract) availability,
            closeables);

    long timeOfFirstSubmission = cmd.getOptionValue(WORKLOAD_TYPE).equals("gwa")
            ? GWAFormat.extractSubmissionTimeFromFirstJob(cmd.getOptionValue(WORKLOAD))
            : 0;
    Workload workload = defineWorkloadType(cmd, cmd.getOptionValue(WORKLOAD), grid.getMapOfPeers(),
            timeOfFirstSubmission);

    JobSchedulerPolicy jobScheduler = defineScheduler(cmd, grid.getListOfPeers());

    OurSim oursim = new OurSim(EventQueue.getInstance(), grid, jobScheduler, workload, availability);

    oursim.setActiveEntity(new ActiveEntityImp());

    if (cmd.hasOption(HALT_SIMULATION)) {
        oursim.addHaltEvent(((Number) cmd.getOptionObject(HALT_SIMULATION)).longValue());
    }

    oursim.start();

    for (Closeable c : closeables) {
        c.close();
    }

    EventQueue.getInstance().clear();

    // append summary metrics to the end of the file
    FileWriter fw = new FileWriter(cmd.getOptionValue(OUTPUT), true);
    closeables.add(fw);
    stopWatch.stop();
    fw.write("# Simulation                  duration:" + stopWatch + ".\n");

    double utilization = grid.getUtilization();
    double realUtilization = grid.getTrueUtilization();

    int numberOfResourcesByPeer = Integer.parseInt(cmd.getOptionValue(NUM_RESOURCES_BY_PEER, "0"));
    fw.write(formatSummaryStatistics(computingElementEventCounter, "NA", "NA", false, grid.getPeers().size(),
            numberOfResourcesByPeer, utilization, realUtilization, stopWatch.getTime()) + "\n");
    fw.close();

    System.out.println(
            getSummaryStatistics(computingElementEventCounter, "NA", "NA", false, grid.getPeers().size(),
                    numberOfResourcesByPeer, utilization, realUtilization, stopWatch.getTime()));

}

From source file: com.cedarsoft.serialization.SplittingPerformanceRunner.java

private static void run(@Nonnull String description, @Nonnull Callable<String> callable) throws Exception {
    // Warm up so the timed loop below measures steady-state performance
    for (int i = 0; i < 1000; i++) {
        assertEquals("1.0.0", callable.call());
    }

    //Do the work
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    for (int i = 0; i < 100000; i++) {
        assertEquals("1.0.0", callable.call());
    }

    stopWatch.stop();
    System.out.println(description + " took " + stopWatch.getTime());
}