Example usage for org.apache.commons.lang.time StopWatch reset

Introduction

This page collects example usages of org.apache.commons.lang.time.StopWatch#reset(), drawn from several open-source projects.

Prototype

public void reset() 

Document

Resets the stopwatch: stops it if necessary and clears the internal values, returning the instance to its unstarted state so it can be reused.
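
The sketch below illustrates the reuse pattern this implies. It is a minimal, hypothetical example (the class name ResetSketch is invented here), assuming commons-lang 2.x on the classpath: calling start() on a stopped watch throws IllegalStateException, so reset() is required before the same instance can time a second interval.

import org.apache.commons.lang.time.StopWatch;

public class ResetSketch {
    public static void main(String[] args) throws InterruptedException {
        StopWatch watch = new StopWatch();

        watch.start();
        Thread.sleep(250);
        watch.stop();
        System.out.println("First interval: " + watch.getTime() + " ms");

        // start() on a stopped watch throws IllegalStateException;
        // reset() returns it to the unstarted state so it can be reused.
        watch.reset();
        watch.start();
        Thread.sleep(250);
        watch.stop();
        System.out.println("Second interval: " + watch.getTime() + " ms");
    }
}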

Usage

From source file:MainClass.java

public static void main(String[] args) {
    StopWatch clock = new StopWatch();

    System.out.println("How long does it take to take the sin of 0.34 ten million times?");
    clock.start();
    for (int i = 0; i < 100000000; i++) {
        Math.sin(0.34);
    }
    clock.stop();

    System.out.println("It takes " + clock.getTime() + " milliseconds");

    System.out.println("How long does it take to multiply 2 doubles one billion times?");
    clock.reset();
    clock.start();
    for (int i = 0; i < 1000000000; i++) {
        double result = 3423.2234 * 23e-4;
    }
    clock.stop();
    System.out.println("It takes " + clock.getTime() + " milliseconds.");

    System.out.println("How long does it take to add 2 ints one billion times?");
    clock.reset();
    clock.start();
    for (int i = 0; i < 1000000000; i++) {
        int result = 293842923 + 33382922;
    }
    clock.stop();
    System.out.println("It takes " + clock.getTime() + " milliseconds.");

    System.out.println("Testing the split() method.");
    clock.reset();
    clock.start();
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    clock.split();
    System.out.println("Split Time after 1 sec: " + clock.getTime());
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    System.out.println("Split Time after 2 sec: " + clock.getTime());
    clock.unsplit();
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    System.out.println("Time after 3 sec: " + clock.getTime());

}

From source file:de.unisb.cs.st.javalanche.mutation.util.CsvWriter.java

public static void main(String[] args) throws IOException {
    // Set<Long> covered = MutationCoverageFile.getCoveredMutations();
    // List<Long> mutationIds = QueryManager.getMutationsWithoutResult(
    // covered, 0);

    Session session = HibernateUtil.getSessionFactory().openSession();
    List<Mutation> mutations = QueryManager.getMutationsForProject(
            ConfigurationLocator.getJavalancheConfiguration().getProjectPrefix(), session);

    logger.info("Got " + mutations.size() + " mutation ids.");
    List<String> lines = new ArrayList<String>();
    lines.add(Mutation.getCsvHead() + ",DETECTED");
    int counter = 0;
    int flushs = 0;
    StopWatch stp = new StopWatch();
    for (Mutation mutation : mutations) {
        // Mutation mutation = QueryManager.getMutationByID(id, session);
        lines.add(mutation.getCsvString() + "," + mutation.isKilled());
        counter++;
        if (counter > 20) {
            counter = 0;
            // 20, same as the JDBC batch size
            // flush a batch of inserts and release memory:
            // see
            // http://www.hibernate.org/hib_docs/reference/en/html/batch.html
            stp.reset();
            stp.start();
            flushs++;
            session.flush();
            // session.clear();
            logger.info("Did flush. It took: " + DurationFormatUtils.formatDurationHMS(stp.getTime()));
        }
    }
    session.close();
    logger.info("Starting to write file with " + lines.size() + " entries.");
    FileUtils.writeLines(new File("mutations.csv"), lines);
}

From source file:com.icantrap.collections.dawg.Dawg.java

public static void main(String[] args) throws IOException {
    Dawg dawg = Dawg.load(Dawg.class.getResourceAsStream("/twl06.dat"));

    InputStreamReader isr = new InputStreamReader(System.in);
    BufferedReader reader = new BufferedReader(isr);

    StopWatch stopWatch = new StopWatch();

    while (true) {
        System.out.print("letters:  ");
        String letters = reader.readLine();
        System.out.print("pattern:  ");
        String pattern = reader.readLine();
        if (letters == null || pattern == null) {
            break; // readLine() returns null on end of input
        }

        stopWatch.reset();
        stopWatch.start();
        Result[] results = dawg.subwords(letters.toUpperCase(), pattern.toUpperCase());
        stopWatch.stop();

        if (results != null) {
            System.out.println();

            for (Result result : results) {
                StringBuilder message = new StringBuilder(result.word);
                if (result.wildcardPositions != null) {
                    message.append(" with wildcards at");
                    for (int position : result.wildcardPositions)
                        message.append(" ").append(position);
                }
                System.out.println(message.toString());
                System.out.println();
            }

            System.out.println("Found " + results.length + " matches in " + stopWatch.getTime() + " ms.");
        }

        System.out.println();
    }
}

From source file:fr.inria.edelweiss.kgimport.RdfSplitter.java

/**
 * The application entrypoint, configured through the command line input
 * arguments.
 *
 * @param args the input command line arguments.
 */
public static void main(String args[]) {

    RdfSplitter rdfSplitter = new RdfSplitter();

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "Print usage information.");
    Option inDirOpt = new Option("i", "input-dir", true, "The directory containing RDF files to be loaded.");
    Option outDirOpt = new Option("o", "output-dir", true,
            "The directory containing the generated RDF fragments");
    Option predFiltOpt = new Option("p", "predicate-filter", true,
            "Predicate filter used to segment the dataset. "
                    + "You can use multiple filters, typically one per fragment.");
    Option fragNbOpt = new Option("n", "number-of-fragments", true,
            "Number of fragments generated for the whole input dataset.");
    Option fragRepOpt = new Option("f", "fractionning-percentage", true,
            "Percentage of the whole input dataset for this fragment.");
    Option tdbOpt = new Option("tdb", "tdb-storage", false,
            "RDF fragments are persisted into a Jena TDB backend.");
    Option versionOpt = new Option("v", "version", false, "Print the version information and exit.");
    options.addOption(inDirOpt);
    options.addOption(outDirOpt);
    options.addOption(predFiltOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(fragNbOpt);
    options.addOption(fragRepOpt);
    options.addOption(tdbOpt);

    String header = "RDF data fragmentation tool command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);

        if (cmd.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("java -jar [].jar", header, options, footer, true);
            System.exit(0);
        }

        if (!cmd.hasOption("i")) {
            logger.warn("You must specify a valid input directory !");
            System.exit(-1);
        } else {
            rdfSplitter.setInputDirPath(cmd.getOptionValue("i"));
        }
        if (!cmd.hasOption("o")) {
            logger.warn("You must specify a valid output directory !");
            System.exit(-1);
        } else {
            rdfSplitter.setOutputDirPath(cmd.getOptionValue("o"));
        }
        if (cmd.hasOption("p")) {
            rdfSplitter.setInputPredicates(new ArrayList<String>(Arrays.asList(cmd.getOptionValues("p"))));
        }
        if (cmd.hasOption("f")) {
            ArrayList<String> opts = new ArrayList<String>(Arrays.asList(cmd.getOptionValues("f")));
            for (String opt : opts) {
                try {
                    rdfSplitter.getFragList().add(Integer.parseInt(opt));
                } catch (NumberFormatException e) {
                    logger.error(opt + " cannot be parsed as a percentage value.");
                    System.exit(-1);
                }
            }
        }
        if (cmd.hasOption("n")) {
            try {
                rdfSplitter.setFragNb(Integer.parseInt(cmd.getOptionValue("n")));
            } catch (NumberFormatException e) {
                logger.error(cmd.getOptionValue("n") + " cannot be parsed as an integer value.");
                System.exit(-1);
            }
        }

        File oDir = new File(rdfSplitter.getOutputDirPath());
        if (oDir.exists()) {
            logger.warn(rdfSplitter.getOutputDirPath() + " already exists!");
            oDir = Files.createTempDir();
            logger.warn(oDir.getAbsolutePath() + " created.");
            rdfSplitter.setOutputDirPath(oDir.getAbsolutePath());
        } else {
            if (oDir.mkdir()) {
                logger.info(rdfSplitter.getOutputDirPath() + " created.");
            }
        }

        if (!cmd.hasOption("n") && !cmd.hasOption("f") && !cmd.hasOption("p")) {
            logger.error("You must specify just one fragmentation type through '-n', '-f', or 'p' options");
            for (String arg : args) {
                logger.trace(arg);
            }
            System.exit(-1);
        }

        String fragName = rdfSplitter.getInputDirPath()
                .substring(rdfSplitter.getInputDirPath().lastIndexOf("/") + 1);

        //Input data loading
        Model model = ModelFactory.createDefaultModel();
        File inputDir = new File(rdfSplitter.getInputDirPath());
        if (inputDir.isDirectory()) {
            for (File f : inputDir.listFiles()) {
                logger.info("Loading " + f.getAbsolutePath());
                if (f.isDirectory()) {
                    String directory = f.getAbsolutePath();
                    Dataset dataset = TDBFactory.createDataset(directory);
                    dataset.begin(ReadWrite.READ);
                    // Get model inside the transaction
                    model.add(dataset.getDefaultModel());
                    dataset.end();
                } else {
                    InputStream iS;
                    try {
                        iS = new FileInputStream(f);
                        if (f.getAbsolutePath().endsWith(".n3")) {
                            model.read(iS, null, "N3");
                        } else if (f.getAbsolutePath().endsWith(".nt")) {
                            model.read(iS, null, "N-TRIPLES");
                        } else if (f.getAbsolutePath().endsWith(".rdf")) {
                            model.read(iS, null);
                        }
                    } catch (FileNotFoundException ex) {
                        LogManager.getLogger(RdfSplitter.class.getName()).log(Level.ERROR, "", ex);
                    }
                }
            }
            logger.info("Loaded " + model.size() + " triples");
        } else {
            System.exit(0);
        }

        StopWatch sw = new StopWatch();
        if (cmd.hasOption("n")) {
            sw.start();
            if (cmd.hasOption("tdb")) {
                rdfSplitter.saveFragmentsTDB(rdfSplitter.getFragHoriz(model, rdfSplitter.getFragNb()),
                        "Homog-" + fragName);
            } else {
                rdfSplitter.saveFragmentsRDF(rdfSplitter.getFragHoriz(model, rdfSplitter.getFragNb()),
                        "Homog-" + fragName);
            }
            logger.info("Homog horiz frag in " + sw.getTime() + "ms");
            sw.reset();
        } else if (cmd.hasOption("f")) {
            sw.start();
            if (cmd.hasOption("tdb")) {
                rdfSplitter.saveFragmentsTDB(rdfSplitter.getFragHoriz(model, rdfSplitter.getFragList()),
                        "Inhomog-" + fragName);
            } else {
                rdfSplitter.saveFragmentsRDF(rdfSplitter.getFragHoriz(model, rdfSplitter.getFragList()),
                        "Inhomog-" + fragName);
            }
            logger.info("Inhomog horiz frag in " + sw.getTime() + "ms");
            sw.reset();
        } else if (cmd.hasOption("p")) {
            sw.start();
            if (cmd.hasOption("tdb")) {
                rdfSplitter.saveFragmentsTDB(rdfSplitter.getFragVert(model, rdfSplitter.getInputPredicates()));
            } else {
                rdfSplitter.saveFragmentsRDF(rdfSplitter.getFragVert(model, rdfSplitter.getInputPredicates()));
            }
            logger.info("Vert frag in " + sw.getTime() + "ms");
            sw.reset();
        }

    } catch (ParseException ex) {
        logger.error("Impossible to parse the input command line " + cmd.toString());
    }
}

From source file:fr.inria.edelweiss.kgdqp.core.FedInferrencingCLI.java

public static void main(String args[]) throws ParseException, EngineException, InterruptedException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null;
    boolean rulesSelection = false;
    File rulesDir = null;
    File ontDir = null;

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules");
    Option ontOpt = new Option("o", "ontologiesDir", true,
            "directory containing the ontologies for rules selection");
    //        Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run");
    options.addOption(queryOpt);
    options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(rulesOpt);
    options.addOption(ontOpt);
    //        options.addOption(selOpt);

    String header = "Corese/KGRAM distributed rule engine command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (!cmd.hasOption("e")) {
        logger.info("You must specify at least the URL of one sparql endpoint !");
        System.exit(0);
    } else {
        endpoints = new ArrayList<String>(Arrays.asList(cmd.getOptionValues("e")));
    }
    if (cmd.hasOption("o")) {
        rulesSelection = true;
        String ontDirPath = cmd.getOptionValue("o");
        ontDir = new File(ontDirPath);
        if (!ontDir.isDirectory()) {
            logger.warn(ontDirPath + " is not a valid directory path.");
            System.exit(0);
        }
    }
    if (!cmd.hasOption("r")) {
        logger.info("You must specify a path for inference rules directory !");
        System.exit(0);
    }

    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    String rulesDirPath = cmd.getOptionValue("r");
    rulesDir = new File(rulesDirPath);
    if (!rulesDir.isDirectory()) {
        logger.warn(rulesDirPath + " is not a valid directory path.");
        System.exit(0);
    }

    /////////////////
    Graph graph = Graph.create();
    QueryProcessDQP execDQP = QueryProcessDQP.create(graph);
    for (String url : endpoints) {
        try {
            execDQP.addRemote(new URL(url), WSImplem.REST);
        } catch (MalformedURLException ex) {
            logger.error(url + " is not a well-formed URL");
            System.exit(1);
        }
    }

    // Local rules graph initialization
    Graph rulesG = Graph.create();
    Load ld = Load.create(rulesG);

    if (rulesSelection) {
        // Ontology loading
        if (ontDir.isDirectory()) {
            for (File o : ontDir.listFiles()) {
                logger.info("Loading " + o.getAbsolutePath());
                ld.load(o.getAbsolutePath());
            }
        }
    }

    // Rules loading
    if (rulesDir.isDirectory()) {
        for (File r : rulesDir.listFiles()) {
            logger.info("Loading " + r.getAbsolutePath());
            ld.load(r.getAbsolutePath());
        }
    }

    // Rule engine initialization
    RuleEngine ruleEngine = RuleEngine.create(graph);
    ruleEngine.set(execDQP);

    StopWatch sw = new StopWatch();
    logger.info("Federated graph size : " + graph.size());
    logger.info("Rules graph size : " + rulesG.size());

    // Rule selection
    logger.info("Rules selection");
    QueryProcess localKgram = QueryProcess.create(rulesG);
    ArrayList<String> applicableRules = new ArrayList<String>();
    sw.start();
    String rulesSelQuery = "";
    if (rulesSelection) {
        rulesSelQuery = pertinentRulesQuery;
    } else {
        rulesSelQuery = allRulesQuery;
    }
    Mappings maps = localKgram.query(rulesSelQuery);
    logger.info("Rules selected in " + sw.getTime() + " ms");
    logger.info("Applicable rules : " + maps.size());

    // Selected rule loading
    for (Mapping map : maps) {
        IDatatype dt = (IDatatype) map.getValue("?res");
        String rule = dt.getLabel();
        //loading rule in the rule engine
        //            logger.info("Adding rule : " + rule);
        applicableRules.add(rule);
        ruleEngine.addRule(rule);
    }

    // Rules application on distributed sparql endpoints
    logger.info("Rules application (" + applicableRules.size() + " rules)");
    ExecutorService threadPool = Executors.newCachedThreadPool();
    RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine);
    sw.reset();
    sw.start();

    //        ruleEngine.process();
    threadPool.execute(ruleThread);
    threadPool.shutdown();

    //monitoring loop
    while (!threadPool.isTerminated()) {
        System.out.println("******************************");
        System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter,
                QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));
        System.out.println("Rule engine running for " + sw.getTime() + " ms");
        System.out.println("Federated graph size : " + graph.size());
        Thread.sleep(10000);
    }

    logger.info("Federated graph size : " + graph.size());
    logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter,
            QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));

    ///////////// Query file processing
    //        StringBuffer fileData = new StringBuffer(1000);
    //        BufferedReader reader = null;
    //        try {
    //            reader = new BufferedReader(new FileReader(queryPath));
    //        } catch (FileNotFoundException ex) {
    //             logger.error("Query file "+queryPath+" not found !");
    //             System.exit(1);
    //        }
    //        char[] buf = new char[1024];
    //        int numRead = 0;
    //        try {
    //            while ((numRead = reader.read(buf)) != -1) {
    //                String readData = String.valueOf(buf, 0, numRead);
    //                fileData.append(readData);
    //                buf = new char[1024];
    //            }
    //            reader.close();
    //        } catch (IOException ex) {
    //           logger.error("Error while reading query file "+queryPath);
    //           System.exit(1);
    //        }
    //
    //        String sparqlQuery = fileData.toString();
    //
    //        Query q = exec.compile(sparqlQuery,null);
    //        System.out.println(q);
    //        
    //        StopWatch sw = new StopWatch();
    //        sw.start();
    //        Mappings map = exec.query(sparqlQuery);
    //        int dqpSize = map.size();
    //        System.out.println("--------");
    //        long time = sw.getTime();
    //        System.out.println(time + " " + dqpSize);
}

From source file:fr.inria.edelweiss.kgdqp.core.CentralizedInferrencing.java

public static void main(String args[])
        throws ParseException, EngineException, InterruptedException, IOException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null;
    boolean rulesSelection = false;
    File rulesDir = null;
    File ontDir = null;

    /////////////////
    Graph graph = Graph.create();
    QueryProcess exec = QueryProcess.create(graph);

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    //        Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    //        Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules");
    Option ontOpt = new Option("o", "ontologiesDir", true,
            "directory containing the ontologies for rules selection");
    //        Option locOpt = new Option("c", "centralized", false, "performs centralized inferences");
    Option dataOpt = new Option("l", "load", true, "data file or directory to be loaded");
    //        Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run");
    //        options.addOption(queryOpt);
    //        options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(rulesOpt);
    options.addOption(ontOpt);
    //        options.addOption(selOpt);
    //        options.addOption(locOpt);
    options.addOption(dataOpt);

    String header = "Corese/KGRAM rule engine experiment command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (cmd.hasOption("o")) {
        rulesSelection = true;
        String ontDirPath = cmd.getOptionValue("o");
        ontDir = new File(ontDirPath);
        if (!ontDir.isDirectory()) {
            logger.warn(ontDirPath + " is not a valid directory path.");
            System.exit(0);
        }
    }
    if (!cmd.hasOption("r")) {
        logger.info("You must specify a path for inference rules directory !");
        System.exit(0);
    }

    if (cmd.hasOption("l")) {
        String[] dataPaths = cmd.getOptionValues("l");
        for (String path : dataPaths) {
            Load ld = Load.create(graph);
            ld.load(path);
            logger.info("Loaded " + path);
        }
    }

    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    String rulesDirPath = cmd.getOptionValue("r");
    rulesDir = new File(rulesDirPath);
    if (!rulesDir.isDirectory()) {
        logger.warn(rulesDirPath + " is not a valid directory path.");
        System.exit(0);
    }

    // Local rules graph initialization
    Graph rulesG = Graph.create();
    Load ld = Load.create(rulesG);

    if (rulesSelection) {
        // Ontology loading
        if (ontDir.isDirectory()) {
            for (File o : ontDir.listFiles()) {
                logger.info("Loading " + o.getAbsolutePath());
                ld.load(o.getAbsolutePath());
            }
        }
    }

    // Rules loading
    if (rulesDir.isDirectory()) {
        for (File r : rulesDir.listFiles()) {
            logger.info("Loading " + r.getAbsolutePath());
            ld.load(r.getAbsolutePath());
        }
    }

    // Rule engine initialization
    RuleEngine ruleEngine = RuleEngine.create(graph);
    ruleEngine.set(exec);
    ruleEngine.setOptimize(true);
    ruleEngine.setConstructResult(true);
    ruleEngine.setTrace(true);

    StopWatch sw = new StopWatch();
    logger.info("Federated graph size : " + graph.size());
    logger.info("Rules graph size : " + rulesG.size());

    // Rule selection
    logger.info("Rules selection");
    QueryProcess localKgram = QueryProcess.create(rulesG);
    ArrayList<String> applicableRules = new ArrayList<String>();
    sw.start();
    String rulesSelQuery = "";
    if (rulesSelection) {
        rulesSelQuery = pertinentRulesQuery;
    } else {
        rulesSelQuery = allRulesQuery;
    }
    Mappings maps = localKgram.query(rulesSelQuery);
    logger.info("Rules selected in " + sw.getTime() + " ms");
    logger.info("Applicable rules : " + maps.size());

    // Selected rule loading
    for (Mapping map : maps) {
        IDatatype dt = (IDatatype) map.getValue("?res");
        String rule = dt.getLabel();
        //loading rule in the rule engine
        //            logger.info("Adding rule : ");
        //            System.out.println("-------");
        //            System.out.println(rule);
        //            System.out.println("");
        //            if (! rule.toLowerCase().contains("sameas")) {
        applicableRules.add(rule);
        ruleEngine.addRule(rule);
        //            }
    }

    // Rules application on distributed sparql endpoints
    logger.info("Rules application (" + applicableRules.size() + " rules)");
    ExecutorService threadPool = Executors.newCachedThreadPool();
    RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine);
    sw.reset();
    sw.start();

    //        ruleEngine.process();
    threadPool.execute(ruleThread);
    threadPool.shutdown();

    //monitoring loop
    while (!threadPool.isTerminated()) {
        //            System.out.println("******************************");
        //            System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));
        //            System.out.println("Rule engine running for " + sw.getTime() + " ms");
        //            System.out.println("Federated graph size : " + graph.size());
        System.out.println(sw.getTime() + " , " + graph.size());
        Thread.sleep(5000);
    }

    logger.info("Federated graph size : " + graph.size());
    //        logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));

    //        TripleFormat f = TripleFormat.create(graph, true);
    //        f.write("/tmp/gAll.ttl");

}

From source file:fr.inria.edelweiss.kgdqp.core.CentralizedInferrencingNoSpin.java

public static void main(String args[])
        throws ParseException, EngineException, InterruptedException, IOException, LoadException {

    List<String> endpoints = new ArrayList<String>();
    String queryPath = null;
    boolean rulesSelection = false;
    File rulesDir = null;
    File ontDir = null;

    /////////////////
    Graph graph = Graph.create();
    QueryProcess exec = QueryProcess.create(graph);

    Options options = new Options();
    Option helpOpt = new Option("h", "help", false, "print this message");
    //        Option queryOpt = new Option("q", "query", true, "specify the sparql query file");
    //        Option endpointOpt = new Option("e", "endpoint", true, "a federated sparql endpoint URL");
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option rulesOpt = new Option("r", "rulesDir", true, "directory containing the inference rules");
    Option ontOpt = new Option("o", "ontologiesDir", true,
            "directory containing the ontologies for rules selection");
    //        Option locOpt = new Option("c", "centralized", false, "performs centralized inferences");
    Option dataOpt = new Option("l", "load", true, "data file or directory to be loaded");
    //        Option selOpt = new Option("s", "rulesSelection", false, "if set to true, only the applicable rules are run");
    //        options.addOption(queryOpt);
    //        options.addOption(endpointOpt);
    options.addOption(helpOpt);
    options.addOption(versionOpt);
    options.addOption(rulesOpt);
    options.addOption(ontOpt);
    //        options.addOption(selOpt);
    //        options.addOption(locOpt);
    options.addOption(dataOpt);

    String header = "Corese/KGRAM rule engine experiment command line interface";
    String footer = "\nPlease report any issue to alban.gaignard@cnrs.fr, olivier.corby@inria.fr";

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("kgdqp", header, options, footer, true);
        System.exit(0);
    }
    if (cmd.hasOption("o")) {
        rulesSelection = true;
        String ontDirPath = cmd.getOptionValue("o");
        ontDir = new File(ontDirPath);
        if (!ontDir.isDirectory()) {
            logger.warn(ontDirPath + " is not a valid directory path.");
            System.exit(0);
        }
    }
    if (!cmd.hasOption("r")) {
        logger.info("You must specify a path for inference rules directory !");
        System.exit(0);
    }

    if (cmd.hasOption("l")) {
        String[] dataPaths = cmd.getOptionValues("l");
        for (String path : dataPaths) {
            Load ld = Load.create(graph);
            ld.load(path);
            logger.info("Loaded " + path);
        }
    }

    if (cmd.hasOption("v")) {
        logger.info("version 3.0.4-SNAPSHOT");
        System.exit(0);
    }

    String rulesDirPath = cmd.getOptionValue("r");
    rulesDir = new File(rulesDirPath);
    if (!rulesDir.isDirectory()) {
        logger.warn(rulesDirPath + " is not a valid directory path.");
        System.exit(0);
    }

    // Local rules graph initialization
    Graph rulesG = Graph.create();
    Load ld = Load.create(rulesG);

    if (rulesSelection) {
        // Ontology loading
        if (ontDir.isDirectory()) {
            for (File o : ontDir.listFiles()) {
                logger.info("Loading " + o.getAbsolutePath());
                ld.load(o.getAbsolutePath());
            }
        }
    }

    // Rules loading
    if (rulesDir.isDirectory()) {
        for (File r : rulesDir.listFiles()) {
            if (r.getAbsolutePath().endsWith(".rq")) {
                logger.info("Loading " + r.getAbsolutePath());
                //                ld.load(r.getAbsolutePath());

                //                    byte[] encoded = Files.readAllBytes(Paths.get(r.getAbsolutePath()));
                //                    String construct = new String(encoded, "UTF-8"); //StandardCharsets.UTF_8);

                FileInputStream f = new FileInputStream(r);
                QueryLoad ql = QueryLoad.create();
                String construct = ql.read(f);
                f.close();

                SPINProcess sp = SPINProcess.create();
                String spinConstruct = sp.toSpin(construct);

                ld.load(new ByteArrayInputStream(spinConstruct.getBytes()), Load.TURTLE_FORMAT);
                logger.info("Rules graph size : " + rulesG.size());

            }
        }
    }

    // Rule engine initialization
    RuleEngine ruleEngine = RuleEngine.create(graph);
    ruleEngine.set(exec);
    ruleEngine.setOptimize(true);
    ruleEngine.setConstructResult(true);
    ruleEngine.setTrace(true);

    StopWatch sw = new StopWatch();
    logger.info("Federated graph size : " + graph.size());
    logger.info("Rules graph size : " + rulesG.size());

    // Rule selection
    logger.info("Rules selection");
    QueryProcess localKgram = QueryProcess.create(rulesG);
    ArrayList<String> applicableRules = new ArrayList<String>();
    sw.start();
    String rulesSelQuery = "";
    if (rulesSelection) {
        rulesSelQuery = pertinentRulesQuery;
    } else {
        rulesSelQuery = allRulesQuery;
    }
    Mappings maps = localKgram.query(rulesSelQuery);
    logger.info("Rules selected in " + sw.getTime() + " ms");
    logger.info("Applicable rules : " + maps.size());

    // Selected rule loading
    for (Mapping map : maps) {
        IDatatype dt = (IDatatype) map.getValue("?res");
        String rule = dt.getLabel();
        //loading rule in the rule engine
        //            logger.info("Adding rule : ");
        //            System.out.println("-------");
        //            System.out.println(rule);
        //            System.out.println("");
        //            if (! rule.toLowerCase().contains("sameas")) {
        applicableRules.add(rule);
        ruleEngine.addRule(rule);
        //            }
    }

    // Rules application on distributed sparql endpoints
    logger.info("Rules application (" + applicableRules.size() + " rules)");
    ExecutorService threadPool = Executors.newCachedThreadPool();
    RuleEngineThread ruleThread = new RuleEngineThread(ruleEngine);
    sw.reset();
    sw.start();

    //        ruleEngine.process();
    threadPool.execute(ruleThread);
    threadPool.shutdown();

    //monitoring loop
    while (!threadPool.isTerminated()) {
        //            System.out.println("******************************");
        //            System.out.println(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));
        //            System.out.println("Rule engine running for " + sw.getTime() + " ms");
        //            System.out.println("Federated graph size : " + graph.size());
        System.out.println(sw.getTime() + " , " + graph.size());
        Thread.sleep(5000);
    }

    logger.info("Federated graph size : " + graph.size());
    //        logger.info(Util.jsonDqpCost(QueryProcessDQP.queryCounter, QueryProcessDQP.queryVolumeCounter, QueryProcessDQP.sourceCounter, QueryProcessDQP.sourceVolumeCounter));

    //        TripleFormat f = TripleFormat.create(graph, true);
    //        f.write("/tmp/gAll.ttl");
}

From source file:de.unisb.cs.st.javalanche.mutation.util.ResultDeleter.java

public static void deleteResults(Session session, Query q) {
    @SuppressWarnings("unchecked")
    List<Mutation> mutations = q.list();
    int deletes = 0, flushs = 0;
    StopWatch stp = new StopWatch();
    for (Mutation m : mutations) {
        MutationTestResult result = m.getMutationResult();
        if (result != null) {
            m.setMutationResult(null);
            session.delete(result);
            deletes++;
        }
        if (deletes > 20) {
            // 20, same as the JDBC batch size
            // flush a batch of inserts and release memory:
            // see
            // http://www.hibernate.org/hib_docs/reference/en/html/batch.html
            stp.reset();
            stp.start();
            flushs++;
            session.flush();
            // session.clear();
            logger.info("Did flush. It took: " + DurationFormatUtils.formatDurationHMS(stp.getTime()));
            deletes = 0;
        }
    }
    logger.info(String.format("Deleted %d mutation results", mutations.size()));
}

From source file:com.google.code.jerseyclients.asynchttpclient.AsyncHttpClientJerseyClientHandler.java

/**
 * @see com.sun.jersey.api.client.ClientHandler#handle(com.sun.jersey.api.client.ClientRequest)
 */
public ClientResponse handle(final ClientRequest clientRequest) throws ClientHandlerException {

    final BoundRequestBuilder boundRequestBuilder = getBoundRequestBuilder(clientRequest);

    PerRequestConfig perRequestConfig = new PerRequestConfig();
    perRequestConfig.setRequestTimeoutInMs(this.jerseyHttpClientConfig.getReadTimeOut());

    if (this.jerseyHttpClientConfig.getProxyInformation() != null) {
        ProxyServer proxyServer = new ProxyServer(jerseyHttpClientConfig.getProxyInformation().getProxyHost(),
                jerseyHttpClientConfig.getProxyInformation().getProxyPort());
        perRequestConfig = new PerRequestConfig(proxyServer, this.jerseyHttpClientConfig.getReadTimeOut());
    }

    boundRequestBuilder.setPerRequestConfig(perRequestConfig);

    if (this.jerseyHttpClientConfig.getApplicationCode() != null) {
        boundRequestBuilder.addHeader(this.jerseyHttpClientConfig.getApplicationCodeHeader(),
                this.jerseyHttpClientConfig.getApplicationCode());
    }
    if (this.jerseyHttpClientConfig.getOptionnalHeaders() != null) {
        for (Entry<String, String> entry : this.jerseyHttpClientConfig.getOptionnalHeaders().entrySet()) {
            boundRequestBuilder.addHeader(entry.getKey(), entry.getValue());
        }
    }

    if (StringUtils.equalsIgnoreCase("POST", clientRequest.getMethod())) {

        if (clientRequest.getEntity() != null) {
            final RequestEntityWriter re = getRequestEntityWriter(clientRequest);

            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            try {
                re.writeRequestEntity(new CommittingOutputStream(baos) {
                    @Override
                    protected void commit() throws IOException {
                        writeOutBoundHeaders(clientRequest.getHeaders(), boundRequestBuilder);
                    }
                });
            } catch (IOException ex) {
                throw new ClientHandlerException(ex);
            }

            boundRequestBuilder.setBody(new ByteArrayInputStream(baos.toByteArray()));

        }
    } else {
        writeOutBoundHeaders(clientRequest.getHeaders(), boundRequestBuilder);
    }
    try {
        StopWatch stopWatch = new StopWatch();
        stopWatch.reset();
        stopWatch.start();
        Future<Response> futureResponse = boundRequestBuilder.execute();
        Response response = futureResponse.get();
        int httpReturnCode = response.getStatusCode();
        stopWatch.stop();
        log.info("time to call rest url " + clientRequest.getURI() + ", " + stopWatch.getTime() + " ms");
        // if no content is returned, hand back an empty stream so the
        // caller maps the response to a null object
        if (httpReturnCode == Status.NO_CONTENT.getStatusCode()) {
            return new ClientResponse(httpReturnCode, getInBoundHeaders(response), IOUtils.toInputStream(""),
                    getMessageBodyWorkers());
        }
        return new ClientResponse(httpReturnCode, getInBoundHeaders(response),
                response.getResponseBodyAsStream() == null ? IOUtils.toInputStream("")
                        : response.getResponseBodyAsStream(),
                getMessageBodyWorkers());
    } catch (Exception e) {
        if (e.getCause() != null && (e.getCause() instanceof TimeoutException)) {
            throw new ClientHandlerException(new SocketTimeoutException());
        }
        throw new ClientHandlerException(e);
    }
}

From source file:ch.systemsx.cisd.openbis.generic.server.dataaccess.db.search.FullTextIndexerRunnable.java

public final void run() {
    final IndexMode indexMode = context.getIndexMode();
    if (indexMode == IndexMode.NO_INDEX) {
        return;
    }
    final Set<Class<?>> indexedEntities = indexedEntityFinder.getIndexedEntities();

    // final Set<Class<?>> indexedEntities = new HashSet<Class<?>>();
    // indexedEntities.add(ExternalDataPE.class);
    // indexedEntities.add(ExperimentPE.class);

    if (indexedEntities.size() == 0) {
        operationLog.info(
                String.format("No entity annotated with '%s' has been found.", Indexed.class.getSimpleName()));
        return;
    }
    Class<?> currentEntity = null;
    try {
        // timeout exceptions were observed for the default timeout when database was bigger
        IndexWriter.setDefaultWriteLockTimeout(3000);
        final File indexBase = new File(context.getIndexBase());
        final File markerFile = new File(indexBase, FULL_TEXT_INDEX_MARKER_FILENAME);
        if (indexMode == IndexMode.SKIP_IF_MARKER_FOUND && markerFile.exists()) {
            operationLog
                    .debug(String.format("Skipping indexing process as " + "marker file '%s' already exists.",
                            markerFile.getAbsolutePath()));
            return;
        }
        final Session session = getSession();
        final StopWatch stopWatch = new StopWatch();
        for (final Class<?> indexedEntity : indexedEntities) {
            currentEntity = indexedEntity;
            stopWatch.reset();
            stopWatch.start();
            fullTextIndexer.doFullTextIndex(session, indexedEntity);
            stopWatch.stop();
            operationLog
                    .info(String.format("Indexing entity '%s' took %s.", indexedEntity.getName(), stopWatch));
        }
        FileUtils.touch(markerFile);
        releaseSession(session);
    } catch (final Throwable th) {
        notificationLog
                .error(String.format("A problem has occurred while indexing entity '%s'.", currentEntity), th);
    }
}