Example usage for org.apache.commons.lang.time StopWatch stop

Introduction

On this page you can find example usages of org.apache.commons.lang.time.StopWatch.stop().

Prototype

public void stop() 

Document

Stop the stopwatch.

This method ends the current timing session, allowing the elapsed time to be retrieved with getTime().
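
Before the full examples below, here is a minimal, self-contained sketch of the typical lifecycle (the class name StopWatchDemo is illustrative): start() opens a timing session, stop() closes it so getTime() reports a fixed elapsed value, and reset() is required before the same watch can be started again.

import org.apache.commons.lang.time.StopWatch;

public class StopWatchDemo {
    public static void main(String[] args) throws InterruptedException {
        StopWatch watch = new StopWatch();

        watch.start();          // begin the timing session
        Thread.sleep(250);      // stand-in for the work being measured
        watch.stop();           // end the session; the elapsed time is now fixed
        System.out.println("Elapsed: " + watch.getTime() + " ms");

        watch.reset();          // required before the watch can be restarted
        watch.start();
        Thread.sleep(100);
        watch.stop();
        System.out.println("Elapsed after reset: " + watch.getTime() + " ms");
    }
}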

Usage

From source file:ch.systemsx.cisd.openbis.generic.server.dataaccess.db.IndexCreationUtil.java

public static void main(final String[] args) throws Exception {
    Parameters parameters = null;
    try {
        parameters = new Parameters(args);
    } catch (IllegalArgumentException e) {
        System.out.println(Parameters.getUsage());
        System.exit(1);
        return; // for Eclipse
    }
    LogInitializer.init();
    String databaseKind = parameters.getDatabaseKind();
    String duplicatedDatabaseKind = parameters.getDuplicatedDatabaseKind();
    String indexFolder = parameters.getIndexFolder();
    if (duplicatedDatabaseKind != null) {

        String databaseName = DATABASE_NAME_PREFIX + databaseKind;
        String duplicatedDatabaseName = DATABASE_NAME_PREFIX + duplicatedDatabaseKind;
        boolean ok = duplicateDatabase(duplicatedDatabaseName, databaseName);
        if (ok == false) {
            System.exit(1);
        }
        File dumpFile = parameters.getDumpFile();
        operationLog.info("Dump '" + duplicatedDatabaseName + "' into '" + dumpFile + "'.");
        DumpPreparator.createDatabaseDump(duplicatedDatabaseName, dumpFile);
        databaseKind = duplicatedDatabaseKind;
        FileUtilities.deleteRecursively(new File(indexFolder));
    }
    System.setProperty("database.kind", databaseKind);
    // Deactivate the indexing in the application context loaded by Spring.
    System.setProperty("hibernate.search.index-mode", "NO_INDEX");
    System.setProperty("hibernate.search.index-base", indexFolder);
    System.setProperty("database.create-from-scratch", "false");
    hibernateSearchContext = createHibernateSearchContext(indexFolder);
    hibernateSearchContext.afterPropertiesSet();
    operationLog.info("=========== Start indexing ===========");
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    performFullTextIndex();
    stopWatch.stop();
    operationLog.info("Index of database '" + DATABASE_NAME_PREFIX + databaseKind + "' successfully built in '"
            + indexFolder + "' after " + ((stopWatch.getTime() + 30000) / 60000) + " minutes.");
    System.exit(0);
}

From source file:com.eaio.uuid.UUIDPerformance.java

public static void main(String[] args) {

    Thread[] threads = new Thread[Runtime.getRuntime().availableProcessors()];

    for (int i = 0; i < threads.length; ++i) {
        threads[i] = new Thread(new UUIDRunnable(count / threads.length));
    }

    StopWatch watch = new StopWatch();
    watch.start();

    for (Thread t : threads) {
        t.start();
    }

    for (Thread t : threads) {
        try {
            t.join();
        } catch (InterruptedException e) {
            // Moo
        }
    }

    watch.stop();
    System.out.println(watch.getTime());
}

From source file:MainClass.java

public static void main(String[] args) {
    StopWatch clock = new StopWatch();
    NumberFormat format = NumberFormat.getInstance();

    System.out.println("How long does it take to take the sin of 0.34 ten million times?");
    clock.start();
    for (int i = 0; i < 100000000; i++) {
        Math.sin(0.34);
    }
    clock.stop();

    System.out.println("It takes " + clock.getTime() + " milliseconds");

    System.out.println("How long does it take to multiply 2 doubles one billion times?");
    clock.reset();
    clock.start();
    for (int i = 0; i < 1000000000; i++) {
        double result = 3423.2234 * 23e-4;
    }
    clock.stop();
    System.out.println("It takes " + clock.getTime() + " milliseconds.");

    System.out.println("How long does it take to add 2 ints one billion times?");
    clock.reset();
    clock.start();
    for (int i = 0; i < 1000000000; i++) {
        int result = 293842923 + 33382922;
    }
    clock.stop();
    System.out.println("It takes " + clock.getTime() + " milliseconds.");

    System.out.println("Testing the split() method.");
    clock.reset();
    clock.start();
    try {
        Thread.sleep(1000);
    } catch (Exception e) {
    }
    clock.split();
    System.out.println("Split Time after 1 sec: " + clock.getTime());
    try {
        Thread.sleep(1000);
    } catch (Exception e) {
    }
    System.out.println("Split Time after 2 sec: " + clock.getTime());
    clock.unsplit();
    try {
        Thread.sleep(1000);
    } catch (Exception e) {
    }
    System.out.println("Time after 3 sec: " + clock.getTime());

}

From source file:TimeTrial.java

public static void main(String[] args) {

    StopWatch stWatch = new StopWatch();

    // Start StopWatch
    stWatch.start();

    // Get iterator for all days in a week starting Monday
    Iterator itr = DateUtils.iterator(new Date(), DateUtils.RANGE_WEEK_MONDAY);

    while (itr.hasNext()) {
        Calendar gCal = (Calendar) itr.next();
        System.out.println(gCal.getTime());
    }

    // Stop StopWatch
    stWatch.stop();
    System.out.println("Time Taken >>" + stWatch.getTime());

}

From source file:com.icantrap.collections.dawg.Dawg.java

public static void main(String[] args) throws IOException {
    Dawg dawg = Dawg.load(Dawg.class.getResourceAsStream("/twl06.dat"));

    InputStreamReader isr = new InputStreamReader(System.in);
    BufferedReader reader = new BufferedReader(isr);

    StopWatch stopWatch = new StopWatch();

    while (true) {
        System.out.print("letters:  ");
        String letters = reader.readLine();
        System.out.print("pattern:  ");
        String pattern = reader.readLine();

        stopWatch.reset();
        stopWatch.start();
        Result[] results = dawg.subwords(letters.toUpperCase(), pattern.toUpperCase());
        stopWatch.stop();

        if (results != null) {
            System.out.println();

            for (Result result : results) {
                StringBuilder message = new StringBuilder(result.word);
                if (result.wildcardPositions != null) {
                    message.append(" with wildcards at");
                    for (int position : result.wildcardPositions)
                        message.append(" ").append(position);
                }
                System.out.println(message.toString());
                System.out.println();
            }

            System.out.println("Found " + results.length + " matches in " + stopWatch.getTime() + " ms.");
        }

        System.out.println();
    }
}

From source file:hadoop.Main.java

public static void main(String[] args) throws Exception {

    StopWatch timer = new StopWatch();
    timer.start();

    System.out.println("Main program is starting");

    String dir = "hdfs://127.0.1.1:9000/user/fiqie/twitter/";

    Configuration conf = new Configuration();
    conf.set("fs.default.name", "hdfs://127.0.1.1:9000");
    FileSystem fs = FileSystem.get(conf);

    String[] prepareOpts = { dir + "input/small.txt", dir + "output/pr-0.out" };
    ToolRunner.run(new Configuration(), new InitPageRank(), prepareOpts);

    //String[] initOpts = { dir + "output/prepared.out", dir + "output/pr-0.out" };
    //ToolRunner.run(new Configuration(), new InitPageRankDriver(), initOpts);

    for (int i = 1; i <= NUMBER_OF_ITERATION; i++) {
        String previous = dir + "output/pr-" + (i - 1) + ".out";
        String current = dir + "output/pr-" + i + ".out";
        String[] opts = { previous, current };
        ToolRunner.run(new Configuration(), new CalculatePageRank(), opts);

        if (i == NUMBER_OF_ITERATION) {
            String[] finalOpts = { dir + "output/pr-" + i + ".out", dir + "output/pr-final.out" };
            ToolRunner.run(new Configuration(), new FinishPageRank(), finalOpts);
        }
    }

    timer.stop();
    System.out.println("Elapsed " + timer.toString());

}

From source file:elaborate.editor.backend.Indexer.java

@SuppressWarnings("boxing")
public static void main(String[] args) {
    boolean wipeIndexFirst = args.length == 0 ? false : "-w".equals(args[0]);
    StopWatch sw = new StopWatch();
    sw.start();
    ElaborateSolrIndexer solr = new ElaborateSolrIndexer();
    if (wipeIndexFirst) {
        Log.info("clearing index");
        solr.clear();
    }
    EntityManager entityManager = HibernateUtil.getEntityManager();
    try {
        ProjectEntryService projectEntryService = ProjectEntryService.instance();
        projectEntryService.setEntityManager(entityManager);
        List<ProjectEntry> projectentries = projectEntryService.getAll();
        int size = projectentries.size();
        Log.info("indexing {} projectEntries", size);
        int n = 1;
        for (ProjectEntry projectEntry : projectentries) {
            Log.info("indexing projectEntry {} ({}/{} = {}%) (est. time remaining: {})", //
                    new Object[] { //
                            projectEntry.getId(), n, size, //
                            percentage(n, size), //
                            time_remaining(n, size, sw.getTime()) //
                    } //
            );
            solr.index(projectEntry, autoCommit(n));
            n++;
        }
    } finally {
        entityManager.close();
    }
    solr.commit();
    sw.stop();
    Log.info("done in {}", convert(sw.getTime()));
}

From source file:elaborate.editor.backend.AnnotationMarkerScrubber.java

@SuppressWarnings("boxing")
public static void main(String[] args) {
    StopWatch sw = new StopWatch();
    sw.start();
    EntityManager entityManager = HibernateUtil.beginTransaction();
    TranscriptionService ts = TranscriptionService.instance();
    ts.setEntityManager(entityManager);
    try {
        List<Transcription> resultList = entityManager// .
                .createQuery("select t from Transcription t", Transcription.class)//
                .getResultList();
        int size = resultList.size();
        int n = 1;
        for (Transcription t : resultList) {
            Log.info("indexing transcription {} ({}/{} = {}%)",
                    new Object[] { t.getId(), n, size, percentage(n, size) });
            String bodyBefore = t.getBody();
            ts.cleanupAnnotations(t);
            String bodyAfter = t.getBody();
            if (!bodyAfter.equals(bodyBefore)) {
                ProjectEntry projectEntry = t.getProjectEntry();
                String projectname = projectEntry.getProject().getName();
                long entryId = projectEntry.getId();
                Log.info("url: http://test.elaborate.huygens.knaw.nl/projects/{}/entries/{}/transcriptions/{}",
                        projectname, entryId, t.getTextLayer());
                Log.info("body changed:\nbefore: {}\nafter:{}", bodyBefore, bodyAfter);
            }
            n++;
        }
    } finally {
        HibernateUtil.commitTransaction(entityManager);
    }
    sw.stop();
    Log.info("done in {}", convert(sw.getTime()));
}

From source file:br.edu.ufcg.lsd.oursim.ui.CLI.java

/**
 * Example:
 * 
 * <pre>
 *   java -jar oursim.jar -w resources/trace_filtrado_primeiros_1000_jobs.txt -m resources/hostinfo_sdsc.dat -synthetic_av -o oursim_trace.txt
 *   -w resources/trace_filtrado_primeiros_1000_jobs.txt -s persistent -nr 20 -md resources/hostinfo_sdsc.dat -av resources/disponibilidade.txt -o oursim_trace.txt
 *   -w resources/new_iosup_workload.txt -s persistent -pd resources/iosup_site_description.txt -wt iosup -nr 1 -synthetic_av -o oursim_trace.txt
 *   -w resources/new_workload.txt -s persistent -pd resources/marcus_site_description.txt -wt marcus -nr 20 -d -o oursim_trace.txt
 *   1 month + 1 day = 2678400 seconds
 * </pre>
 * 
 * @param args
 * @throws FileNotFoundException
 */
public static void main(String[] args) throws IOException {

    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    List<Closeable> closeables = new ArrayList<Closeable>();

    CommandLine cmd = parseCommandLine(args, prepareOptions(), HELP, USAGE, EXECUTION_LINE);

    File outputFile = (File) cmd.getOptionObject(OUTPUT);
    PrintOutput printOutput = new PrintOutput(outputFile, false);
    JobEventDispatcher.getInstance().addListener(printOutput);
    closeables.add(printOutput);
    if (cmd.hasOption(EXTRACT_REMOTE_WORKLOAD)) {
        File remoteWorkloadFile = (File) cmd.getOptionObject(EXTRACT_REMOTE_WORKLOAD);
        Output remoteWorkloadExtractor = new RemoteTasksExtractorOutput(remoteWorkloadFile);
        closeables.add(remoteWorkloadExtractor);
        JobEventDispatcher.getInstance().addListener(remoteWorkloadExtractor);
    }
    Grid grid = prepareGrid(cmd);

    ComputingElementEventCounter computingElementEventCounter = prepareOutputAccounting(cmd,
            cmd.hasOption(VERBOSE));

    Input<? extends AvailabilityRecord> availability = defineAvailability(cmd, grid.getMapOfPeers());

    prepareOptionalOutputFiles(cmd, grid, (SyntheticAvailabilityCharacterizationAbstract) availability,
            closeables);

    long timeOfFirstSubmission = cmd.getOptionValue(WORKLOAD_TYPE).equals("gwa")
            ? GWAFormat.extractSubmissionTimeFromFirstJob(cmd.getOptionValue(WORKLOAD))
            : 0;
    Workload workload = defineWorkloadType(cmd, cmd.getOptionValue(WORKLOAD), grid.getMapOfPeers(),
            timeOfFirstSubmission);

    JobSchedulerPolicy jobScheduler = defineScheduler(cmd, grid.getListOfPeers());

    OurSim oursim = new OurSim(EventQueue.getInstance(), grid, jobScheduler, workload, availability);

    oursim.setActiveEntity(new ActiveEntityImp());

    if (cmd.hasOption(HALT_SIMULATION)) {
        oursim.addHaltEvent(((Number) cmd.getOptionObject(HALT_SIMULATION)).longValue());
    }

    oursim.start();

    for (Closeable c : closeables) {
        c.close();
    }

    EventQueue.getInstance().clear();

    // append summary statistics to the end of the output file
    FileWriter fw = new FileWriter(cmd.getOptionValue(OUTPUT), true);
    closeables.add(fw);
    stopWatch.stop();
    fw.write("# Simulation                  duration:" + stopWatch + ".\n");

    double utilization = grid.getUtilization();
    double realUtilization = grid.getTrueUtilization();

    int numberOfResourcesByPeer = Integer.parseInt(cmd.getOptionValue(NUM_RESOURCES_BY_PEER, "0"));
    fw.write(formatSummaryStatistics(computingElementEventCounter, "NA", "NA", false, grid.getPeers().size(),
            numberOfResourcesByPeer, utilization, realUtilization, stopWatch.getTime()) + "\n");
    fw.close();

    System.out.println(
            getSummaryStatistics(computingElementEventCounter, "NA", "NA", false, grid.getPeers().size(),
                    numberOfResourcesByPeer, utilization, realUtilization, stopWatch.getTime()));

}