Example usage for com.google.common.base Stopwatch start

List of usage examples for com.google.common.base Stopwatch start

Introduction

On this page you can find usage examples for com.google.common.base.Stopwatch#start, drawn from open-source projects.

Prototype

public Stopwatch start() 

Source Link

Document

Starts the stopwatch. Returns this Stopwatch instance; throws IllegalStateException if the stopwatch is already running.
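
A minimal, self-contained sketch of the typical lifecycle (assuming Guava 15 or newer, where instances come from the static factory methods rather than the now-deprecated public constructor):

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchStartDemo {
    public static void main(String[] args) throws InterruptedException {
        // createUnstarted() followed by start() is equivalent to createStarted()
        Stopwatch stopwatch = Stopwatch.createUnstarted();
        stopwatch.start();

        Thread.sleep(100); // stand-in for the work being timed

        stopwatch.stop();
        System.out.println("Elapsed: " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");

        // start() throws IllegalStateException if the stopwatch is already running;
        // reset() zeroes the elapsed time and returns it to the unstarted state
        stopwatch.reset();
        stopwatch.start();
    }
}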

Usage

From source file:jobs.BuildIndexJob.java

@Override
public void doJob() throws Exception {
    Logger.info("Computing the indexes...");
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();

    CharArraySet stopSet = CharArraySet.copy(Version.LUCENE_47, StandardAnalyzer.STOP_WORDS_SET);
    stopSet.add("we");
    stopSet.add("pmid");
    stopSet.add("were");
    stopSet.add("from");
    stopSet.add("reply");
    stopSet.add("can");
    stopSet.add("between");
    stopSet.add("using");
    stopSet.add("used");
    stopSet.add("however");
    stopSet.add("which");
    stopSet.add("our");
    stopSet.add("among");
    stopSet.add("while");
    stopSet.add("this");

    StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_47, stopSet);
    //        Analyzer analyzer = new CustomStopWordsStandardAnalyzer(Version.LUCENE_47);
    //Does [1-5]-grams, as determined by previous graphs
    ShingleAnalyzerWrapper shingleAnalyzer = new ShingleAnalyzerWrapper(analyzer, 2, 5);
    int now = Integer.parseInt((String) play.Play.configuration.get("analysis.year"));

    for (int t = now; t >= now - 1; t--) {
        //Create a folder for the index
        //TODO delete the folders first
        VirtualFile.fromRelativePath("/indexes/index-" + t).getRealFile().mkdir();
        Directory directory = FSDirectory
                .open(VirtualFile.fromRelativePath("/indexes/index-" + t).getRealFile());
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_47, shingleAnalyzer);
        IndexWriter iwriter = new IndexWriter(directory, config);

        //Retrieve the citations given a year t
        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
        Date start = sdf.parse("01/01/" + t);
        Date end = sdf.parse("31/12/" + t);

        Logger.info("Query for year " + t + "...");
        List<MorphiaCitation> citations = MorphiaCitation.q().filter("created <=", end)
                .filter("created >=", start).asList();

        Logger.info("Citation size: " + citations.size());
        int total = citations.size();
        int counter = 0;

        //Iterate over the citations and create the index
        for (MorphiaCitation citation : citations) {
            counter++;
            Logger.info("i (" + t + "): " + counter + "/" + total);
            Document doc = new Document();
            String contents = "";

            doc.add(new Field("pmid", citation.pmid, TextField.TYPE_STORED));

            if (citation.abstractText != null) {
                contents += citation.abstractText;
            }

            if (citation.title != null) {
                contents += citation.title;
            }

            if (!contents.equals("")) {
                doc.add(new Field("contents", contents, TextField.TYPE_STORED));
            }

            iwriter.addDocument(doc);

        }
        iwriter.close();
    }
    stopwatch.stop();
    Utils.emailAdmin("Indexing done. ", "Job finished in " + stopwatch.elapsed(TimeUnit.MINUTES) + " minutes.");
    Logger.info("Time to index the documents: " + stopwatch.elapsed(TimeUnit.MINUTES));
}

From source file:org.apache.drill.exec.store.mongo.MongoRecordReader.java

@Override
public int next() {
    if (cursor == null) {
        logger.info("Filters Applied : " + filters);
        logger.info("Fields Selected :" + fields);
        cursor = collection.find(filters).projection(fields).batchSize(100).iterator();
    }

    writer.allocate();
    writer.reset();

    int docCount = 0;
    // note: this public constructor was deprecated in Guava 15; newer code would
    // use Stopwatch.createStarted() in place of this two-step construction
    Stopwatch watch = new Stopwatch();
    watch.start();

    try {
        while (docCount < BaseValueVector.INITIAL_VALUE_ALLOCATION && cursor.hasNext()) {
            writer.setPosition(docCount);
            String doc = cursor.next().toJson();
            jsonReader.setSource(doc.getBytes(Charsets.UTF_8));
            jsonReader.write(writer);
            docCount++;
        }

        jsonReader.ensureAtLeastOneField(writer);

        writer.setValueCount(docCount);
        logger.debug("Took {} ms to get {} records", watch.elapsed(TimeUnit.MILLISECONDS), docCount);
        return docCount;
    } catch (IOException e) {
        String msg = "Failure while reading document. - Parser was at record: " + (docCount + 1);
        logger.error(msg, e);
        throw new DrillRuntimeException(msg, e);
    }
}

From source file:net.monofraps.gradlebukkit.tasks.RunBukkit.java

@TaskAction
public void doWork() throws IOException, LifecycleExecutionException, InterruptedException {
    final int latestDownloadedBuild = findBukkitBuildToRun();
    if (latestDownloadedBuild < 0) {
        throw new LifecycleExecutionException("Couldn't find Bukkit jar to run.");
    }

    final String bukkitJarName = "bukkit-" + latestDownloadedBuild + ".jar";

    final RemoteDebugging debugConfiguration = ((Bukkit) getProject().getExtensions().getByName("bukkit"))
            .getDebugSettings();
    final String debuggingArguments = (debugConfiguration == null) ? "" : debugConfiguration.getJvmArguments();

    final ProcessBuilder bukkitProcessBuilder = new ProcessBuilder("java", debuggingArguments, "-jar",
            bukkitJarName);
    bukkitProcessBuilder.environment().putAll(System.getenv());
    bukkitProcessBuilder.directory(new File(getProject().getBuildDir(), "bukkit"));

    getLogger().lifecycle("Starting Bukkit...");
    final Process bukkitProcess = bukkitProcessBuilder.start();

    final StreamGrabber errorGrabber = new StreamGrabber(bukkitProcess.getErrorStream());
    final StreamGrabber stdoutGrabber = new StreamGrabber(bukkitProcess.getInputStream());
    errorGrabber.start();
    stdoutGrabber.start();

    final PrintWriter stdinWriter = new PrintWriter(bukkitProcess.getOutputStream());
    String line;
    while ((line = System.console().readLine()) != null && !line.equals("gterm")) {
        stdinWriter.write(line);
        stdinWriter.write("\n");
        stdinWriter.flush();

        try {
            bukkitProcess.exitValue();
            break;
        } catch (final IllegalThreadStateException ignored) {
        }
    }

    try {
        bukkitProcess.exitValue();
    } catch (final IllegalThreadStateException ex) {
        final Thread joiner = new Thread() {
            @Override
            public void run() {
                bukkitProcess.destroy();
            }
        };

        joiner.start();
        final Stopwatch stopwatch = new Stopwatch();
        stopwatch.start();
        while (joiner.isAlive()) {
            if (stopwatch.elapsed(TimeUnit.MILLISECONDS) > 60) { // note: 60 ms, so this fires after the first sleep; 60 seconds was likely intended
                joiner.interrupt();
                joiner.join(5000);
            }
            Thread.sleep(500);
        }
        stopwatch.stop();
    }

    getLogger().lifecycle("Bukkit process exited with exit code " + bukkitProcess.exitValue());
}
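
In the snippet above, the watchdog threshold of 60 is in milliseconds, so the joiner is interrupted after the first 500 ms sleep. A hedged sketch of the presumably intended behavior, waiting up to a full minute before interrupting (the one-minute timeout is an assumption, not taken from the source):

        // hypothetical fix: compare against one minute instead of 60 ms
        final Stopwatch watchdog = Stopwatch.createStarted();
        while (joiner.isAlive()) {
            if (watchdog.elapsed(TimeUnit.MILLISECONDS) > TimeUnit.MINUTES.toMillis(1)) {
                joiner.interrupt();
                joiner.join(5000); // give it five more seconds to finish
                break;
            }
            Thread.sleep(500);
        }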

From source file:org.apache.drill.exec.physical.impl.TopN.PriorityQueueTemplate.java

@Override
public void generate() throws SchemaChangeException {
    Stopwatch watch = new Stopwatch();
    watch.start();
    final DrillBuf drillBuf = allocator.buffer(4 * queueSize);
    finalSv4 = new SelectionVector4(drillBuf, queueSize, 4000);
    for (int i = queueSize - 1; i >= 0; i--) {
        finalSv4.set(i, pop());
    }
    logger.debug("Took {} us to generate output of {}", watch.elapsed(TimeUnit.MICROSECONDS),
            finalSv4.getTotalCount());
}

From source file:com.arpnetworking.metrics.mad.performance.FilePerfTestBase.java

/**
 * Runs a filter.
 *
 * @param pipelineConfigurationFile Pipeline configuration file.
 * @param duration Timeout period.
 * @param variables Substitution key-value pairs into pipeline configuration file.
 * @throws IOException if configuration cannot be loaded.
 */
protected void benchmark(final String pipelineConfigurationFile, final Duration duration,
        final ImmutableMap<String, String> variables) throws IOException {
    // Replace any variables in the configuration file
    String configuration = Resources.toString(Resources.getResource(pipelineConfigurationFile), Charsets.UTF_8);
    for (final Map.Entry<String, String> entry : variables.entrySet()) {
        configuration = configuration.replace(entry.getKey(), entry.getValue());
    }

    // Load the specified stock configuration
    final PipelineConfiguration stockPipelineConfiguration = new StaticConfiguration.Builder()
            .addSource(new JsonNodeLiteralSource.Builder().setSource(configuration).build())
            .setObjectMapper(PipelineConfiguration.createObjectMapper(_injector)).build()
            .getRequiredAs(PipelineConfiguration.class);

    // Canary tracking
    LOGGER.info(String.format("Expected canaries; periods=%s", stockPipelineConfiguration.getPeriods()));
    final CountDownLatch latch = new CountDownLatch(stockPipelineConfiguration.getPeriods().size());
    final Set<Period> periods = Sets.newConcurrentHashSet();

    // Create custom "canary" sink
    final ListeningSink sink = new ListeningSink((periodicData) -> {
        if (periodicData != null) {
            for (final String metricName : periodicData.getData().keys()) {
                if (TestFileGenerator.CANARY.equals(metricName)) {
                    if (periods.add(periodicData.getPeriod())) {
                        LOGGER.info(String.format("Canary flew; filter=%s, period=%s", this.getClass(),
                                periodicData.getPeriod()));
                        latch.countDown();
                    }
                }
            }
        }
        return null;
    });

    // Add the custom "canary" sink
    final List<Sink> benchmarkSinks = Lists.newArrayList(stockPipelineConfiguration.getSinks());
    benchmarkSinks.add(sink);

    // Create the custom configuration
    final PipelineConfiguration benchmarkPipelineConfiguration = OvalBuilder.<PipelineConfiguration, PipelineConfiguration.Builder>clone(
            stockPipelineConfiguration).setSinks(benchmarkSinks).build();

    // Instantiate the pipeline
    final Pipeline pipeline = new Pipeline(benchmarkPipelineConfiguration);

    // Execute the pipeline until the canary flies the coop
    try {
        LOGGER.debug(String.format("Launching pipeline; configuration=%s", pipelineConfigurationFile));
        final Stopwatch timer = Stopwatch.createUnstarted();
        timer.start();
        pipeline.launch();

        if (!latch.await(duration.getMillis(), TimeUnit.MILLISECONDS)) {
            LOGGER.error("Test timed out");
            throw new RuntimeException("Test timed out");
        }

        timer.stop();
        LOGGER.info(String.format("Performance filter result; filter=%s, seconds=%s", this.getClass(),
                timer.elapsed(TimeUnit.SECONDS)));

    } catch (final InterruptedException e) {
        Thread.interrupted(); // note: this clears the interrupt flag; Thread.currentThread().interrupt() would preserve it
        throw new RuntimeException("Test interrupted");
    } finally {
        pipeline.shutdown();
    }
}

From source file:jobs.ComputeTrendsJob.java

@Override
public void doJob() throws Exception {
    Logger.info("Trends computation started...");
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();

    int now = Integer.parseInt((String) play.Play.configuration.get("analysis.year"));
    //Retrieve the citations given a year t
    Logger.info("Query for documents year " + now + "...");
    SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
    Date start = sdf.parse("01/01/" + now);
    Date end = sdf.parse("31/12/" + now);
    long totalDocsNow = MorphiaCitation.q().filter("created <=", end).filter("created >=", start).count();
    Logger.info("Number of documents: " + totalDocsNow);

    int then = now - 1;
    Logger.info("Query for documents year " + then + "...");
    start = sdf.parse("01/01/" + then);
    end = sdf.parse("31/12/" + then);
    long totalDocsThen = MorphiaCitation.q().filter("created <=", end).filter("created >=", start).count();
    Logger.info("Number of documents: " + totalDocsThen);

    Logger.info("Previous year: " + then);

    //Retrieve all the phrases in the database, and compute
    Logger.info("Retrieving phrases...");

    List<MorphiaPhrase> phrases = MorphiaPhrase.findAll();
    int total = phrases.size();
    int counter = 0;

    for (MorphiaPhrase phrase : phrases) {
        Stopwatch time = Stopwatch.createUnstarted();
        time.start();
        counter++;
        Logger.info("i: " + counter + "/" + total + " (" + phrase.value + ")");
        if (phrase.frequencyThen != 0) {
            //std(c, t) = doc(c, t) / doc(t)
            //Trend: ( std(c, now) - std(c, then) ) / std(c, then)
            //Volumetric: trend(c, delta) * doc(c, now)
            //doc(now) = totalDocsNow
            //doc(then) = totalDocsThen
            //doc(c, now) = frequencyNow
            //doc(c, then) = frequencyThen
            //std(c, now) = frequencyNow / totalDocsNow = stdNow
            double stdNow = (double) phrase.frequencyNow / totalDocsNow;
            Logger.info("phrase.frequencyNow: " + phrase.frequencyNow);
            Logger.info("stdNow: " + stdNow);
            //std(c, then) = frequencyThen / totalDocsThen = stdThen
            double stdThen = (double) phrase.frequencyThen / totalDocsThen;
            Logger.info("frequencyThen: " + phrase.frequencyThen);
            Logger.info("stdThen: " + stdThen);
            //trend(c, delta) = ( stdNow - stdThen ) / stdThen
            double trend = (stdNow - stdThen) / stdThen * 100;
            Logger.info("Trend: " + trend);
            double volumetricTrend = trend * phrase.frequencyNow;
            Logger.info("Volumetric trend: " + volumetricTrend);
            phrase.trend = trend;
            phrase.volumetricTrend = volumetricTrend;
            phrase.displayTrend = new DecimalFormat("#.00").format(trend);
        } else {
            phrase.isNew = true;
        }

        phrase.save();
    }

    //Compute the rank
    int rank = 1;

    Logger.info("Computing rank...");
    List<MorphiaPhrase> rankPhrases = MorphiaPhrase.q().filter("isNew", false).order("-trend").asList();
    for (MorphiaPhrase phrase : rankPhrases) {
        phrase.rank = rank;
        phrase.save();
        rank++;
    }

    Logger.info("Job done.");
    stopwatch.stop();
    Utils.emailAdmin("Trends computed", "Job finished in " + stopwatch.elapsed(TimeUnit.MINUTES) + " minutes.");

    //TODO: remove terms with numbers trailing e.g. october 2012, however 2, 2012 were
    //remove extra stop words too, one on the top of the volumetric list
    //merge the strings together, no positive score?
}
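
To make the formulas in the comments concrete, a small worked sketch with hypothetical counts (the numbers are illustrative only):

// hypothetical: a phrase appears in 50 of 1000 docs now and in 20 of 800 docs then
double stdNow  = 50.0 / 1000.0;                       // std(c, now)  = 0.05
double stdThen = 20.0 / 800.0;                        // std(c, then) = 0.025
double trend   = (stdNow - stdThen) / stdThen * 100;  // +100.0 percent
double volumetricTrend = trend * 50;                  // 5000.0, weighting the trend by current volume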

From source file:org.jclouds.virtualbox.functions.CreateAndInstallVm.java

@Override
public IMachine apply(MasterSpec masterSpec) {
    VmSpec vmSpec = masterSpec.getVmSpec();
    IsoSpec isoSpec = masterSpec.getIsoSpec();
    String masterName = vmSpec.getVmName();
    IMachine masterMachine = checkNotNull(createAndRegisterMachineFromIsoIfNotAlreadyExists.apply(masterSpec),
            "master machine");
    // Launch machine and wait for it to come online
    machineController.ensureMachineIsLaunched(masterName);
    String installationKeySequence = isoSpec.getInstallationKeySequence().replace("PRECONFIGURATION_URL",
            preconfigurationUrl);
    configureOsInstallationWithKeyboardSequence(masterName, installationKeySequence);

    masterMachine.setExtraData(GUEST_OS_USER, masterSpec.getLoginCredentials().getUser());
    masterMachine.setExtraData(GUEST_OS_PASSWORD, masterSpec.getLoginCredentials().getPassword());

    SshClient client = sshClientForIMachine.apply(masterMachine);
    logger.debug(">> awaiting installation to finish node(%s)", masterName);
    Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();
    checkState(sshResponds.apply(client), "timed out waiting for guest %s to be accessible via ssh",
            masterName);
    stopwatch.stop();
    logger.debug(String.format("Elapsed time for the OS installation: %d minutes",
            // note: this conversion yields seconds, not the minutes the message claims
            TimeUnit.SECONDS.convert(stopwatch.elapsed(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS)));
    NodeMetadata nodeMetadata = imachineToNodeMetadata.apply(masterMachine);

    logger.debug(">> awaiting post-installation actions on vm: %s", masterName);
    ListenableFuture<ExecResponse> execCleanup = machineUtils.runScriptOnNode(nodeMetadata,
            call("cleanupUdevIfNeeded"), RunScriptOptions.NONE);
    ExecResponse cleanupResponse = Futures.getUnchecked(execCleanup);
    checkState(cleanupResponse.getExitStatus() == 0, "post-installation actions on vm(%s) failed", masterName);

    logger.debug(">> awaiting installation of guest additions on vm: %s", masterName);
    ListenableFuture<ExecResponse> execInstallGA = machineUtils.runScriptOnNode(nodeMetadata,
            new InstallGuestAdditions(vmSpec, version), RunScriptOptions.NONE);

    logger.debug(">> check installation of guest additions on vm: %s", masterName);
    ListenableFuture<ExecResponse> checkGAinstallation = machineUtils.runScriptOnNode(nodeMetadata,
            call("checkVBoxService"), RunScriptOptions.NONE);
    ExecResponse checkGAinstallationResponse = Futures.getUnchecked(checkGAinstallation);
    checkState(checkGAinstallationResponse.getExitStatus() == 0,
            "check installation of guest additions on vm(%s) " + "failed", masterName);

    machineController.ensureMachineIsShutdown(masterName);

    // detach DVD and ISOs, if needed
    Iterable<IMediumAttachment> mediumAttachments = Iterables.filter(
            masterMachine.getMediumAttachmentsOfController("IDE Controller"),
            new Predicate<IMediumAttachment>() {
                public boolean apply(IMediumAttachment in) {
                    return in.getMedium() != null && in.getMedium().getDeviceType().equals(DeviceType.DVD);
                }
            });
    for (IMediumAttachment iMediumAttachment : mediumAttachments) {
        logger.debug("<< iMedium(%s) detached from (%s)", iMediumAttachment.getMedium().getName(),
                masterMachine.getName());
        machineUtils.sharedLockMachineAndApply(masterMachine.getName(), new DetachDistroMediumFromMachine(
                iMediumAttachment.getController(), iMediumAttachment.getPort(), iMediumAttachment.getDevice()));
    }
    return masterMachine;
}

From source file:org.apache.drill.exec.physical.impl.TopN.PriorityQueueTemplate.java

@Override
public void add(FragmentContext context, RecordBatchData batch) throws SchemaChangeException {
    Stopwatch watch = new Stopwatch();
    watch.start();
    if (hyperBatch == null) {
        hyperBatch = new ExpandableHyperContainer(batch.getContainer());
    } else {
        hyperBatch.addBatch(batch.getContainer());
    }

    doSetup(context, hyperBatch, null); // may not need to do this every time

    int count = 0;
    SelectionVector2 sv2 = null;
    if (hasSv2) {
        sv2 = batch.getSv2();
    }
    for (; queueSize < limit && count < batch.getRecordCount(); count++) {
        heapSv4.set(queueSize, batchCount, hasSv2 ? sv2.getIndex(count) : count);
        queueSize++;
        siftUp();
    }
    for (; count < batch.getRecordCount(); count++) {
        heapSv4.set(limit, batchCount, hasSv2 ? sv2.getIndex(count) : count);
        if (compare(limit, 0) < 0) {
            swap(limit, 0);
            siftDown();
        }
    }
    batchCount++;
    if (hasSv2) {
        sv2.clear();
    }
    logger.debug("Took {} us to add {} records", watch.elapsed(TimeUnit.MICROSECONDS), count);
}

From source file:org.apache.kudu.client.DeadlineTracker.java

/**
 * Creates a new tracker, using the specified stopwatch, and starts it right now.
 * The stopwatch is reset if it was already running.
 * @param stopwatch Specific Stopwatch to use
 */
public DeadlineTracker(Stopwatch stopwatch) {
    if (stopwatch.isRunning()) {
        stopwatch.reset();
    }
    this.stopwatch = stopwatch.start();
}
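
A hedged usage sketch: because the constructor resets a running stopwatch before chaining start(), the tracker always begins timing from zero regardless of the state of the Stopwatch it is handed:

// both trackers begin timing from zero
DeadlineTracker fresh  = new DeadlineTracker(Stopwatch.createUnstarted());
DeadlineTracker reused = new DeadlineTracker(Stopwatch.createStarted()); // reset, then restarted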

From source file:com.facebook.presto.split.NativeSplitManager.java

@Override
public List<Partition> getPartitions(TableHandle tableHandle, Map<ColumnHandle, Object> bindings) {
    Stopwatch partitionTimer = new Stopwatch();
    partitionTimer.start();

    checkArgument(tableHandle instanceof NativeTableHandle, "Table must be a native table");

    TableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);

    checkState(tableMetadata != null, "no metadata for %s found", tableHandle);

    Set<TablePartition> tablePartitions = shardManager.getPartitions(tableHandle);

    log.debug("Partition retrieval, native table %s (%d partitions): %dms", tableHandle, tablePartitions.size(),
            partitionTimer.elapsed(TimeUnit.MILLISECONDS));

    Multimap<String, ? extends PartitionKey> allPartitionKeys = shardManager.getAllPartitionKeys(tableHandle);
    Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(tableHandle);

    log.debug("Partition key retrieval, native table %s (%d keys): %dms", tableHandle, allPartitionKeys.size(),
            partitionTimer.elapsed(TimeUnit.MILLISECONDS));

    List<Partition> partitions = ImmutableList.copyOf(
            Collections2.transform(tablePartitions, new PartitionFunction(columnHandles, allPartitionKeys)));

    log.debug("Partition generation, native table %s (%d partitions): %dms", tableHandle, partitions.size(),
            partitionTimer.elapsed(TimeUnit.MILLISECONDS));

    return partitions;
}
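
Note that elapsed() is cumulative: the three debug lines above all report time measured from start(), not per-phase durations. A hedged sketch of per-phase timing, should phase breakdowns be wanted (the variable names are illustrative):

Stopwatch timer = Stopwatch.createStarted();
// ... retrieve partitions ...
long partitionMs = timer.elapsed(TimeUnit.MILLISECONDS);
// ... retrieve partition keys ...
long keysMs = timer.elapsed(TimeUnit.MILLISECONDS) - partitionMs; // delta, not cumulative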