Example usage for com.google.common.base Stopwatch stop

Introduction

This page lists example usages of com.google.common.base Stopwatch#stop().

Prototype

public Stopwatch stop() 

Document

Stops the stopwatch. Subsequent reads of the elapsed time return the fixed duration measured up to the point where stop() was called; stopping a stopwatch that is not running throws an IllegalStateException.
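
A minimal, self-contained sketch of the typical pattern (illustrative only, not taken from the projects listed under Usage below): create a started stopwatch, call stop() once the timed work is done, then read the frozen duration with elapsed().

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchStopExample {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createStarted(); // starts timing immediately
        Thread.sleep(150);                               // stand-in for the work being measured
        stopwatch.stop();                                // freezes the elapsed duration
        // elapsed() now keeps returning the duration measured up to stop()
        System.out.println("took " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
        // reset() clears the stopwatch so the same instance can time another operation
        stopwatch.reset().start();
    }
}

Since stop() returns the Stopwatch itself, calls can be chained, e.g. stopwatch.stop().elapsed(TimeUnit.MICROSECONDS), as several of the examples below do.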

Usage

From source file:org.apache.drill.exec.client.QuerySubmitter.java

public int submitQuery(DrillClient client, String plan, String type, String format, int width)
        throws Exception {

    PrintingResultsListener listener;

    String[] queries;
    QueryType queryType;
    type = type.toLowerCase();
    switch (type) {
    case "sql":
        queryType = QueryType.SQL;
        queries = plan.trim().split(";");
        break;
    case "logical":
        queryType = QueryType.LOGICAL;
        queries = new String[] { plan };
        break;
    case "physical":
        queryType = QueryType.PHYSICAL;
        queries = new String[] { plan };
        break;
    default:
        System.out.println("Invalid query type: " + type);
        return -1;
    }

    Format outputFormat;
    format = format.toLowerCase();
    switch (format) {
    case "csv":
        outputFormat = Format.CSV;
        break;
    case "tsv":
        outputFormat = Format.TSV;
        break;
    case "table":
        outputFormat = Format.TABLE;
        break;
    default:
        System.out.println("Invalid format type: " + format);
        return -1;
    }
    Stopwatch watch = Stopwatch.createUnstarted(); // factory method; the public no-arg constructor was removed in later Guava versions
    for (String query : queries) {
        listener = new PrintingResultsListener(client.getConfig(), outputFormat, width);
        watch.start();
        client.runQuery(queryType, query, listener);
        int rows = listener.await();
        System.out.println(String.format("%d record%s selected (%f seconds)", rows, rows > 1 ? "s" : "",
                (float) watch.elapsed(TimeUnit.MILLISECONDS) / (float) 1000));
        if (query != queries[queries.length - 1]) {
            System.out.println();
        }
        watch.stop();
        watch.reset();
    }
    return 0;

}

From source file:ch.ge.ve.protopoc.service.protocol.DefaultAuthority.java

@Override
public ObliviousTransferResponse handleBallot(Integer voterIndex, BallotAndQuery ballotAndQuery) {
    Preconditions.checkState(publicCredentials != null,
            "The public credentials need to have been retrieved first");

    log.info(String.format("Authority %d handling ballot", j));

    Stopwatch stopwatch = Stopwatch.createStarted();
    List<BigInteger> publicIdentificationCredentials = publicCredentials.stream().map(p -> p.x)
            .collect(Collectors.toList());
    if (!voteCastingAuthorityAlgorithms.checkBallot(voterIndex, ballotAndQuery, systemPublicKey,
            publicIdentificationCredentials, ballotEntries)) {
        throw new IncorrectBallotRuntimeException(
                String.format("Ballot for voter %d was deemed invalid", voterIndex));
    }
    stopwatch.stop();
    ballotVerificationTimes.add(stopwatch.elapsed(TimeUnit.MILLISECONDS));

    stopwatch.reset().start();
    ObliviousTransferResponseAndRand responseAndRand = voteCastingAuthorityAlgorithms.genResponse(voterIndex,
            ballotAndQuery.getBold_a(), systemPublicKey, electionSet.getBold_n(), electorateData.getK(),
            electorateData.getP());
    ballotEntries.add(new BallotEntry(voterIndex, ballotAndQuery, responseAndRand.getBold_r()));
    ObliviousTransferResponse beta = responseAndRand.getBeta();
    stopwatch.stop();
    queryResponseTimes.add(stopwatch.elapsed(TimeUnit.MILLISECONDS));

    return beta;
}

From source file:iuno.tdm.paymentservice.BitcoinInvoice.java

/**
 * This constructor checks a new invoice for sanity.
 *
 * @param id   unique id of invoice object
 * @param inv  invoice as defined in restful api
 * @param addr address for incoming payment (likely the payments service own wallet)
 * @throws IllegalArgumentException thrown if provided invoice contains illegal values
 */
BitcoinInvoice(UUID id, Invoice inv, Address addr, Address addr2,
        BitcoinInvoiceCallbackInterface callbackInterface, DeterministicSeed seed)
        throws IllegalArgumentException {
    bitcoinInvoiceCallbackInterface = callbackInterface;
    incomingTxList.addStateListener(incomingTxStateListener);
    transferTxList.addStateListener(transferTxStateListener);
    // check sanity of invoice
    totalAmount = inv.getTotalAmount();
    if (totalAmount < Transaction.MIN_NONDUST_OUTPUT.getValue())
        throw new IllegalArgumentException("invoice amount is less than bitcoin minimum dust output");

    // check values (transfer shall be lower than totalamount)
    for (AddressValuePair avp : inv.getTransfers()) {
        Address a = Address.fromBase58(params, avp.getAddress());
        long value = avp.getCoin();
        if (value < Transaction.MIN_NONDUST_OUTPUT.getValue())
            throw new IllegalArgumentException(
                    "transfer amount to " + avp.getAddress() + " is less than bitcoin minimum dust output");
        transferAmount += value;
        transfers.add(new TransferPair(a, Coin.valueOf(value)));
    }
    if (totalAmount < (transferAmount + Transaction.MIN_NONDUST_OUTPUT.getValue()))
        throw new IllegalArgumentException("total invoice amount minus sum of transfer amounts is dust");

    // expiration date shall be in the future
    expiration = inv.getExpiration();
    if (isExpired())
        throw new IllegalArgumentException("expiration date must be in the future");

    invoiceId = id;
    referenceId = inv.getReferenceId();
    receiveAddress = addr;
    transferAddress = addr2;

    Stopwatch watch = Stopwatch.createStarted();
    group = new KeyChainGroup(params, seed);
    group.setLookaheadSize(4);
    couponWallet = new Wallet(params, group);

    watch.stop();
    logger.info("creating wallet took {}", watch);

    couponWallet.addChangeEventListener(this); // FIXME add appropriate call to remove the listener
    couponWallet.addTransactionConfidenceEventListener(this); // FIXME add appropriate call to remove the listener
}

From source file:org.agatom.springatom.data.oid.creators.DefaultSOidCreator.java

@Override
public SOid fromString(final String from) throws Exception {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(String.format("fromString(from=%s)", from));
    }
    final SOid oid;
    final Stopwatch stopwatch = Stopwatch.createStarted();
    try {
        final String split[] = from.split(":");
        Assert.isTrue(split.length == 3,
                String.format("OID[%s] is invalid, it should be in format A:B:C", from));

        final String oidPrefix = split[TYPE_PREFIX_INDEX];
        final Class<?> oidClass = Class.forName(split[CLASS_NAME_INDEX]);
        final String oidId = split[ID_INDEX];

        oid = this.getOidObject(oidPrefix, oidClass, oidId);
    } catch (Exception exp) {
        LOGGER.error(String.format("fromString(from=%s) failed...", from), Throwables.getRootCause(exp));
        throw exp;
    }

    stopwatch.stop();

    if (LOGGER.isTraceEnabled()) {
        final long elapsed = stopwatch.elapsed(TimeUnit.MILLISECONDS);
        LOGGER.trace(String.format("fromString(from=%s) to SOid(oid=%s) took %d ms", from, oid, elapsed));
    }

    return oid;
}

From source file:org.apache.druid.segment.realtime.appenderator.AppenderatorPlumber.java

@Override
public void persist(final Committer committer) {
    final Stopwatch runExecStopwatch = Stopwatch.createStarted();
    appenderator.persistAll(committer);

    final long startDelay = runExecStopwatch.elapsed(TimeUnit.MILLISECONDS);
    metrics.incrementPersistBackPressureMillis(startDelay);
    if (startDelay > WARN_DELAY) {
        log.warn("Ingestion was throttled for [%,d] millis because persists were pending.", startDelay);
    }
    runExecStopwatch.stop();
}

From source file:com.twitter.distributedlog.lock.ZKDistributedLock.java

/**
 * Asynchronously acquire the lock. Technically the try phase of this operation--which adds us to the waiter
 * list--is executed synchronously, but the lock wait itself doesn't block.
 */
public synchronized Future<ZKDistributedLock> asyncAcquire() {
    if (null != lockAcquireFuture) {
        return Future
                .exception(new UnexpectedException("Someone is already acquiring/acquired lock " + lockPath));
    }
    final Promise<ZKDistributedLock> promise = new Promise<ZKDistributedLock>(
            new Function<Throwable, BoxedUnit>() {
                @Override
                public BoxedUnit apply(Throwable cause) {
                    lockStateExecutor.submit(lockPath, new Runnable() {
                        @Override
                        public void run() {
                            asyncClose();
                        }
                    });
                    return BoxedUnit.UNIT;
                }
            });
    final Stopwatch stopwatch = Stopwatch.createStarted();
    promise.addEventListener(new FutureEventListener<ZKDistributedLock>() {
        @Override
        public void onSuccess(ZKDistributedLock lock) {
            acquireStats.registerSuccessfulEvent(stopwatch.stop().elapsed(TimeUnit.MICROSECONDS));
        }

        @Override
        public void onFailure(Throwable cause) {
            acquireStats.registerFailedEvent(stopwatch.stop().elapsed(TimeUnit.MICROSECONDS));
            // release the lock if fail to acquire
            asyncClose();
        }
    });
    this.lockAcquireFuture = promise;
    lockStateExecutor.submit(lockPath, new Runnable() {
        @Override
        public void run() {
            doAsyncAcquireWithSemaphore(promise, lockTimeout);
        }
    });
    return promise;
}

From source file:org.apache.hive.ptest.execution.HostExecutor.java

/**
 * Executes the test batch on the drone in question. If the command
 * exits with a status code of 255 throw an AbortDroneException.
 */
private boolean executeTestBatch(Drone drone, TestBatch batch, Set<TestBatch> failedTestResults)
        throws IOException, SSHExecutionException, AbortDroneException {
    String scriptName = "hiveptest-" + batch.getName() + ".sh";
    File script = new File(mLocalScratchDirectory, scriptName);
    Map<String, String> templateVariables = Maps.newHashMap(mTemplateDefaults);
    templateVariables.put("instanceName", drone.getInstanceName());
    templateVariables.put("batchName", batch.getName());
    templateVariables.put("testArguments", batch.getTestArguments());
    templateVariables.put("localDir", drone.getLocalDirectory());
    templateVariables.put("logDir", drone.getLocalLogDirectory());
    Preconditions.checkArgument(StringUtils.isNotBlank(batch.getTestModuleRelativeDir()));
    templateVariables.put("testModule", batch.getTestModuleRelativeDir());
    String command = Templates.getTemplateResult("bash $localDir/$instanceName/scratch/" + script.getName(),
            templateVariables);
    Templates.writeTemplateResult("batch-exec.vm", script, templateVariables);
    copyToDroneFromLocal(drone, script.getAbsolutePath(), "$localDir/$instanceName/scratch/" + scriptName);
    script.delete();
    Stopwatch sw = Stopwatch.createStarted();
    mLogger.info(drone + " executing " + batch + " with " + command);
    RemoteCommandResult sshResult = new SSHCommand(mSSHCommandExecutor, drone.getPrivateKey(), drone.getUser(),
            drone.getHost(), drone.getInstance(), command, true).call();
    sw.stop();
    mLogger.info("Completed executing tests for batch [{}] on host {}. ElapsedTime(ms)={}",
            new Object[] { batch.getName(), getHost().toShortString(), sw.elapsed(TimeUnit.MILLISECONDS) });
    File batchLogDir = null;
    if (sshResult.getExitCode() == Constants.EXIT_CODE_UNKNOWN) {
        throw new AbortDroneException(
                "Drone " + drone.toString() + " exited with " + Constants.EXIT_CODE_UNKNOWN + ": " + sshResult);
    }
    if (mShutdown) {
        mLogger.warn("Shutting down host " + mHost.getName());
        return false;
    }
    boolean result;
    if (sshResult.getExitCode() != 0 || sshResult.getException() != null) {
        result = false;
        batchLogDir = Dirs.create(new File(mFailedTestLogDir, batch.getName()));
    } else {
        result = true;
        batchLogDir = Dirs.create(new File(mSuccessfulTestLogDir, batch.getName()));
    }
    copyFromDroneToLocal(drone, batchLogDir.getAbsolutePath(), drone.getLocalLogDirectory() + "/",
            fetchLogsForSuccessfulTests || !result);
    File logFile = new File(batchLogDir, String.format("%s.txt", batch.getName()));
    PrintWriter writer = new PrintWriter(logFile);
    writer.write(String.format("result = '%s'\n", sshResult.toString()));
    writer.write(String.format("output = '%s'\n", sshResult.getOutput()));
    if (sshResult.getException() != null) {
        sshResult.getException().printStackTrace(writer);
    }
    writer.close();
    return result;
}

From source file:org.codice.ddf.configuration.migration.ImportMigrationContextImpl.java

private void doImportGivenVersionLogic(Consumer<String> importVersionLogic) {
    if (migratable != null) {
        final String version = getMigratableVersion().orElse(null);

        if (skip) {
            LOGGER.debug("Skipping optional migratable [{}] with version [{}]", id, version);
            return;
        }
        LOGGER.debug("Importing migratable [{}] from version [{}]...", id, version);
        Stopwatch stopwatch = null;

        if (LOGGER.isDebugEnabled()) {
            stopwatch = Stopwatch.createStarted();
        }
        try {
            importVersionLogic.accept(version);
        } finally {
            inputStreams.forEach(IOUtils::closeQuietly); // we do not care if we failed to close them
        }
        if (LOGGER.isDebugEnabled() && (stopwatch != null)) {
            LOGGER.debug("Imported time for {}: {}", id, stopwatch.stop());
        }
    } else if (id != null) { // not a system context
        LOGGER.warn("unable to import migration data for migratable [{}]; migratable is no longer available",
                id);
        report.record(new MigrationException(Messages.IMPORT_UNKNOWN_DATA_FOUND_ERROR));
    } // else - no errors and nothing to do for the system context
}

From source file:ch.ge.ve.protopoc.service.protocol.DefaultAuthority.java

@Override
public FinalizationCodePart handleConfirmation(Integer voterIndex, Confirmation confirmation)
        throws IncorrectConfirmationRuntimeException {
    Preconditions.checkState(publicCredentials != null,
            "The public credentials need to have been retrieved first");
    Stopwatch stopwatch = Stopwatch.createStarted();
    List<BigInteger> publicConfirmationCredentials = publicCredentials.stream().map(p -> p.y)
            .collect(Collectors.toList());

    if (!voteConfirmationAuthorityAlgorithms.checkConfirmation(voterIndex, confirmation,
            publicConfirmationCredentials, ballotEntries, confirmationEntries)) {
        throw new IncorrectConfirmationRuntimeException(
                "Confirmation for voter " + voterIndex + " was deemed invalid");
    }
    stopwatch.stop();
    confirmationVerificationTimes.add(stopwatch.elapsed(TimeUnit.MILLISECONDS));

    confirmationEntries.add(new ConfirmationEntry(voterIndex, confirmation));

    stopwatch.reset().start();
    FinalizationCodePart finalization = voteConfirmationAuthorityAlgorithms.getFinalization(voterIndex,
            electorateData.getP(), ballotEntries);
    stopwatch.stop();
    finalizationComputationTimes.add(stopwatch.elapsed(TimeUnit.MILLISECONDS));

    return finalization;
}

From source file:org.lenskit.eval.temporal.TemporalEvaluator.java

/**
 * During the evaluation, it will replay the ratings, try to predict each one, and
 * write the prediction, TARMSE and the rating to the output file
 */
public void execute() throws IOException, RecommenderBuildException {
    loadInputs();

    //Initialize recommender engine and recommender
    LenskitRecommenderEngine lre = null;
    Recommender recommender = null;

    //Start try block -- will try to write output on file
    try (TableWriter tableWriter = openOutput(); SequenceWriter extWriter = openExtendedOutput()) {

        List<Rating> ratings = ObjectStreams
                .makeList(dataSource.streamEvents(Rating.class, SortOrder.TIMESTAMP));
        BinaryRatingDAO limitedDao = dataSource.createWindowedView(0);

        //Initialize local variables, will use to calculate RMSE
        double sse = 0;
        int n = 0;
        // Initialize build parameters
        long buildTime = 0L;
        int buildsCount = 0;
        int ratingsSinceLastBuild = 0;

        //Loop through ratings
        for (Rating r : ratings) {
            Map<String, Object> json = new HashMap<>();
            json.put("userId", r.getUserId());
            json.put("itemId", r.getItemId());
            json.put("timestamp", r.getTimestamp());
            json.put("rating", r.getValue());

            if (recommender == null
                    || (r.getTimestamp() > 0 && limitedDao.getLimitTimestamp() < r.getTimestamp())) {
                limitedDao = dataSource.createWindowedView(r.getTimestamp());
                LenskitConfiguration config = new LenskitConfiguration();
                config.addComponent(limitedDao);

                // rebuild the recommender if it is older than the rebuild period or has not been built yet
                if ((r.getTimestamp() - buildTime >= spec.getRebuildPeriod()) || lre == null) {
                    buildTime = r.getTimestamp();
                    buildsCount++;

                    logger.info("building model {} at time {}, {} ratings since last build", buildsCount,
                            buildTime, ratingsSinceLastBuild);

                    Stopwatch timer = Stopwatch.createStarted();
                    lre = LenskitRecommenderEngine.newBuilder()
                            .addConfiguration(algorithm.getConfigurations().get(0))
                            .addConfiguration(config, ModelDisposition.EXCLUDED).build();
                    timer.stop();
                    logger.info("built model {} in {}", buildsCount, timer);

                    ratingsSinceLastBuild = 0;
                }
                if (recommender != null) {
                    recommender.close();
                }
                recommender = lre.createRecommender(config);
            }
            ratingsSinceLastBuild += 1;

            json.put("modelAge", r.getTimestamp() - buildTime);

            // get rating prediction if available
            Double predict = null;
            RatingPredictor predictor = recommender.getRatingPredictor();
            Result predictionResult = null;
            if (predictor != null) {
                predictionResult = predictor.predict(r.getUserId(), r.getItemId());
            }

            if (predictionResult != null) {
                predict = predictionResult.getScore();
                logger.debug("predicted {} for rating {}", predict, r);
                json.put("prediction", predict);
            } else {
                json.put("prediction", null);
            }

            /***calculate Time Averaged RMSE***/
            double rmse = 0.0;
            if (predict != null && !Double.isNaN(predict)) {
                double err = predict - r.getValue();
                sse += err * err;
                n++;
                rmse = sqrt(sse / n);
            }

            // Compute recommendations
            Integer rank = null;
            ItemRecommender irec = recommender.getItemRecommender();
            if (irec != null) {
                rank = getRecommendationRank(limitedDao, r, json, irec);

            }

            /**writes the Prediction Score, Rank and TARMSE on file.**/
            tableWriter.writeRow(r.getUserId(), r.getItemId(), r.getValue(), r.getTimestamp(), predict, rmse,
                    r.getTimestamp() - buildTime, rank, buildsCount);
            if (extWriter != null) {
                extWriter.write(json);
            }
        } // loop ratings

    } finally {
        if (recommender != null) {
            recommender.close();
        }
    }
}