Example usage for com.google.common.base Stopwatch createUnstarted

Introduction

On this page you can find example usage for com.google.common.base Stopwatch createUnstarted, collected from real projects.

Prototype

@CheckReturnValue
public static Stopwatch createUnstarted() 

Documentation

Creates (but does not start) a new stopwatch using System.nanoTime() as its time source.
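Before the project examples, here is a minimal, self-contained sketch of the basic pattern: create the stopwatch before the work, start it once any setup you do not want to measure is done, and read the elapsed time afterwards. The class name and the sleep standing in for real work are illustrative only; the Stopwatch calls are standard Guava API.

import java.util.concurrent.TimeUnit;

import com.google.common.base.Stopwatch;

public class StopwatchDemo {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createUnstarted(); // created, but not yet running
        // ... setup that should not be counted ...
        stopwatch.start(); // begin measuring
        Thread.sleep(100); // placeholder for the work being timed
        stopwatch.stop(); // stop measuring; elapsed() remains readable
        System.out.println("Took " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    }
}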

Usage

From source file:org.apache.drill.exec.store.mapr.db.json.MaprDBJsonRecordReader.java
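In this Drill record reader, the stopwatch is created and started immediately around a batch of document reads, and the elapsed milliseconds are logged together with the record count at the end of next().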

@Override
public int next() {
    Stopwatch watch = Stopwatch.createUnstarted();
    watch.start();

    vectorWriter.allocate();
    vectorWriter.reset();

    int recordCount = 0;
    DBDocumentReaderBase reader = null;

    while (recordCount < BaseValueVector.INITIAL_VALUE_ALLOCATION) {
        vectorWriter.setPosition(recordCount);
        try {
            reader = nextDocumentReader();
            if (reader == null) {
                break; // no more documents for this scanner
            } else if (isSkipQuery()) {
                vectorWriter.rootAsMap().bit("count").writeBit(1);
            } else {
                MapOrListWriterImpl writer = new MapOrListWriterImpl(vectorWriter.rootAsMap());
                if (idOnly) {
                    writeId(writer, reader.getId());
                } else {
                    if (reader.next() != EventType.START_MAP) {
                        throw dataReadError("The document did not start with START_MAP!");
                    }
                    writeToListOrMap(writer, reader);
                }
            }
            recordCount++;
        } catch (UserException e) {
            throw UserException
                    .unsupportedError(e).addContext(String.format("Table: %s, document id: '%s'",
                            table.getPath(), reader == null ? null : IdCodec.asString(reader.getId())))
                    .build(logger);
        } catch (SchemaChangeException e) {
            if (ignoreSchemaChange) {
                logger.warn("{}. Dropping the row from result.", e.getMessage());
                logger.debug("Stack trace:", e);
            } else {
                throw dataReadError(e);
            }
        }
    }

    vectorWriter.setValueCount(recordCount);
    logger.debug("Took {} ms to get {} records", watch.elapsed(TimeUnit.MILLISECONDS), recordCount);
    return recordCount;
}

From source file:org.apache.drill.exec.store.parquet.columnreaders.AsyncPageReader.java
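This Parquet page reader creates the stopwatch unstarted so that buffer allocation is excluded from the measurement, starts it just before decompressing, and feeds the elapsed nanoseconds into its stats.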

private DrillBuf decompress(PageHeader pageHeader, DrillBuf compressedData) {
    DrillBuf pageDataBuf = null;
    Stopwatch timer = Stopwatch.createUnstarted();
    long timeToRead;
    int compressedSize = pageHeader.getCompressed_page_size();
    int uncompressedSize = pageHeader.getUncompressed_page_size();
    pageDataBuf = allocateTemporaryBuffer(uncompressedSize);
    try {
        timer.start();
        CompressionCodecName codecName = parentColumnReader.columnChunkMetaData.getCodec();
        ByteBuffer input = compressedData.nioBuffer(0, compressedSize);
        ByteBuffer output = pageDataBuf.nioBuffer(0, uncompressedSize);
        DecompressionHelper decompressionHelper = new DecompressionHelper(codecName);
        decompressionHelper.decompress(input, compressedSize, output, uncompressedSize);
        pageDataBuf.writerIndex(uncompressedSize);
        timeToRead = timer.elapsed(TimeUnit.NANOSECONDS);
        this.updateStats(pageHeader, "Decompress", 0, timeToRead, compressedSize, uncompressedSize);
    } catch (IOException e) {
        handleAndThrowException(e, "Error decompressing data.");
    }
    return pageDataBuf;
}

From source file:org.zanata.service.impl.MergeTranslationsServiceImpl.java
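Here a stopwatch times an entire merge batch. Since Stopwatch.toString() renders a human-readable duration, the stopped instance is passed directly to the log message.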

private Integer mergeTranslations(final Long sourceVersionId, final Long targetVersionId, final int batchStart,
        final int batchLength, final boolean useNewerTranslation, final List<HLocale> supportedLocales)
        throws Exception {
    final Stopwatch stopwatch = Stopwatch.createUnstarted();
    stopwatch.start();
    List<HTextFlow[]> matches = textFlowDAO.getSourceByMatchedContext(sourceVersionId, targetVersionId,
            batchStart, batchLength);
    Multimap<DocumentLocaleKey, TextFlowTargetStateChange> eventMap = HashMultimap.create();
    Map<DocumentLocaleKey, Map<ContentState, Long>> docStatsMap = Maps.newHashMap();
    Map<DocumentLocaleKey, Long> lastUpdatedTargetId = Maps.newHashMap();
    for (HTextFlow[] results : matches) {
        HTextFlow sourceTf = results[0];
        HTextFlow targetTf = results[1];
        boolean foundChange = false;
        Map<Long, ContentState> localeContentStateMap = Maps.newHashMap();
        for (HLocale hLocale : supportedLocales) {
            HTextFlowTarget sourceTft = sourceTf.getTargets().get(hLocale.getId());
            // only process translated state
            if (sourceTft == null || !sourceTft.getState().isTranslated()) {
                continue;
            }
            HTextFlowTarget targetTft = targetTf.getTargets().get(hLocale.getId());
            if (targetTft == null) {
                targetTft = new HTextFlowTarget(targetTf, hLocale);
                targetTft.setVersionNum(0);
                targetTf.getTargets().put(hLocale.getId(), targetTft);
            }
            if (MergeTranslationsServiceImpl.shouldMerge(sourceTft, targetTft, useNewerTranslation)) {
                foundChange = true;
                ContentState oldState = targetTft.getState();
                localeContentStateMap.put(hLocale.getId(), oldState);
                mergeTextFlowTarget(sourceTft, targetTft);
            }
        }
        if (foundChange) {
            translationStateCacheImpl.clearDocumentStatistics(targetTf.getDocument().getId());
            textFlowDAO.makePersistent(targetTf);
            textFlowDAO.flush();
            for (Map.Entry<Long, ContentState> entry : localeContentStateMap.entrySet()) {
                HTextFlowTarget updatedTarget = targetTf.getTargets().get(entry.getKey());
                DocumentLocaleKey key = new DocumentLocaleKey(targetTf.getDocument().getId(),
                        updatedTarget.getLocale().getLocaleId());
                eventMap.put(key, new TextFlowTargetStateEvent.TextFlowTargetStateChange(targetTf.getId(),
                        updatedTarget.getId(), updatedTarget.getState(), entry.getValue()));
                lastUpdatedTargetId.put(key, updatedTarget.getId());
                Map<ContentState, Long> contentStateDeltas = docStatsMap.get(key) == null ? Maps.newHashMap()
                        : docStatsMap.get(key);
                DocStatsEvent.updateContentStateDeltas(contentStateDeltas, updatedTarget.getState(),
                        entry.getValue(), targetTf.getWordCount());
                docStatsMap.put(key, contentStateDeltas);
            }
        }
    }
    Long actorId = authenticatedAccount.getPerson().getId();
    for (Map.Entry<DocumentLocaleKey, Collection<TextFlowTargetStateChange>> entry : eventMap.asMap()
            .entrySet()) {
        TextFlowTargetStateEvent tftUpdatedEvent = new TextFlowTargetStateEvent(entry.getKey(), targetVersionId,
                actorId, ImmutableList.copyOf(entry.getValue()));
        textFlowTargetStateEvent.fire(tftUpdatedEvent);
    }
    for (Map.Entry<DocumentLocaleKey, Map<ContentState, Long>> entry : docStatsMap.entrySet()) {
        DocStatsEvent docEvent = new DocStatsEvent(entry.getKey(), targetVersionId, entry.getValue(),
                lastUpdatedTargetId.get(entry.getKey()));
        docStatsEvent.fire(docEvent);
    }
    stopwatch.stop();
    log.info("Complete merge translations of {} in {}", matches.size() * supportedLocales.size(), stopwatch);
    return matches.size() * supportedLocales.size();
}

From source file:com.twitter.distributedlog.BKAsyncLogReaderDLSN.java
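This constructor illustrates the field-initialization split: scheduleDelayStopwatch is created unstarted (it is only started later), while readNextDelayStopwatch is created already running.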

BKAsyncLogReaderDLSN(BKDistributedLogManager bkdlm, ScheduledExecutorService executorService,
        OrderedScheduler lockStateExecutor, DLSN startDLSN, Optional<String> subscriberId,
        boolean returnEndOfStreamRecord, boolean deserializeRecordSet, StatsLogger statsLogger) {
    this.bkDistributedLogManager = bkdlm;
    this.executorService = executorService;
    this.bkLedgerManager = bkDistributedLogManager.createReadHandler(subscriberId, lockStateExecutor, this,
            deserializeRecordSet, true);
    sessionExpireWatcher = this.bkLedgerManager.registerExpirationHandler(this);
    LOG.debug("Starting async reader at {}", startDLSN);
    this.startDLSN = startDLSN;
    this.scheduleDelayStopwatch = Stopwatch.createUnstarted();
    this.readNextDelayStopwatch = Stopwatch.createStarted();
    this.positionGapDetectionEnabled = bkdlm.getConf().getPositionGapDetectionEnabled();
    this.idleErrorThresholdMillis = bkdlm.getConf().getReaderIdleErrorThresholdMillis();
    this.returnEndOfStreamRecord = returnEndOfStreamRecord;

    // Failure Injection
    this.failureInjector = AsyncRandomFailureInjector.newBuilder()
            .injectDelays(bkdlm.getConf().getEIInjectReadAheadDelay(),
                    bkdlm.getConf().getEIInjectReadAheadDelayPercent(),
                    bkdlm.getConf().getEIInjectMaxReadAheadDelayMs())
            .injectErrors(false, 10).injectStops(bkdlm.getConf().getEIInjectReadAheadStall(), 10)
            .injectCorruption(bkdlm.getConf().getEIInjectReadAheadBrokenEntries()).build();

    // Stats
    StatsLogger asyncReaderStatsLogger = statsLogger.scope("async_reader");
    futureSetLatency = asyncReaderStatsLogger.getOpStatsLogger("future_set");
    scheduleLatency = asyncReaderStatsLogger.getOpStatsLogger("schedule");
    backgroundReaderRunTime = asyncReaderStatsLogger.getOpStatsLogger("background_read");
    readNextExecTime = asyncReaderStatsLogger.getOpStatsLogger("read_next_exec");
    timeBetweenReadNexts = asyncReaderStatsLogger.getOpStatsLogger("time_between_read_next");
    delayUntilPromiseSatisfied = asyncReaderStatsLogger.getOpStatsLogger("delay_until_promise_satisfied");
    idleReaderError = asyncReaderStatsLogger.getCounter("idle_reader_error");
    idleReaderCheckCount = asyncReaderStatsLogger.getCounter("idle_reader_check_total");
    idleReaderCheckIdleReadRequestCount = asyncReaderStatsLogger
            .getCounter("idle_reader_check_idle_read_requests");
    idleReaderCheckIdleReadAheadCount = asyncReaderStatsLogger.getCounter("idle_reader_check_idle_readahead");

    // Lock the stream if requested. The lock will be released when the reader is closed.
    this.lockStream = false;
    this.idleReaderTimeoutTask = scheduleIdleReaderTaskIfNecessary();
}

From source file:ezbake.groups.service.caching.RedisCacheLayer.java
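A small helper that starts the returned stopwatch only when timer logging is enabled; calling elapsed() on a stopwatch that was never started simply reports zero, so callers need no extra branching.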

private Stopwatch getStopwatch() {
    Stopwatch watch = Stopwatch.createUnstarted();
    if (shouldLogTimers) {
        watch.start();
    }
    return watch;
}

From source file:org.apache.distributedlog.BKAsyncLogReader.java
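The Apache DistributedLog successor of the previous reader keeps the same pattern: scheduleDelayStopwatch is created unstarted, while readNextDelayStopwatch and lastProcessTime are created already running.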

BKAsyncLogReader(BKDistributedLogManager bkdlm, OrderedScheduler scheduler, DLSN startDLSN,
        Optional<String> subscriberId, boolean returnEndOfStreamRecord, StatsLogger statsLogger) {
    this.streamName = bkdlm.getStreamName();
    this.bkDistributedLogManager = bkdlm;
    this.scheduler = scheduler;
    this.readHandler = bkDistributedLogManager.createReadHandler(subscriberId, this, true);
    LOG.debug("Starting async reader at {}", startDLSN);
    this.startDLSN = startDLSN;
    this.scheduleDelayStopwatch = Stopwatch.createUnstarted();
    this.readNextDelayStopwatch = Stopwatch.createStarted();
    this.positionGapDetectionEnabled = bkdlm.getConf().getPositionGapDetectionEnabled();
    this.idleErrorThresholdMillis = bkdlm.getConf().getReaderIdleErrorThresholdMillis();
    this.returnEndOfStreamRecord = returnEndOfStreamRecord;

    // Stats
    StatsLogger asyncReaderStatsLogger = statsLogger.scope("async_reader");
    futureSetLatency = asyncReaderStatsLogger.getOpStatsLogger("future_set");
    scheduleLatency = asyncReaderStatsLogger.getOpStatsLogger("schedule");
    backgroundReaderRunTime = asyncReaderStatsLogger.getOpStatsLogger("background_read");
    readNextExecTime = asyncReaderStatsLogger.getOpStatsLogger("read_next_exec");
    timeBetweenReadNexts = asyncReaderStatsLogger.getOpStatsLogger("time_between_read_next");
    delayUntilPromiseSatisfied = asyncReaderStatsLogger.getOpStatsLogger("delay_until_promise_satisfied");
    idleReaderError = asyncReaderStatsLogger.getCounter("idle_reader_error");
    idleReaderCheckCount = asyncReaderStatsLogger.getCounter("idle_reader_check_total");
    idleReaderCheckIdleReadRequestCount = asyncReaderStatsLogger
            .getCounter("idle_reader_check_idle_read_requests");
    idleReaderCheckIdleReadAheadCount = asyncReaderStatsLogger.getCounter("idle_reader_check_idle_readahead");

    // Lock the stream if requested. The lock will be released when the reader is closed.
    this.lockStream = false;
    this.idleReaderTimeoutTask = scheduleIdleReaderTaskIfNecessary();
    this.lastProcessTime = Stopwatch.createStarted();
}

From source file:org.geogig.osm.cli.commands.OSMHistoryImport.java
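This importer reuses one unstarted stopwatch across loop iterations: sw.reset().start() at the top of each changeset, sw.stop() when it finishes, and the stopwatch itself is printed for a readable per-changeset duration.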

private void importOsmHistory(GeogigCLI cli, Console console, HistoryDownloader downloader,
        @Nullable Envelope featureFilter) throws IOException {

    ensureTypesExist(cli);

    Iterator<Changeset> changesets = downloader.fetchChangesets();

    GeoGIG geogig = cli.getGeogig();

    boolean initialized = false;
    Stopwatch sw = Stopwatch.createUnstarted();

    while (changesets.hasNext() && !silentListener.isCanceled()) {
        sw.reset().start();
        Changeset changeset = changesets.next();
        if (changeset.isOpen()) {
            throw new CommandFailedException(
                    "Can't import past changeset " + changeset.getId() + " as it is still open.");
        }
        String desc = String.format("obtaining osm changeset %,d...", changeset.getId());
        console.print(desc);
        console.flush();

        Optional<Iterator<Change>> opchanges = changeset.getChanges().get();
        if (!opchanges.isPresent()) {
            updateBranchChangeset(geogig, changeset.getId());
            console.println(" does not apply.");
            console.flush();
            sw.stop();
            continue;
        }
        Iterator<Change> changes = opchanges.get();
        console.print(" inserting...");
        console.flush();

        long changeCount = insertChanges(cli, changes, featureFilter);
        if (!silentListener.isCanceled()) {
            console.print(String.format(" Applied %,d changes, staging...", changeCount));
            console.flush();
            geogig.command(AddOp.class).setProgressListener(silentListener).call();
            commit(cli, changeset);

            if (args.autoIndex && !initialized) {
                initializeIndex(cli);
                initialized = true;
            }
        }
        console.println(String.format(" (%s)", sw.stop()));
        console.flush();
    }
}

From source file:com.twitter.distributedlog.readahead.ReadAheadWorker.java
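Another constructor that initializes a stopwatch field: resumeStopWatch is created unstarted so the worker can start and stop it later around the operations it measures.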

public ReadAheadWorker(DistributedLogConfiguration conf, DynamicDistributedLogConfiguration dynConf,
        ZKLogMetadataForReader logMetadata, BKLogHandler ledgerManager, ZooKeeperClient zkc,
        OrderedScheduler scheduler, LedgerHandleCache handleCache, LedgerReadPosition startPosition,
        ReadAheadCache readAheadCache, boolean isHandleForReading,
        ReadAheadExceptionsLogger readAheadExceptionsLogger, StatsLogger handlerStatsLogger,
        StatsLogger readAheadPerStreamStatsLogger, AlertStatsLogger alertStatsLogger,
        AsyncFailureInjector failureInjector, AsyncNotification notification) {
    // Log information
    this.fullyQualifiedName = logMetadata.getFullyQualifiedName();
    this.conf = conf;
    this.dynConf = dynConf;
    this.logMetadata = logMetadata;
    this.bkLedgerManager = ledgerManager;
    this.isHandleForReading = isHandleForReading;
    this.notification = notification;
    // Resources
    this.zkc = zkc;
    this.scheduler = scheduler;
    this.handleCache = handleCache;
    this.readAheadCache = readAheadCache;
    // Readahead status
    this.startReadPosition = new LedgerReadPosition(startPosition);
    this.nextReadAheadPosition = new LedgerReadPosition(startPosition);
    // LogSegments
    this.getLedgersWatcher = this.zkc.getWatcherManager().registerChildWatcher(logMetadata.getLogSegmentsPath(),
            this);
    // Failure Detection
    this.failureInjector = failureInjector;
    // Tracing
    this.metadataLatencyWarnThresholdMillis = conf.getMetadataLatencyWarnThresholdMillis();
    this.noLedgerExceptionOnReadLACThreshold = conf
            .getReadAheadNoSuchLedgerExceptionOnReadLACErrorThresholdMillis() / conf.getReadAheadWaitTime();
    this.tracker = new ReadAheadTracker(logMetadata.getLogName(), readAheadCache,
            ReadAheadPhase.SCHEDULE_READAHEAD, readAheadPerStreamStatsLogger);
    this.resumeStopWatch = Stopwatch.createUnstarted();
    // Misc
    this.readAheadSkipBrokenEntries = conf.getReadAheadSkipBrokenEntries();
    // Stats
    this.alertStatsLogger = alertStatsLogger;
    this.readAheadPerStreamStatsLogger = readAheadPerStreamStatsLogger;
    StatsLogger readAheadStatsLogger = handlerStatsLogger.scope("readahead_worker");
    readAheadWorkerWaits = readAheadStatsLogger.getCounter("wait");
    readAheadEntryPiggyBackHits = readAheadStatsLogger.getCounter("entry_piggy_back_hits");
    readAheadEntryPiggyBackMisses = readAheadStatsLogger.getCounter("entry_piggy_back_misses");
    readAheadReadEntriesStat = readAheadStatsLogger.getOpStatsLogger("read_entries");
    readAheadReadLACAndEntryCounter = readAheadStatsLogger.getCounter("read_lac_and_entry_counter");
    readAheadCacheFullCounter = readAheadStatsLogger.getCounter("cache_full");
    readAheadSkippedBrokenEntries = readAheadStatsLogger.getCounter("skipped_broken_entries");
    readAheadCacheResumeStat = readAheadStatsLogger.getOpStatsLogger("resume");
    readAheadLacLagStats = readAheadStatsLogger.getOpStatsLogger("read_lac_lag");
    longPollInterruptionStat = readAheadStatsLogger.getOpStatsLogger("long_poll_interruption");
    notificationExecutionStat = readAheadStatsLogger.getOpStatsLogger("notification_execution");
    metadataReinitializationStat = readAheadStatsLogger.getOpStatsLogger("metadata_reinitialization");
    idleReaderWarn = readAheadStatsLogger.getCounter("idle_reader_warn");
    this.readAheadExceptionsLogger = readAheadExceptionsLogger;
}

From source file:put.ci.cevo.framework.algorithms.ApacheCMAES.java
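This CMA-ES implementation times each phase of a generation with a single stopwatch, calling reset() followed by start() between phases and converting the elapsed milliseconds to seconds for the log line.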

/**
 * {@inheritDoc}
 */
@Override
protected PointValuePair doOptimize() {
    // -------------------- Initialization --------------------------------

    isMinimize = getGoalType().equals(GoalType.MINIMIZE);
    final double[] guess = getStartPoint();
    // number of objective variables/problem dimension
    dimension = guess.length;
    initializeCMA(guess);
    iterations = 0;
    double bestValue = (isMinimize ? Double.MAX_VALUE : Double.MIN_VALUE);
    push(fitnessHistory, bestValue);
    PointValuePair optimum = new PointValuePair(getStartPoint(), isMinimize ? bestValue : -bestValue);
    PointValuePair lastResult = null;

    // -------------------- Generation Loop --------------------------------
    EvaluatedPopulation<double[]> evaluatedPopulation = null;

    Stopwatch stopwatch = Stopwatch.createUnstarted();
    generationLoop: for (iterations = 1; iterations <= maxIterations; iterations++) {
        stopwatch.reset();
        stopwatch.start();
        incrementIterationCount();

        // Generate and evaluate lambda offspring
        final RealMatrix arz = randn1(dimension, lambda);
        final RealMatrix arx = zeros(dimension, lambda);
        final double[] fitness = new double[lambda];
        // generate random offspring
        for (int k = 0; k < lambda; k++) {
            RealMatrix arxk = null;
            for (int i = 0; i < checkFeasableCount + 1; i++) {
                if (diagonalOnly <= 0) {
                    arxk = xmean.add(BD.multiply(arz.getColumnMatrix(k)).scalarMultiply(sigma)); // m + sig * Normal(0,C)
                } else {
                    arxk = xmean.add(times(diagD, arz.getColumnMatrix(k)).scalarMultiply(sigma));
                }
                //if (i >= checkFeasableCount ||
                //      fitfun.isFeasible(arxk.getColumn(0))) {
                //   break;
                //}
                // regenerate random arguments for row
                arz.setColumn(k, randn(dimension));
            }
            copyColumn(arxk, 0, arx, k);
            //try {
            //   valuePenaltyPairs[k] = fitfun.value(arx.getColumn(k)); // compute fitness
            //} catch (TooManyEvaluationsException e) {
            //   break generationLoop;
            //}
        }

        double newPopTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        stopwatch.reset();
        stopwatch.start();
        ArrayList<double[]> population = new ArrayList<>(lambda);
        // This is mine. I ignore constraints.
        for (int k = 0; k < lambda; ++k) {
            population.add(arx.getColumn(k));
        }

        evaluatedPopulation = populationEvaluator.evaluate(population, iterations - 1, random);
        final ValuePenaltyPair[] valuePenaltyPairs = new ValuePenaltyPair[lambda];
        for (int k = 0; k < lambda; ++k) {
            valuePenaltyPairs[k] = new ValuePenaltyPair(evaluatedPopulation.getPopulation().get(k).getFitness(),
                    0.0);
        }

        // Compute fitnesses by adding value and penalty after scaling by value range.
        double valueRange = valueRange(valuePenaltyPairs);
        for (int iValue = 0; iValue < valuePenaltyPairs.length; iValue++) {
            fitness[iValue] = valuePenaltyPairs[iValue].value + valuePenaltyPairs[iValue].penalty * valueRange;
            if (!isMinimize)
                fitness[iValue] = -fitness[iValue];
        }
        double evalTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        stopwatch.reset();
        stopwatch.start();

        // Sort by fitness and compute weighted mean into xmean
        final int[] arindex = sortedIndices(fitness);
        // Calculate new xmean, this is selection and recombination
        final RealMatrix xold = xmean; // for speed up of Eq. (2) and (3)
        final RealMatrix bestArx = selectColumns(arx, MathArrays.copyOf(arindex, mu));
        xmean = bestArx.multiply(weights);
        final RealMatrix bestArz = selectColumns(arz, MathArrays.copyOf(arindex, mu));
        final RealMatrix zmean = bestArz.multiply(weights);
        final boolean hsig = updateEvolutionPaths(zmean, xold);
        if (diagonalOnly <= 0) {
            updateCovariance(hsig, bestArx, arz, arindex, xold);
        } else {
            updateCovarianceDiagonalOnly(hsig, bestArz);
        }
        // Adapt step size sigma - Eq. (5)
        sigma *= FastMath.exp(FastMath.min(1, (normps / chiN - 1) * cs / damps));
        final double bestFitness = fitness[arindex[0]];
        final double worstFitness = fitness[arindex[arindex.length - 1]];
        if (bestValue > bestFitness) {
            bestValue = bestFitness;
            lastResult = optimum;
            optimum = new PointValuePair(bestArx.getColumn(0), isMinimize ? bestFitness : -bestFitness);
            if (getConvergenceChecker() != null && lastResult != null
                    && getConvergenceChecker().converged(iterations, optimum, lastResult)) {
                break generationLoop;
            }
        }
        // handle termination criteria
        // Break, if fitness is good enough
        if (stopFitness != 0 && bestFitness < (isMinimize ? stopFitness : -stopFitness)) {
            break generationLoop;
        }
        final double[] sqrtDiagC = sqrt(diagC).getColumn(0);
        final double[] pcCol = pc.getColumn(0);
        for (int i = 0; i < dimension; i++) {
            if (sigma * FastMath.max(FastMath.abs(pcCol[i]), sqrtDiagC[i]) > stopTolX) {
                break;
            }
            if (i >= dimension - 1) {
                break generationLoop;
            }
        }
        for (int i = 0; i < dimension; i++) {
            if (sigma * sqrtDiagC[i] > stopTolUpX) {
                break generationLoop;
            }
        }
        final double historyBest = min(fitnessHistory);
        final double historyWorst = max(fitnessHistory);
        if (iterations > 2 && FastMath.max(historyWorst, worstFitness)
                - FastMath.min(historyBest, bestFitness) < stopTolFun) {
            break generationLoop;
        }
        if (iterations > fitnessHistory.length && historyWorst - historyBest < stopTolHistFun) {
            break generationLoop;
        }
        // condition number of the covariance matrix exceeds 1e14
        if (max(diagD) / min(diagD) > 1e7) {
            break generationLoop;
        }
        // user defined termination
        if (getConvergenceChecker() != null) {
            final PointValuePair current = new PointValuePair(bestArx.getColumn(0),
                    isMinimize ? bestFitness : -bestFitness);
            if (lastResult != null && getConvergenceChecker().converged(iterations, current, lastResult)) {
                break generationLoop;
            }
            lastResult = current;
        }
        // Adjust step size in case of equal function values (flat fitness)
        if (bestValue == fitness[arindex[(int) (0.1 + lambda / 4.)]]) {
            sigma *= FastMath.exp(0.2 + cs / damps);
        }
        if (iterations > 2
                && FastMath.max(historyWorst, bestFitness) - FastMath.min(historyBest, bestFitness) == 0) {
            sigma *= FastMath.exp(0.2 + cs / damps);
        }
        // store best in history
        push(fitnessHistory, bestFitness);
        if (generateStatistics) {
            statisticsSigmaHistory.add(sigma);
            statisticsFitnessHistory.add(bestFitness);
            statisticsMeanHistory.add(xmean.transpose());
            statisticsDHistory.add(diagD.transpose().scalarMultiply(1E5));
        }

        double cmaesTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        stopwatch.reset();
        stopwatch.start();
        listener.onNextIteraction(evaluatedPopulation);
        double listernerTime = stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000.0;
        logger.info(String.format("NewPop: %.2f, Eval: %.2f, CMAES: %.2f, Listerner: %.2f", newPopTime,
                evalTime, cmaesTime, listernerTime));
    }
    listener.onLastIteraction(evaluatedPopulation);

    return optimum;
}

From source file:org.geogig.osm.cli.commands.OSMHistoryImport.java
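The final example accumulates time across many short lookups: start() and stop() bracket each findTreeChild call, so the stopwatch totals their combined duration (the printout that would report it is commented out).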

/**
 * @param primitive
 * @param thisChangePointCache
 * @return
 */
private Geometry parseGeometry(Context context, Primitive primitive,
        Map<Long, Coordinate> thisChangePointCache) {

    if (primitive instanceof Relation) {
        return null;
    }

    if (primitive instanceof Node) {
        Optional<Point> location = ((Node) primitive).getLocation();
        return location.orNull();
    }

    final Way way = (Way) primitive;
    final ImmutableList<Long> nodes = way.getNodes();

    List<Coordinate> coordinates = Lists.newArrayList(nodes.size());
    FindTreeChild findTreeChild = context.command(FindTreeChild.class);
    Optional<ObjectId> nodesTreeId = context.command(ResolveTreeish.class)
            .setTreeish(Ref.STAGE_HEAD + ":" + NODE_TYPE_NAME).call();
    if (nodesTreeId.isPresent()) {
        RevTree headTree = context.objectDatabase().getTree(nodesTreeId.get());
        findTreeChild.setParent(headTree);
    }
    int findTreeChildCalls = 0;
    Stopwatch findTreeChildSW = Stopwatch.createUnstarted();
    ObjectStore objectDatabase = context.objectDatabase();
    for (Long nodeId : nodes) {
        Coordinate coord = thisChangePointCache.get(nodeId);
        if (coord == null) {
            findTreeChildCalls++;
            String fid = String.valueOf(nodeId);
            findTreeChildSW.start();
            Optional<NodeRef> nodeRef = findTreeChild.setChildPath(fid).call();
            findTreeChildSW.stop();
            Optional<org.locationtech.geogig.model.Node> ref = Optional.absent();
            if (nodeRef.isPresent()) {
                ref = Optional.of(nodeRef.get().getNode());
            }

            if (ref.isPresent()) {
                final int locationAttIndex = 6;
                ObjectId objectId = ref.get().getObjectId();
                RevFeature revFeature = objectDatabase.getFeature(objectId);
                Point p = (Point) revFeature.get(locationAttIndex, GEOMF).orNull();
                if (p != null) {
                    coord = p.getCoordinate();
                    thisChangePointCache.put(Long.valueOf(nodeId), coord);
                }
            }
        }
        if (coord != null) {
            coordinates.add(coord);
        }
    }
    if (findTreeChildCalls > 0) {
        //            System.err.printf("%,d findTreeChild calls (%s)\n", findTreeChildCalls,
        //                    findTreeChildSW);
    }
    if (coordinates.size() < 2) {
        return null;
    }
    return GEOMF.createLineString(coordinates.toArray(new Coordinate[coordinates.size()]));
}