Example usage for org.apache.commons.lang.time StopWatch getTime

Introduction

This page collects example usages of org.apache.commons.lang.time.StopWatch.getTime().

Prototype

public long getTime() 

Document

Gets the time on the stopwatch, in milliseconds.

This is either the time between start and the moment this method is called, or the amount of time between start and stop.
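
A minimal, self-contained sketch of that behavior (the class name and sleep interval here are illustrative, not taken from any of the sources below):

import org.apache.commons.lang.time.StopWatch;

public class StopWatchGetTimeDemo {
    public static void main(String[] args) throws InterruptedException {
        StopWatch watch = new StopWatch();
        watch.start();
        Thread.sleep(250);
        // While the watch is running, getTime() returns the milliseconds elapsed so far.
        System.out.println("running: " + watch.getTime() + " msec");
        watch.stop();
        // After stop(), getTime() is frozen at the start-to-stop duration.
        System.out.println("stopped: " + watch.getTime() + " msec");
    }
}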

Usage

From source file: org.inria.wordpress.enduser.xml2wdpconf.tools.ATLTransformationManager.java

private void launch(HashMap<String, Object> options, InputStream atlFileInputStream) {
    StopWatch timeTransfo = new StopWatch();
    timeTransfo.start();
    transformationLauncher.launch(ILauncher.RUN_MODE, new NullProgressMonitor(), options, atlFileInputStream);
    System.out.println("Execution time - Transformation only ('launch' method) - Transformation part : "
            + (timeTransfo.getTime() / 1000.0));
}

From source file: org.janusgraph.graphdb.idmanagement.VariableLongTest.java

private void readWriteTest(final ReadWriteLong impl, long maxValue, long jump, boolean negative,
        boolean backward) {
    Preconditions.checkArgument(maxValue % jump == 0);
    long allocate = maxValue / jump * 8 * (negative ? 2 : 1);
    Preconditions.checkArgument(allocate < (1 << 28));
    WriteBuffer wb = new WriteByteBuffer((int) allocate);
    int num = 0;
    StopWatch w = new StopWatch();
    w.start();
    for (long i = (negative ? -maxValue : 0); i <= maxValue; i += jump) {
        impl.write(wb, i);
        num++;
    }
    //for (int i=0;i<b.remaining();i++) System.out.print(b.get(i)+"|");
    w.stop();
    ReadBuffer rb = wb.getStaticBuffer().asReadBuffer();
    log.info("Writing " + num + " longs in " + rb.length() + " bytes. in time: " + w.getTime());

    final ReadVerify read = new ReadVerify() {
        @Override
        public void next(ReadBuffer rb, long expected) {
            int beforePos = rb.getPosition();
            long value = impl.read(rb);
            assertEquals(expected, value);
            int length = Math.abs(rb.getPosition() - beforePos);
            assertEquals("On: " + expected, length, impl.length(expected));
        }
    };

    if (backward) {
        rb.movePositionTo(rb.length());
        for (long i = maxValue; i != (negative ? -maxValue : 0); i -= jump) {
            read.next(rb, i);
        }
    } else {
        for (long i = (negative ? -maxValue : 0); i <= maxValue; i += jump) {
            read.next(rb, i);
        }
    }

    //Test boundaries
    wb = new WriteByteBuffer(512);
    impl.write(wb, 0);
    impl.write(wb, Long.MAX_VALUE);
    if (negative)
        impl.write(wb, -Long.MAX_VALUE);
    rb = wb.getStaticBuffer().asReadBuffer();
    if (backward) {
        rb.movePositionTo(rb.length());
        if (negative)
            assertEquals(-Long.MAX_VALUE, impl.read(rb));
        assertEquals(Long.MAX_VALUE, impl.read(rb));
        assertEquals(0, impl.read(rb));
    } else {
        assertEquals(0, impl.read(rb));
        assertEquals(Long.MAX_VALUE, impl.read(rb));
        if (negative)
            assertEquals(-Long.MAX_VALUE, impl.read(rb));
    }
}

From source file: org.jasig.schedassist.impl.relationship.advising.AdvisorListRelationshipDataSourceImpl.java

/**
 * This method deletes all existing rows from the advisorlist table, then
 * re-populates it from the configured advisor list resource.
 * 
 * This method is marked with Spring's {@link Transactional} annotation and
 * should only be executed when transactional support is available.
 * 
 * @see Transactional
 */
@Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRES_NEW)
public synchronized void reloadData() {
    final String propertyValue = System.getProperty("org.jasig.schedassist.runScheduledTasks", "true");
    if (Boolean.parseBoolean(propertyValue)) {
        String currentTerm = TermCalculator.getCurrentTerm();
        if (isResourceUpdated(advisorListResource)) {
            LOG.info("resource updated, reloading advisorList data");
            List<StudentAdvisorAssignment> records = readResource(advisorListResource, currentTerm);

            LOG.info("deleting all existing records from advisorlist table");
            StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            this.getJdbcTemplate().execute("delete from advisorlist");
            long deleteTime = stopWatch.getTime();
            LOG.info("finished deleting existing (" + deleteTime + " msec), starting batch insert");
            stopWatch.reset();
            stopWatch.start();
            SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(records.toArray());
            this.getSimpleJdbcTemplate().batchUpdate(
                    "insert into advisorlist (advisor_emplid, advisor_relationship, student_emplid, term_description, term_number, advisor_type, committee_role) values (:advisorEmplid, :advisorRelationshipDescription, :studentEmplid, :termDescription, :termNumber, :advisorType, :committeeRole)",
                    batch);
            long insertTime = stopWatch.getTime();
            stopWatch.stop();
            LOG.info("batch insert complete (" + insertTime + " msec)");
            LOG.info("reloadData complete (total time: " + (insertTime + deleteTime) + " msec)");
            this.lastReloadTimestamp = new Date();
            try {
                this.resourceLastModified = advisorListResource.lastModified();
            } catch (IOException e) {
                LOG.debug("ignoring IOException from accessing Resource.lastModified()");
            }
        } else {
            LOG.info("resource not modified since last reload, skipping");
        }
    } else {
        LOG.debug("ignoring reloadData as 'org.jasig.schedassist.runScheduledTasks' set to false");
    }
}

From source file: org.jasig.schedassist.impl.relationship.CSVRelationshipDataSourceImpl.java

/**
 * This method deletes all existing rows from the csv_relationships table, then
 * re-populates it from the configured CSV resource.
 * 
 * This method is marked with Spring's {@link Transactional} annotation, and if
 * the Scheduling Assistant application is running it should only be executed when
 * transactional support is available.
 * 
 * @see Transactional
 */
@Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRES_NEW)
public synchronized void reloadData() {
    final String propertyValue = System.getProperty("org.jasig.schedassist.runScheduledTasks", "true");
    if (Boolean.parseBoolean(propertyValue)) {
        if (isResourceUpdated(csvResource)) {
            LOG.info("resource updated, reloading advisorList data");
            //List<StudentAdvisorAssignment> records = readResource(advisorListResource, currentTerm);
            List<CSVRelationship> records = new ArrayList<CSVRelationship>();
            try {
                records = readCSVResource(csvResource);
            } catch (IOException e) {
                LOG.error("caught IOException reading csv data source", e);
                return;
            }

            if (records.isEmpty()) {
                LOG.warn("resource returned empty set, skipping reloadData");
                return;
            }

            LOG.info("deleting all existing records from csv_relationships table");
            StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            this.getJdbcTemplate().execute("delete from csv_relationships");
            long deleteTime = stopWatch.getTime();
            LOG.info("finished deleting existing (" + deleteTime + " msec), starting batch insert");
            stopWatch.reset();
            stopWatch.start();
            SqlParameterSource[] batch = SqlParameterSourceUtils.createBatch(records.toArray());
            this.getSimpleJdbcTemplate().batchUpdate(
                    "insert into csv_relationships (owner_id, visitor_id, rel_description) values (:ownerIdentifier, :visitorIdentifier, :relationshipDescription)",
                    batch);
            long insertTime = stopWatch.getTime();
            stopWatch.stop();
            LOG.info("batch insert complete (" + insertTime + " msec)");
            LOG.info("reloadData complete (total time: " + (insertTime + deleteTime) + " msec)");
            this.lastReloadTimestamp = new Date();
            try {
                this.resourceLastModified = csvResource.lastModified();
            } catch (IOException e) {
                LOG.debug("ignoring IOException from accessing Resource.lastModified()");
            }
        } else {
            LOG.info("resource not modified since last reload, skipping");
        }
    } else {
        LOG.debug("ignoring reloadData as 'org.jasig.schedassist.runScheduledTasks' set to false");
    }
}
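
Both reloadData examples above time consecutive phases with a single StopWatch: getTime() captures the elapsed milliseconds of the first phase, then reset() and start() zero the watch for the next one. A condensed sketch of that pattern, with hypothetical phase methods standing in for the JDBC calls:

StopWatch stopWatch = new StopWatch();
stopWatch.start();
deletePhase();                          // hypothetical: e.g. the DELETE statement
long deleteTime = stopWatch.getTime();  // msec spent in the first phase
stopWatch.reset();                      // back to the unstarted state
stopWatch.start();                      // restart from zero
insertPhase();                          // hypothetical: e.g. the batch insert
long insertTime = stopWatch.getTime();  // msec spent in the second phase only
stopWatch.stop();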

From source file: org.jasig.schedassist.oraclecalendar.OracleSessionPoolableObjectFactory.java

/**
 * Cast the 2nd argument as a {@link Session}.
 * If non-null, call {@link Session#disconnect(int)}.
 *
 *  (non-Javadoc)
 * @see org.apache.commons.pool.BaseKeyedPoolableObjectFactory#destroyObject(java.lang.Object, java.lang.Object)
 */
@Override
public void destroyObject(Object key, Object obj) throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("destroyObject called with " + key + " and " + obj);
    }

    Session session = (Session) obj;

    // disconnect
    if (null != session) {
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        session.disconnect(Api.CSDK_FLAG_NONE);
        stopWatch.stop();

        if (LOG.isInfoEnabled()) {
            LOG.info("CSDK Session disconnect elapsed time (msec): " + stopWatch.getTime());
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("finished disconnect for key " + key + ", session " + session);
        }
    } else {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Session object was null for key: " + key + ", skipping disconnect call");
        }
    }
}

From source file: org.kuali.ole.docstore.indexer.solr.WorkLicenseDocumentIndexer.java

@Override
public String indexDocuments(List<RequestDocument> requestDocuments, boolean commit) {
    BatchIngestStatistics batchStatistics = BulkIngestStatistics.getInstance().getCurrentBatch();

    String result = null;
    StopWatch timer = new StopWatch();
    StopWatch xmlToObjTime = new StopWatch();
    xmlToObjTime.start();
    xmlToObjTime.suspend();
    timer.start();
    List<SolrInputDocument> solrInputDocuments = new ArrayList<SolrInputDocument>();
    if (requestDocuments != null && requestDocuments.size() > 0) {
        StopWatch buildSolrInputDocTime = new StopWatch();
        StopWatch xmlToPojoTimer = new StopWatch();
        buildSolrInputDocTime.start();
        buildSolrInputDocTime.suspend();
        xmlToPojoTimer.start();
        xmlToPojoTimer.suspend();
        try {
            for (RequestDocument requestDocument : requestDocuments) {
                if (DocFormat.ONIXPL.isEqualTo((requestDocument.getFormat()))) { //onixpl
                    new WorkLicenseOnixplDocBuilder().buildSolrInputDocument(requestDocument,
                            solrInputDocuments);
                } else if ((DocFormat.PDF.isEqualTo((requestDocument.getFormat())))
                        || DocFormat.DOC.isEqualTo(requestDocument.getFormat())
                        || DocFormat.XSLT.isEqualTo(requestDocument.getFormat())) { //pdf
                    new WorkLicenseBinaryDocBuilder().buildSolrInputDocument(requestDocument,
                            solrInputDocuments);
                } else {
                    throw new Exception(
                            "Unsupported Document Format : " + requestDocument.getFormat() + " Called.");
                }
                assignUUIDs(solrInputDocuments, null);
            }
        } catch (Exception e1) {
            result = buildFailureMsg(null, "Indexing failed. " + e1.getMessage());
            logger.error(result, e1);
        }
        timer.stop();
        if ((null == solrInputDocuments) || (solrInputDocuments.isEmpty())) {
            result = buildFailureMsg(null, "No valid documents found in input.");
            return result;
        }
        int numDocs = solrInputDocuments.size();
        batchStatistics.setTimeToConvertXmlToPojo(xmlToPojoTimer.getTime());
        batchStatistics.setTimeToConvertToSolrInputDocs(buildSolrInputDocTime.getTime());
        logger.info("Conversion to Solr docs- Num:" + numDocs + ": Time taken:" + timer.toString());
        result = indexSolrDocuments(solrInputDocuments, commit);
    }
    return result;
}

From source file: org.kuali.ole.docstore.service.IngestNIndexHandlerService.java

/**
 * Ingests and indexes a bulk request.
 *
 * @param request the bulk ingest request
 * @param session the repository session used for the ingest
 * @return the list of UUIDs of the ingested documents
 */
public List<String> bulkIngestNIndex(Request request, Session session) {
    //RequestDocument requestDocument = request.getRequestDocuments().get(0);
    //DocumentManager documentManager = BeanLocator.getDocumentManagerFactory().getDocumentManager(requestDocument);
    BatchIngestStatistics batchStatistics = BulkIngestStatistics.getInstance().getCurrentBatch();
    BulkIngestStatistics bulkLoadStatistics = BulkIngestStatistics.getInstance();
    long commitSize = ProcessParameters.BULK_INGEST_COMMIT_SIZE;
    logger.debug("commitSize = " + commitSize);
    logger.debug("bulkIngestNIndex(" + request.getRequestDocuments().size() + ") START");
    logger.debug("BULK_INGEST_IS_LINKING_ENABLED=" + ProcessParameters.BULK_INGEST_IS_LINKING_ENABLED);
    //Session session = null;
    List<String> docUUIDs = new ArrayList<String>();
    StopWatch ingestTimer = new StopWatch();
    StopWatch indexTimer = new StopWatch();
    StopWatch totalTimer = new StopWatch();
    StopWatch createNodesTimer = new StopWatch();
    StopWatch sessionSaveTimer = new StopWatch();
    StopWatch solrOptimizeTimer = new StopWatch();
    long recCount = request.getRequestDocuments().size();
    boolean isCommit = false;
    totalTimer.start();
    try {
        ingestTimer.start();
        createNodesTimer.start();
        //session = RepositoryManager.getRepositoryManager().getSession(request.getUser(), request.getOperation());
        List<RequestDocument> reqDocs = request.getRequestDocuments();
        if (prevRequestDocs == null) {
            prevRequestDocs = new ArrayList<RequestDocument>();
        }
        prevRequestDocs.addAll(request.getRequestDocuments());
        logger.info("prevRequestDocs" + prevRequestDocs.size());
        docUUIDs.addAll(documentIngester.ingestRequestDocumentsForBulk(reqDocs, session));
        //docUUIDs.addAll(documentIngester.ingestRequestDocumentsForBulkUsingBTreeMgr(reqDocs, session));
        //documentManager.store(reqDocs,session);
        createNodesTimer.stop();
        try {
            ingestTimer.suspend();
            indexTimer.start();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        bulkLoadStatistics.setCommitRecCount(bulkLoadStatistics.getCommitRecCount() + recCount);
        if (bulkLoadStatistics.getCommitRecCount() == commitSize || bulkLoadStatistics.isLastBatch()) {
            isCommit = true;
        }
        documentIndexer.indexDocumentsForBulk(reqDocs, isCommit);
        //documentManager.index(reqDocs,isCommit);
        try {
            indexTimer.suspend();
            ingestTimer.resume();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        if (isCommit) {
            sessionSaveTimer.start();
            logger.info("Bulk ingest: Repository commit started. Number of records being committed : "
                    + bulkLoadStatistics.getCommitRecCount());
            session.save();
            bulkLoadStatistics.setCommitRecCount(0);
            prevRequestDocs = null;
            sessionSaveTimer.stop();
        }

        try {
            ingestTimer.stop();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        // Documents processed can be different from records processed as in the case of Instance data.
        logger.debug("Documents processed:" + recCount);
        bulkLoadStatistics.setFileRecCount(bulkLoadStatistics.getFileRecCount() + recCount);
        logger.info(
                "Bulk ingest: Records processed in the current file :" + bulkLoadStatistics.getFileRecCount());
    } catch (Exception e) {
        bulkLoadStatistics.setCommitRecCount(0);
        try {
            ingestTimer.resume();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        //documentIngester.rollbackDocStoreIngestedData(session, request.getRequestDocuments());
        documentIngester.rollbackDocStoreIngestedData(session, prevRequestDocs);
        ingestTimer.stop();
        try {
            indexTimer.resume();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        //documentIndexer.rollbackIndexedData(request.getRequestDocuments());
        //prevRequestDocs = prevRequestDocs.subList(0, prevRequestDocs.size() - request.getRequestDocuments().size());
        //logger.info("prevRequestDocs before remove INDEXES = " + prevRequestDocs.size());
        documentIndexer.rollbackIndexedData(prevRequestDocs);
        prevRequestDocs = null;
        try {
            indexTimer.stop();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        logger.error("Document Ingest & Index Failed, Cause: " + e.getMessage(), e);
        try {
            totalTimer.stop();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        logger.debug("Time Consumptions...:\tcreatingNodes(" + docUUIDs.size() + "):" + createNodesTimer
                + "\tSessionSave(" + docUUIDs.size() + "):" + sessionSaveTimer + "\tIngest(" + docUUIDs.size()
                + "):" + ingestTimer + "\tIndexing(" + docUUIDs.size() + "):" + indexTimer + "\tTotal Time: "
                + totalTimer);
        docUUIDs.clear();
    } finally {
        /*if (session != null) {
        try {
            RepositoryManager.getRepositoryManager().logout(session);
        } catch (OleException e) {
        }
        } */
    }
    try {
        totalTimer.stop();
    } catch (Exception exe) {
        logger.error(exe.getMessage(), exe);
    }
    logger.debug("Time Consumptions...:\tcreatingNodes(" + docUUIDs.size() + "):" + createNodesTimer
            + "\tSessionSave(" + docUUIDs.size() + "):" + sessionSaveTimer + "\tIngest(" + docUUIDs.size()
            + "):" + ingestTimer + "\tIndexing(" + docUUIDs.size() + "):" + indexTimer + "\tTotal Time: "
            + totalTimer);
    logger.debug("bulkIngestNIndex(" + request.getRequestDocuments().size() + ") END");
    batchStatistics.setTimeToCreateNodesInJcr(createNodesTimer.getTime());
    batchStatistics.setTimeToSaveJcrSession(sessionSaveTimer.getTime());
    batchStatistics.setIngestingTime(ingestTimer.getTime());
    batchStatistics.setIndexingTime(indexTimer.getTime());
    batchStatistics.setIngestNIndexTotalTime(totalTimer.getTime());
    updateProcessTimer(docUUIDs.size(), ingestTimer, indexTimer, totalTimer);
    solrOptimizeTimer.start();
    optimizeSolr(docUUIDs.size());
    solrOptimizeTimer.stop();
    batchStatistics.setTimeToSolrOptimize(solrOptimizeTimer.getTime());
    return docUUIDs;
}
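
The timers in bulkIngestNIndex rely on suspend() and resume(): a suspended StopWatch stops accumulating, and resume() continues from where it left off, so getTime() ultimately reports only the time a given phase was actually active. A minimal sketch of that accumulation pattern (the work methods and the batch variable are hypothetical):

StopWatch ingestTimer = new StopWatch();
StopWatch indexTimer = new StopWatch();

ingestTimer.start();
ingest(batch);           // hypothetical work
ingestTimer.suspend();   // ingest time stops accumulating here

indexTimer.start();
index(batch);            // hypothetical work
indexTimer.suspend();

ingestTimer.resume();    // picks up where the ingest timer was suspended
ingestMore(batch);       // hypothetical work
ingestTimer.stop();

// Each getTime() excludes the spans during which that watch was suspended.
long ingestMillis = ingestTimer.getTime();
long indexMillis = indexTimer.getTime();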

From source file: org.kuali.ole.docstore.service.IngestNIndexHandlerService.java

private void updateProcessTimer(int recordsProcessed, StopWatch ingest, StopWatch index, StopWatch total) {
    BulkIngestTimeManager timer = ProcessParameters.BULK_PROCESSOR_TIME_MANAGER;
    synchronized (timer) {
        timer.setRecordsCount(timer.getRecordsCount() + recordsProcessed);
        timer.setIngestingTimer(timer.getIngestingTimer() + ingest.getTime());
        timer.setIndexingTimer(timer.getIndexingTimer() + index.getTime());
        timer.setProcessTimer(timer.getProcessTimer() + total.getTime());
        if (timer.getRecordsCount() >= ProcessParameters.BULK_PROCESSOR_TIMER_DISPLAY) {
            logger.debug(
                    "----------------------------------------------------------------------------------------------------------------------");
            logger.debug(timer.toString());
            logger.debug(
                    "----------------------------------------------------------------------------------------------------------------------");
            timer.reset();
        }
    }
}

From source file: org.kuali.rice.test.ClearDatabaseLifecycle.java

protected void clearTables(final PlatformTransactionManager transactionManager, final DataSource dataSource) {
    Assert.assertNotNull("DataSource could not be located.", dataSource);
    try {
        StopWatch s = new StopWatch();
        s.start();
        new TransactionTemplate(transactionManager).execute(new TransactionCallback() {
            public Object doInTransaction(final TransactionStatus status) {
                verifyTestEnvironment(dataSource);
                return new JdbcTemplate(dataSource).execute(new StatementCallback() {
                    public Object doInStatement(Statement statement) throws SQLException {
                        String schemaName = statement.getConnection().getMetaData().getUserName().toUpperCase();
                        LOG.info("Clearing tables for schema " + schemaName);
                        if (StringUtils.isBlank(schemaName)) {
                            Assert.fail("Empty schema name given");
                        }
                        final List<String> reEnableConstraints = new ArrayList<String>();
                        DatabaseMetaData metaData = statement.getConnection().getMetaData();
                        Map<String, List<String[]>> exportedKeys = indexExportedKeys(metaData, schemaName);
                        final ResultSet resultSet = metaData.getTables(null, schemaName, null,
                                new String[] { "TABLE" });
                        final StringBuilder logStatements = new StringBuilder();
                        while (resultSet.next()) {
                            String tableName = resultSet.getString("TABLE_NAME");
                            if (shouldTableBeCleared(tableName)) {
                                if (!isUsingDerby(metaData) && isUsingOracle(metaData)) {
                                    List<String[]> exportedKeyNames = exportedKeys.get(tableName);
                                    if (exportedKeyNames != null) {
                                        for (String[] exportedKeyName : exportedKeyNames) {
                                            final String fkName = exportedKeyName[0];
                                            final String fkTableName = exportedKeyName[1];
                                            final String disableConstraint = "ALTER TABLE " + fkTableName
                                                    + " DISABLE CONSTRAINT " + fkName;
                                            logStatements.append("Disabling constraints using statement ->"
                                                    + disableConstraint + "<-\n");
                                            statement.addBatch(disableConstraint);
                                            reEnableConstraints.add("ALTER TABLE " + fkTableName
                                                    + " ENABLE CONSTRAINT " + fkName);
                                        }
                                    }
                                } else if (isUsingMySQL(metaData)) {
                                    statement.addBatch("SET FOREIGN_KEY_CHECKS = 0");
                                }
                                String deleteStatement = "DELETE FROM " + tableName;
                                logStatements.append(
                                        "Clearing contents using statement ->" + deleteStatement + "<-\n");
                                statement.addBatch(deleteStatement);
                            }
                        }
                        for (final String constraint : reEnableConstraints) {
                            logStatements
                                    .append("Enabling constraints using statement ->" + constraint + "<-\n");
                            statement.addBatch(constraint);
                        }
                        if (isUsingMySQL(metaData)) {
                            statement.addBatch("SET FOREIGN_KEY_CHECKS = 1");
                        }
                        LOG.info(logStatements);

                        int[] results = statement.executeBatch();
                        for (int index = 0; index < results.length; index++) {
                            if (results[index] == Statement.EXECUTE_FAILED) {
                                Assert.fail("Execution of database clear statement failed.");
                            }

                        }
                        resultSet.close();
                        LOG.info("Tables successfully cleared for schema " + schemaName);
                        return null;
                    }
                });
            }
        });
        s.stop();
        LOG.info("Time to clear tables: " + DurationFormatUtils.formatDurationHMS(s.getTime()));
    } catch (Exception e) {
        LOG.error(e);
        throw new RuntimeException(e);
    }
}
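
Rather than logging raw milliseconds, this example feeds getTime() to commons-lang's DurationFormatUtils. A short sketch of that pairing:

import org.apache.commons.lang.time.DurationFormatUtils;
import org.apache.commons.lang.time.StopWatch;

StopWatch s = new StopWatch();
s.start();
// ... timed work ...
s.stop();
// Renders the elapsed milliseconds as H:mm:ss.SSS, e.g. "0:00:01.234".
String formatted = DurationFormatUtils.formatDurationHMS(s.getTime());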

From source file: org.lic.ip.iplocator.IPv4RadixIntTree.java

private IPv4RadixIntTree() {
    StopWatch sw = new StopWatch();
    sw.start();

    try {
        String filepath = getClass().getClassLoader().getResource("ipdb_all_2015-01-19").getPath();

        int lines = countLinesInLocalFile(filepath);
        logger.info("file lines: {}", lines);

        init(lines);
        loadFromLocalFile(filepath);
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    }

    sw.stop();
    logger.info("init cost: {}ms", sw.getTime());
}