Example usage for org.apache.commons.lang.time StopWatch stop

Introduction

This page shows example usage of org.apache.commons.lang.time.StopWatch.stop(), collected from open-source projects.

Prototype

public void stop() 

Document

Stop the stopwatch.

This method ends the current timing session, allowing the elapsed time to be retrieved.
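
Before the project examples below, here is a minimal, self-contained sketch of the call sequence (an illustration assuming Commons Lang 2.x on the classpath; the class name and the sleep are placeholders, not taken from any of the projects below): start() opens the timing session, stop() ends it, and getTime() or toString() then report the elapsed time.

import org.apache.commons.lang.time.StopWatch;

public class StopWatchStopExample {
    public static void main(String[] args) throws InterruptedException {
        StopWatch watch = new StopWatch();
        watch.start();                        // begin the timing session

        Thread.sleep(250);                    // stands in for the work being measured

        watch.stop();                         // end the session; the elapsed time is now fixed
        System.out.println("elapsed ms: " + watch.getTime()); // elapsed time in milliseconds
        System.out.println("formatted : " + watch);           // toString() returns a formatted duration
    }
}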

Usage

From source file: org.kuali.kra.test.infrastructure.lifecycle.KcUnitTestBaseLifecycle.java

/** {@inheritDoc} */
public void stopPerClass() {
    if (!this.perClassStarted) {
        throw new IllegalStateException("per class lifecycle already stopped");
    }

    final StopWatch watch = new StopWatch();

    if (LOG.isDebugEnabled()) {
        watch.start();
        LOG.debug("stopping per class lifecycle");
    }

    try {
        doPerClassStop();
        perClassStarted = false;
    } catch (Throwable e) {
        perClassStarted = false;
        if (LOG.isErrorEnabled()) {
            LOG.error("per class lifecycle failed to stop cleanly", e);
        }
        throw new KcLifecycleException(e);
    }

    if (LOG.isDebugEnabled()) {
        watch.stop();
        LOG.debug("per class lifecycle stopped in " + watch + " time");
    }
}

From source file: org.kuali.kra.test.infrastructure.lifecycle.KcUnitTestBaseLifecycle.java

/** {@inheritDoc} */
public void startPerSuite() {
    if (this.perSuiteStarted) {
        throw new IllegalStateException("per suite lifecycle already started");
    }

    final StopWatch watch = new StopWatch();

    if (LOG.isDebugEnabled()) {
        watch.start();
        LOG.debug("starting per suite lifecycle");
    }

    try {
        doPerSuiteStart();
        perSuiteStarted = true;
    } catch (Throwable e) {
        perSuiteStarted = false;
        if (LOG.isErrorEnabled()) {
            LOG.error("per suite lifecycle failed to start cleanly", e);
        }
        throw new KcLifecycleException(e);
    }

    if (LOG.isDebugEnabled()) {
        watch.stop();
        LOG.debug("per suite lifecycle started in " + watch + " time");
    }
}

From source file: org.kuali.kra.test.infrastructure.lifecycle.KcUnitTestBaseLifecycle.java

/** {@inheritDoc} */
public void stopPerSuite() {
    if (!this.perSuiteStarted) {
        throw new IllegalStateException("per suite lifecycle already stopped");
    }

    final StopWatch watch = new StopWatch();

    if (LOG.isDebugEnabled()) {
        watch.start();
        LOG.debug("stopping per suite lifecycle");
    }

    try {
        doPerSuiteStop();
        perSuiteStarted = false;
    } catch (Throwable e) {
        perSuiteStarted = false;
        if (LOG.isErrorEnabled()) {
            LOG.error("per suite lifecycle failed to stop cleanly", e);
        }
        throw new KcLifecycleException(e);
    }

    if (LOG.isDebugEnabled()) {
        watch.stop();
        LOG.debug("per suite lifecycle stopped in " + watch + " time");
    }
}

From source file: org.kuali.ole.docstore.indexer.solr.WorkLicenseDocumentIndexer.java

@Override
public String indexDocuments(List<RequestDocument> requestDocuments, boolean commit) {
    BatchIngestStatistics batchStatistics = BulkIngestStatistics.getInstance().getCurrentBatch();

    String result = null;
    StopWatch timer = new StopWatch();
    StopWatch xmlToObjTime = new StopWatch();
    xmlToObjTime.start();
    xmlToObjTime.suspend();
    timer.start();
    List<SolrInputDocument> solrInputDocuments = new ArrayList<SolrInputDocument>();
    if (requestDocuments != null && requestDocuments.size() > 0) {
        StopWatch buildSolrInputDocTime = new StopWatch();
        StopWatch xmlToPojoTimer = new StopWatch();
        buildSolrInputDocTime.start();
        buildSolrInputDocTime.suspend();
        xmlToPojoTimer.start();
        xmlToPojoTimer.suspend();
        try {
            for (RequestDocument requestDocument : requestDocuments) {
                if (DocFormat.ONIXPL.isEqualTo((requestDocument.getFormat()))) { //onixpl
                    new WorkLicenseOnixplDocBuilder().buildSolrInputDocument(requestDocument,
                            solrInputDocuments);
                } else if ((DocFormat.PDF.isEqualTo((requestDocument.getFormat())))
                        || DocFormat.DOC.isEqualTo(requestDocument.getFormat())
                        || DocFormat.XSLT.isEqualTo(requestDocument.getFormat())) { //pdf
                    new WorkLicenseBinaryDocBuilder().buildSolrInputDocument(requestDocument,
                            solrInputDocuments);
                } else {
                    throw new Exception(
                            "Unsupported Document Format : " + requestDocument.getFormat() + " Called.");
                }
                assignUUIDs(solrInputDocuments, null);
            }
        } catch (Exception e1) {
            result = buildFailureMsg(null, "Indexing failed. " + e1.getMessage());
            logger.error(result, e1);
        }
        timer.stop();
        if ((null == solrInputDocuments) || (solrInputDocuments.isEmpty())) {
            result = buildFailureMsg(null, "No valid documents found in input.");
            return result;
        }
        int numDocs = solrInputDocuments.size();
        batchStatistics.setTimeToConvertXmlToPojo(xmlToPojoTimer.getTime());
        batchStatistics.setTimeToConvertToSolrInputDocs(buildSolrInputDocTime.getTime());
        logger.info("Conversion to Solr docs- Num:" + numDocs + ": Time taken:" + timer.toString());
        result = indexSolrDocuments(solrInputDocuments, commit);
    }
    return result;
}

From source file: org.kuali.ole.docstore.process.RebuildIndexesHandler.java

private void reIndex(String docCategory, String docType, String docFormat) {
    Session session = null;
    setRunning(true);
    logger.info("Rebuild Indexes Run(" + docCategory + " : " + docType + " : " + docFormat + "): ");
    try {
        if (docCategory.equals(DocCategory.WORK.getCode())) {
            if (docType.equals(DocType.BIB.getDescription())) {
                if (docFormat.equals(DocFormat.MARC.getCode())
                        || docFormat.equals(DocFormat.DUBLIN_CORE.getCode())
                        || docFormat.equals(DocFormat.DUBLIN_UNQUALIFIED.getCode())) {
                    org.springframework.util.StopWatch stopWatch = new org.springframework.util.StopWatch();
                    stopWatch.start("total time taken");
                    Date date = new Date();
                    EXCEPION_FILE_NAME = "ReindexErrors-" + date.toString() + ".txt";
                    STATUS_FILE_NAME = "ReindexBatchStatus-" + date.toString() + ".txt";
                    BatchBibTreeDBUtil.writeStatusToFile(filePath, RebuildIndexesHandler.EXCEPION_FILE_NAME,
                            "Reindex started at:" + date);
                    BibHoldingItemReindexer bibHoldingItemReindexer = BibHoldingItemReindexer.getInstance();
                    bibHoldingItemReindexer.setTotalBatchStatistics(new ReindexBatchStatistics());
                    bibHoldingItemReindexer.index(batchSize, startIndex, endIndex, updateDate);
                    date = new Date();
                    BatchBibTreeDBUtil.writeStatusToFile(filePath, RebuildIndexesHandler.EXCEPION_FILE_NAME,
                            "Reindex ended at:" + date);
                    stopWatch.stop();
                    logger.info(stopWatch.prettyPrint());
                    //                        workBibMarcAndDublinAll(docCategory, docType, docFormat);
                } else {
                    logger.info("Rebuild Indexes Run(" + docCategory + " : " + docType + " : " + docFormat
                            + "): FAIL");
                }
            } else if (docType.equals(DocType.INSTANCE.getDescription())) {
                if (docFormat.equals(DocFormat.OLEML.getCode())) {
                    workInstanceOLEML(docCategory, docType, docFormat);
                } else {
                    logger.info("Rebuild Indexes Run(" + docCategory + " : " + docType + " : " + docFormat
                            + "): FAIL");
                }
            } else if (docType.equals(DocType.LICENSE.getDescription())) {
                if (docFormat.equals(DocFormat.ONIXPL.getCode()) || docFormat.equals(DocFormat.PDF.getCode())
                        || docFormat.equals(DocFormat.DOC.getCode())) {
                    workLicense(docCategory, docType, docFormat);
                } else {
                    logger.info("Rebuild Indexes Run(" + docCategory + " : " + docType + " : " + docFormat
                            + "): FAIL");
                }
            } else if (docType.equals(DocType.EINSTANCE.getCode())) {
                if (docFormat.equals(DocFormat.OLEML.getCode())) {
                    workEInstanceOLEML(docCategory, docType, docFormat);
                } else {
                    logger.info("Rebuild Indexes Run(" + docCategory + " : " + docType + " : " + docFormat
                            + "): FAIL");
                }
            }
        }
    } catch (Exception e) {
        logger.info(e.getMessage(), e);
    } finally {
        try {
            if (isStop) {
                ReIndexingStatus.getInstance().getDocTypeList().setStatus("Stopped");
            } else {
                ReIndexingStatus.getInstance().getDocTypeList().setStatus("Done");
            }
            RepositoryManager.getRepositoryManager().logout(session);
        } catch (OleException e) {
            logger.error(e.getMessage(), e);
        }
    }

}

From source file: org.kuali.ole.docstore.process.RebuildIndexesHandler.java

private void indexBibDocs(List<BibTree> bibTreeList, long records, long recCount,
        List<ReIndexingBatchStatus> batchStatusList, ReIndexingBatchStatus reIndexingBatchStatus) {
    StopWatch indexTimer = new StopWatch();
    DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss:SSS");
    try {
        Date startDate = new Date();
        reIndexingBatchStatus.setBatchStartTime(dateFormat.format(startDate));
        indexTimer.start();
        reIndexingBatchStatus.setStatus("Indexing");
        reIndexingBatchStatus.setBatchIndexingTime(indexTimer.toString());
        reIndexingBatchStatus.setRecordsProcessed(records);
        reIndexingBatchStatus.setBatchEndTime(" ");
        batchStatusList.add(reIndexingBatchStatus);
        ReIndexingStatus.getInstance().getDocTypeList().setReIndBatStatusList(batchStatusList);
        DocumentIndexer documentIndexer = BibMarcIndexer.getInstance();
        BibTrees bibTrees = new BibTrees();
        bibTrees.getBibTrees().addAll(bibTreeList);
        documentIndexer.createTrees(bibTrees);
        //logger.debug(result);
        indexTimer.stop();
        Date endDate = new Date();
        reIndexingBatchStatus.setBatchEndTime(dateFormat.format(endDate));
        reIndexingBatchStatus.setBatchIndexingTime(indexTimer.toString());
        reIndexingBatchStatus.setRecordsProcessed(records);
        reIndexingBatchStatus.setStatus("Done");
        reIndexingBatchStatus.setRecordsRemaining(recCount - records);
        ReIndexingStatus.getInstance().getDocTypeList().setReIndBatStatusList(batchStatusList);
        bibTreeList.clear();
    } catch (Exception e) {
        String firstBibId = bibTreeList.get(0).getBib().getId();
        String lastBibId = bibTreeList.get(bibTreeList.size() - 1).getBib().getId();
        logger.error("Rebuild Indexes Process(" + docCategory + " : " + docType + " : " + docFormat
                + ") Processed(" + (records - bibTreeList.size()) + "), Failed @ bibId( First BibId: "
                + firstBibId + "   :  Last BibId : " + lastBibId + "): Cause: " + e, e);
        indexTimer.stop();
        Date endDate = new Date();
        reIndexingBatchStatus.setBatchEndTime(dateFormat.format(endDate));
        reIndexingBatchStatus.setBatchIndexingTime(indexTimer.toString());
        reIndexingBatchStatus.setRecordsProcessed(0L);
        reIndexingBatchStatus.setStatus("Done");
        reIndexingBatchStatus.setRecordsRemaining(recCount - records);
        ReIndexingStatus.getInstance().getDocTypeList().setReIndBatStatusList(batchStatusList);
        bibTreeList.clear();
    }
}

From source file: org.kuali.ole.docstore.process.RebuildIndexesHandler.java

private void indexDocs(List<RequestDocument> docs, long records, long recCount,
        List<ReIndexingBatchStatus> batchStatusList, ReIndexingBatchStatus reIndexingBatchStatus) {
    try {
        StopWatch indexTimer = new StopWatch();
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss:SSS");
        Date startDate = new Date();
        reIndexingBatchStatus.setBatchStartTime(dateFormat.format(startDate));
        indexTimer.start();
        reIndexingBatchStatus.setStatus("Indexing");
        reIndexingBatchStatus.setBatchIndexingTime(indexTimer.toString());
        reIndexingBatchStatus.setRecordsProcessed(records);
        reIndexingBatchStatus.setBatchEndTime(" ");
        batchStatusList.add(reIndexingBatchStatus);
        ReIndexingStatus.getInstance().getDocTypeList().setReIndBatStatusList(batchStatusList);
        IndexerService indexerService = BeanLocator.getDocstoreFactory().getDocumentIndexManager(
                docs.get(0).getCategory(), docs.get(0).getType(), docs.get(0).getFormat());
        String result = indexerService.indexDocuments(docs, false);
        logger.debug(result);
        indexTimer.stop();
        Date endDate = new Date();
        reIndexingBatchStatus.setBatchEndTime(dateFormat.format(endDate));
        reIndexingBatchStatus.setBatchIndexingTime(indexTimer.toString());
        reIndexingBatchStatus.setRecordsProcessed(records);
        reIndexingBatchStatus.setStatus("Done");
        reIndexingBatchStatus.setRecordsRemaining(recCount - records);
        ReIndexingStatus.getInstance().getDocTypeList().setReIndBatStatusList(batchStatusList);
        docs.clear();
    } catch (Exception e) {
        logger.error("Rebuild Indexes Processed(" + (records - docs.size()) + "), Failed @ batch(" + docs.size()
                + "): Cause: " + e + "\n\tContinuous", e);
    }
}

From source file: org.kuali.ole.docstore.process.RebuildIndexesHandler.java

private void indexAfterParams(StopWatch batchTimer, ReIndexingBatchStatus reIndexingBatchStatus,
        List<ReIndexingBatchStatus> batchStatusList) {
    batchTimer.stop();
    reIndexingBatchStatus.setBatchTotalTime(batchTimer.toString());
    ReIndexingStatus.getInstance().getDocTypeList().setReIndBatStatusList(batchStatusList);
}

From source file: org.kuali.ole.docstore.process.RebuildIndexesHandler.java

private ReIndexingBatchStatus indexBeforeParams(StopWatch loadTimer) {
    loadTimer.stop();
    ReIndexingBatchStatus reIndexingBatchStatus = new ReIndexingBatchStatus();
    reIndexingBatchStatus.setBatchTotalTime(" ");
    reIndexingBatchStatus.setBatchLoadTime(loadTimer.toString());
    return reIndexingBatchStatus;
}

From source file: org.kuali.ole.docstore.service.DocumentIngester.java

/**
 * Method to ingest a Bib RequestDocument using Btree manager.
 *
 * @param reqDocs
 * @param session
 * @param formatNode
 * @return
 * @throws Exception
 */
protected synchronized List<Node> ingestBibDocumentUsingBTreeMgr(List<RequestDocument> reqDocs, Session session,
        Node formatNode) throws Exception {
    List<Node> fileNodes = null;
    fileNodes = new ArrayList<Node>();
    try {
        /*String file = "file";
        if (DocFormat.MARC.isEqualTo(reqDoc.getFormat()))
        file = FILE_MARC;
        else
        file = reqDoc.getFormat() + FILE;
        Node bibFormatNode = null;
        if (formatNode == null)
        bibFormatNode = getStaticFormatNode(reqDoc, session);
        else
        bibFormatNode = formatNode;
        Node l3 = null;
        synchronized (nodeHandler) {
        Node l1 = nodeHandler.initLevelNode(NODE_LEVEL1, bibFormatNode, false, session);
        Node l2 = nodeHandler.initLevelNode(NODE_LEVEL2, l1, false, session);
        l3 = nodeHandler.initLevelNode(NODE_LEVEL3, l2, false, session);
        } */
        StopWatch btreeTimer = new StopWatch();
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss:SSS");
        Date date = new Date();
        btreeTimer.start();
        treeManager = new BTreeManager(formatNode, 500, 1000, Rank.<String>comparableComparator(), true);
        // Create a new NodeSequence with that tree manager
        nodeSequence = ItemSequence.createNodeSequence(treeManager);
        btreeTimer.stop();
        logger.info("Time taken for initializing btree manager sequence=" + btreeTimer.toString());
        StopWatch btreeAddNodeTimer = new StopWatch();
        Node node = null;
        btreeAddNodeTimer.start();
        Random generator = new Random(19580427);
        Format formatter = new SimpleDateFormat("dd-MM-yyyy HH-mm-ss");
        Date date1 = null;
        for (RequestDocument reqDoc : reqDocs) {
            node = null;
            date1 = new Date();
            String dateStr = formatter.format(date1);
            node = nodeSequence.addNode(dateStr + "-" + generator.nextInt(), NodeType.NT_UNSTRUCTURED);
            nodeHandler.initFileNode(node, reqDoc, FILE_MARC, null, session);
            fileNodes.add(node);
            //i++;
        }
        btreeAddNodeTimer.stop();
        logger.info(
                "Time taken for adding " + reqDocs.size() + " nodes to btree: " + btreeAddNodeTimer.toString());

    } catch (Exception e) {
        logger.error("Ingest failed for RequestDocument: ", e);
        throw new Exception(e);
    }
    return fileNodes;
}