Example usage for org.apache.commons.lang.time StopWatch StopWatch

Introduction

This page collects example usages of the org.apache.commons.lang.time.StopWatch constructor, StopWatch().

Prototype

public StopWatch() 

Document

Constructor.
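
Before the examples, here is a minimal sketch of the constructor in use, assuming nothing beyond the commons-lang API; doWork() is a hypothetical placeholder for whatever code is being timed.

StopWatch stopWatch = new StopWatch(); // the no-argument constructor shown above
stopWatch.start();                     // begin timing
doWork();                              // hypothetical placeholder for the timed work
stopWatch.stop();                      // freeze the elapsed time
long elapsedMs = stopWatch.getTime();  // elapsed time in milliseconds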

Usage

From source file:com.nridge.connector.fs.con_fs.core.RunPublishFS.java

/**
 * When an object implementing interface <code>Runnable</code> is used
 * to create a thread, starting the thread causes the object's
 * <code>run</code> method to be called in that separately executing
 * thread.
 * 
 * The general contract of the method <code>run</code> is that it may
 * take any action whatsoever.
 *
 * @see Thread#run()
 */
@Override
public void run() {
    Document conDoc;
    DocumentXML documentXML;
    String docId, queueItem, srcPathFileName;
    Logger appLogger = mAppMgr.getLogger(this, "run");

    appLogger.trace(mAppMgr.LOGMSG_TRACE_ENTER);

    boolean isPublisherInitialized = true;
    DataBag schemaBag = (DataBag) mAppMgr.getProperty(Connector.PROPERTY_SCHEMA_NAME);
    Publishers indexPublishers = new Publishers(mAppMgr, mCrawlQueue, Constants.CFG_PROPERTY_PREFIX);
    try {
        indexPublishers.initialize(schemaBag);
    } catch (NSException e) {
        isPublisherInitialized = false;
        appLogger.error("Publisher initialization: " + e.getMessage());
    }

    if (isPublisherInitialized) {
        BlockingQueue transformQueue = (BlockingQueue) mAppMgr.getProperty(Connector.QUEUE_TRANSFORM_NAME);
        BlockingQueue publishQueue = (BlockingQueue) mAppMgr.getProperty(Connector.QUEUE_PUBLISH_NAME);

        long queueWaitTimeout = mAppMgr.getLong(Constants.CFG_PROPERTY_PREFIX + ".queue.wait_timeout",
                Constants.QUEUE_POLL_TIMEOUT_DEFAULT);
        do {
            try {
                queueItem = (String) transformQueue.poll(queueWaitTimeout, TimeUnit.SECONDS);
                if (mCrawlQueue.isQueueItemDocument(queueItem)) {
                    StopWatch stopWatch = new StopWatch();
                    stopWatch.start();

                    docId = Connector.docIdFromQueueItem(queueItem);

                    appLogger.debug(String.format("Transform Queue Item: %s", docId));
                    srcPathFileName = mCrawlQueue.docPathFileName(Connector.QUEUE_TRANSFORM_NAME, docId);
                    try {
                        documentXML = new DocumentXML();
                        documentXML.load(srcPathFileName);
                        conDoc = documentXML.getDocument();

                        indexPublishers.send(conDoc);

                        File srcFile = new File(srcPathFileName);
                        if (!srcFile.delete())
                            appLogger.warn(String.format("%s: Unable to delete.", srcPathFileName));

                        stopWatch.stop();
                        queueItem = Connector.queueItemIdPhaseTime(queueItem, Connector.PHASE_PUBLISH,
                                stopWatch.getTime());
                        try {
                            // If queue is full, this thread may block.
                            publishQueue.put(queueItem);
                        } catch (InterruptedException e) {
                            // Restore the interrupted status so parent can handle (if it wants to).
                            Thread.currentThread().interrupt();
                        }
                    } catch (Exception e) {
                        String msgStr = String.format("%s: %s", docId, e.getMessage());
                        appLogger.error(msgStr, e);
                        MailManager mailManager = (MailManager) mAppMgr
                                .getProperty(Connector.PROPERTY_MAIL_NAME);
                        mailManager.addMessage(Connector.PHASE_PUBLISH, Connector.STATUS_MAIL_ERROR, msgStr,
                                Constants.MAIL_DETAIL_MESSAGE);
                    }
                }
            } catch (InterruptedException e) {
                queueItem = StringUtils.EMPTY;
            }
        } while (!mCrawlQueue.isPhaseComplete(Connector.PHASE_TRANSFORM, queueItem));

        // Forward the marker queue item to the next queue.

        if (mCrawlQueue.isQueueItemMarker(queueItem)) {
            try {
                // If queue is full, this thread may block.
                publishQueue.put(queueItem);
            } catch (InterruptedException e) {
                // Restore the interrupted status so parent can handle (if it wants to).
                Thread.currentThread().interrupt();
            }
        }

        // Now we can shutdown our search indexer publisher.

        try {
            indexPublishers.shutdown();
        } catch (NSException e) {
            appLogger.error("Publisher shutdown: " + e.getMessage());
        }
    }

    appLogger.trace(mAppMgr.LOGMSG_TRACE_DEPART);
}

From source file:net.di2e.ecdr.libs.result.relevance.RelevanceNormalizer.java

/**
 * Normalize the relevance score for the results in the query response based on the contextual query criteria
 *
 * @param results the query results to re-score
 * @param originalQuery the query whose criteria drive the normalization
 * @return the results with normalized relevance scores, or the original results if normalization does not apply
 */
public List<Result> normalize(List<Result> results, Query originalQuery) {

    SortBy sortBy = originalQuery.getSortBy();
    // We want to do relevance sort if no sort order was specified or if Relevance sort was specified
    if (sortBy == null || sortBy.getPropertyName() == null || sortBy.getPropertyName().getPropertyName() == null
            || Result.RELEVANCE.equals(sortBy.getPropertyName().getPropertyName())) {

        Map<String, String> filterParameters = getFilterParameters(originalQuery);

        if (canNormalizeQuery(filterParameters)) {
            LOGGER.debug(
                    "Query contained search phrase and will be sorted by relevance, performing re-indexing to normalize relevance.");
            Directory directory = null;
            DirectoryReader iReader = null;
            Map<String, Result> docMap = new HashMap<>();
            List<Result> updatedResults = new ArrayList<>();
            StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            try {
                Analyzer analyzer = new StandardAnalyzer();

                // create memory-stored index
                directory = new RAMDirectory();

                IndexWriterConfig config = new IndexWriterConfig(Version.LATEST, analyzer);
                IndexWriter iWriter = new IndexWriter(directory, config);

                // loop through all of the results and add them to the index
                for (Result curResult : results) {
                    Document doc = new Document();
                    String text = TextParser.parseTextFrom(curResult.getMetacard().getMetadata());
                    String uuid = UUID.randomUUID().toString();
                    doc.add(new Field(METADATA_FIELD, text, TextField.TYPE_STORED));
                    doc.add(new Field(ID_FIELD, uuid, TextField.TYPE_STORED));
                    iWriter.addDocument(doc);
                    docMap.put(uuid, curResult);
                }

                IOUtils.closeQuietly(iWriter);
                LOGGER.debug("{} Document indexing finished in {} seconds.", RELEVANCE_TIMER,
                        (double) stopWatch.getTime() / 1000.0);
                // Now search the index:
                iReader = DirectoryReader.open(directory);
                IndexSearcher iSearcher = new IndexSearcher(iReader);
                // Parse a simple query that searches for "text":
                QueryParser parser = new QueryParser(METADATA_FIELD, analyzer);
                org.apache.lucene.search.Query query = getQuery(parser, filterParameters);
                ScoreDoc[] hits = iSearcher.search(query, null, docMap.size()).scoreDocs;
                LOGGER.debug("Got back {} results", hits.length);

                // loop through the indexed search results and update the scores in the original query results
                for (ScoreDoc curHit : hits) {
                    Document doc = iSearcher.doc(curHit.doc);
                    String uuid = doc.getField(ID_FIELD).stringValue();
                    Result result = docMap.get(uuid);
                    docMap.remove(uuid);
                    updatedResults.add(updateResult(result, curHit.score));
                    LOGGER.debug("Relevance for result {} was changed FROM {} TO {}",
                            result.getMetacard().getId(), result.getRelevanceScore(), curHit.score);
                }
                // check if there are any results left that did not match the keyword query
                for (Map.Entry<String, Result> curEntry : docMap.entrySet()) {
                    // add result in with 0 relevance score
                    updatedResults.add(updateResult(curEntry.getValue(), 0));
                }
                // create new query response
                return updatedResults;

            } catch (ParseException | IOException | RuntimeException e) {
                LOGGER.warn(
                        "Received an exception while trying to perform re-indexing, sending original queryResponse on.",
                        e);
                return results;
            } finally {
                IOUtils.closeQuietly(iReader);
                IOUtils.closeQuietly(directory);
                stopWatch.stop();
                LOGGER.debug("{} Total relevance process took {} seconds.", RELEVANCE_TIMER,
                        (double) stopWatch.getTime() / 1000.0);
            }
        } else {
            LOGGER.debug(
                    "Query does not contain a contextual search phrase. Skipping relevance normalization.");
        }
    } else {
        LOGGER.debug(
                "Query is not sorted based on relevance with contextual criteria. Skipping relevance normalization.");
    }
    return results;
}
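
One detail this example relies on: getTime() is called while the watch is still running (for the indexing-time log) and again after stop() in the finally block. Commons-lang's StopWatch supports both readings; a minimal sketch:

StopWatch sw = new StopWatch();
sw.start();
long soFarMs = sw.getTime(); // legal on a running watch: elapsed time so far
sw.stop();
long totalMs = sw.getTime(); // fixed once the watch is stopped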

From source file:com.nridge.connector.fs.con_fs.core.RunMetricReport.java

/**
 * When an object implementing interface <code>Runnable</code> is used
 * to create a thread, starting the thread causes the object's
 * <code>run</code> method to be called in that separately executing
 * thread.
 * 
 * The general contract of the method <code>run</code> is that it may
 * take any action whatsoever.
 *
 * @see Thread#run()
 */
@Override
public void run() {
    long msTime;
    String[] phaseTimes;
    double secondsTime, docsPerSecond;
    String docId, queueItem, phaseName;
    Logger appLogger = mAppMgr.getLogger(this, "run");

    appLogger.trace(mAppMgr.LOGMSG_TRACE_ENTER);

    long extractCount = 0;
    DescriptiveStatistics dsExtract = new DescriptiveStatistics();
    long transformCount = 0;
    DescriptiveStatistics dsTransform = new DescriptiveStatistics();
    long publishCount = 0;
    DescriptiveStatistics dsPublish = new DescriptiveStatistics();

    BlockingQueue publishQueue = (BlockingQueue) mAppMgr.getProperty(Connector.QUEUE_PUBLISH_NAME);

    do {
        try {
            queueItem = (String) publishQueue.poll(Constants.QUEUE_POLL_TIMEOUT_DEFAULT, TimeUnit.SECONDS);
            if (mCrawlQueue.isQueueItemDocument(queueItem)) {
                StopWatch stopWatch = new StopWatch();
                stopWatch.start();

                docId = Connector.docIdFromQueueItem(queueItem);

                appLogger.debug(String.format("Publish Queue Item: %s", docId));

                phaseTimes = Connector.phaseTimeFromQueueItem(queueItem);
                if (phaseTimes != null) {
                    for (String phaseTime : phaseTimes) {
                        phaseName = Connector.phaseFromPhaseTime(phaseTime);
                        msTime = Connector.timeFromPhaseTime(phaseTime);
                        if (StringUtils.equals(phaseName, Connector.PHASE_EXTRACT)) {
                            extractCount++;
                            secondsTime = msTime / MILLISECONDS_IN_A_SECOND;
                            dsExtract.addValue(secondsTime);
                        } else if (StringUtils.equals(phaseName, Connector.PHASE_TRANSFORM)) {
                            transformCount++;
                            secondsTime = msTime / MILLISECONDS_IN_A_SECOND;
                            dsTransform.addValue(secondsTime);
                        } else if (StringUtils.equals(phaseName, Connector.PHASE_PUBLISH)) {
                            publishCount++;
                            secondsTime = msTime / MILLISECONDS_IN_A_SECOND;
                            dsPublish.addValue(secondsTime);
                        }
                    }
                }
            }
        } catch (InterruptedException e) {
            queueItem = StringUtils.EMPTY;
        }
    } while (!mCrawlQueue.isPhaseComplete(Connector.PHASE_PUBLISH, queueItem));

    // Note: This is the end of the queue processing pipeline, so we will not pass on queue item markers.

    // Generate our metrics summary for the log file.

    writePhaseMetric(Connector.PHASE_EXTRACT, extractCount, dsExtract.getSum());
    writePhaseMetric(Connector.PHASE_TRANSFORM, transformCount, dsTransform.getSum());
    writePhaseMetric(Connector.PHASE_PUBLISH, publishCount, dsPublish.getSum());

    double totalTime = dsExtract.getSum() + dsTransform.getSum() + dsPublish.getSum();
    if ((publishCount > 0L) && (totalTime > 0.0))
        docsPerSecond = publishCount / totalTime;
    else
        docsPerSecond = 0.0;
    String msgStr = String.format("Total metric summary: %d documents, %.2f seconds (%.2f docs/sec avg)",
            publishCount, totalTime, docsPerSecond);
    appLogger.info(msgStr);

    appLogger.trace(mAppMgr.LOGMSG_TRACE_DEPART);
}

From source file:eagle.service.generic.ListQueryResource.java

/**
 * TODO refactor the code structure; right now it's messy
 * @param query
 * @param startTime
 * @param endTime
 * @param pageSize
 * @param startRowkey
 * @param treeAgg
 * @param timeSeries
 * @param intervalmin
 * @return
 */
@GET
@Produces({ MediaType.APPLICATION_JSON })
public ListQueryAPIResponseEntity listQuery(@QueryParam("query") String query,
        @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime,
        @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey,
        @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries,
        @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top,
        @QueryParam("filterIfMissing") boolean filterIfMissing, @QueryParam("parallel") int parallel,
        @QueryParam("metricName") String metricName, @QueryParam("verbose") Boolean verbose) {
    if (!EagleConfigFactory.load().isCoprocessorEnabled())
        return listQueryWithoutCoprocessor(query, startTime, endTime, pageSize, startRowkey, treeAgg,
                timeSeries, intervalmin, top, filterIfMissing, parallel, metricName, verbose);

    StopWatch watch = new StopWatch();
    watch.start();
    ListQueryAPIResponseEntity result = new ListQueryAPIResponseEntity();
    try {
        validateQueryParameters(startRowkey, pageSize);

        // 1. Compile query to parse parameters and HBase Filter
        ListQueryCompiler comp = new ListQueryCompiler(query, filterIfMissing);
        String serviceName = comp.serviceName();

        SearchCondition condition = new SearchCondition();
        condition.setOutputVerbose(verbose == null || verbose);
        condition.setOutputAlias(comp.getOutputAlias());
        condition.setFilter(comp.filter());
        condition.setQueryExpression(comp.getQueryExpression());
        if (comp.sortOptions() == null && top > 0) {
            LOG.warn(
                    "Parameter \"top\" is only used for sort queries! Ignoring the top parameter since this is not a sort query");
        }

        // 2. Initialize partition values if set
        // TODO: For now we don't support one query spanning multiple partitions. In the future,
        // if a partition is defined for the entity, we will need to spawn multiple queries
        // internally and send one query per partition's search condition.
        final List<String[]> partitionValues = comp.getQueryPartitionValues();
        if (partitionValues != null) {
            condition.setPartitionValues(Arrays.asList(partitionValues.get(0)));
        }

        // 3. Set time range if it's timeseries service
        EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName);
        if (ed.isTimeSeries()) {
            // TODO check timestamp exists for timeseries or topology data
            condition.setStartTime(startTime);
            condition.setEndTime(endTime);
        }

        // 4. Set HBase start scanning rowkey if given
        condition.setStartRowkey(startRowkey);

        // 5. Set page size
        condition.setPageSize(pageSize);

        // 6. Generate output,group-by,aggregated fields
        List<String> outputFields = comp.outputFields();
        List<String> groupbyFields = comp.groupbyFields();
        List<String> aggregateFields = comp.aggregateFields();
        Set<String> filterFields = comp.getFilterFields();

        // Start to generate output fields list {
        condition.setOutputAll(comp.isOutputAll());
        if (outputFields == null)
            outputFields = new ArrayList<String>();
        if (comp.hasAgg()) {
            if (groupbyFields != null)
                outputFields.addAll(groupbyFields);
            if (aggregateFields != null)
                outputFields.addAll(aggregateFields);
            if (GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(serviceName)
                    && !outputFields.contains(GenericMetricEntity.VALUE_FIELD)) {
                outputFields.add(GenericMetricEntity.VALUE_FIELD);
            }
        }
        if (filterFields != null)
            outputFields.addAll(filterFields);
        condition.setOutputFields(outputFields);
        if (comp.isOutputAll()) {
            LOG.info("Output fields: ALL");
        } else {
            LOG.info("Output fields: " + StringUtils.join(outputFields, ","));
        }
        // } END

        // 7. Build GenericQuery
        GenericQuery reader = GenericQueryBuilder.select(outputFields).from(serviceName, metricName)
                .where(condition)
                .groupBy(comp.hasAgg(), groupbyFields, comp.aggregateFunctionTypes(), aggregateFields)
                .timeSeries(timeSeries, intervalmin).treeAgg(treeAgg)
                .orderBy(comp.sortOptions(), comp.sortFunctions(), comp.sortFields()).top(top)
                .parallel(parallel).build();

        // 8. Fill response object
        List entities = reader.result();
        result.setObj(entities);
        result.setTotalResults(entities.size());
        result.setSuccess(true);
        result.setLastTimestamp(reader.getLastTimestamp());
        result.setFirstTimestamp(reader.getFirstTimeStamp());
    } catch (Exception ex) {
        LOG.error("Fail executing list query", ex);
        result.setException(EagleExceptionWrapper.wrap(ex));
        result.setSuccess(false);
        return result;
    } finally {
        watch.stop();
        result.setElapsedms(watch.getTime());
    }
    LOG.info("Query done " + watch.getTime() + " ms");
    return result;
}

From source file:com.gst.infrastructure.security.filter.TenantAwareTenantIdentifierFilter.java

@Override
public void doFilter(final ServletRequest req, final ServletResponse res, final FilterChain chain)
        throws IOException, ServletException {

    final HttpServletRequest request = (HttpServletRequest) req;
    final HttpServletResponse response = (HttpServletResponse) res;

    final StopWatch task = new StopWatch();
    task.start();

    try {

        // Allow Cross-Origin Requests (CORS) to be performed against the platform API.
        response.setHeader("Access-Control-Allow-Origin", "*");
        response.setHeader("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS");
        final String reqHead = request.getHeader("Access-Control-Request-Headers");

        if (null != reqHead && !reqHead.isEmpty()) {
            response.setHeader("Access-Control-Allow-Headers", reqHead);
        }

        if (!"OPTIONS".equalsIgnoreCase(request.getMethod())) {

            String tenantIdentifier = request.getHeader(this.tenantRequestHeader);
            if (org.apache.commons.lang.StringUtils.isBlank(tenantIdentifier)) {
                tenantIdentifier = request.getParameter("tenantIdentifier");
            }

            if (tenantIdentifier == null && this.exceptionIfHeaderMissing) {
                throw new InvalidTenantIdentiferException(
                        "No tenant identifier found: Add request header of '" + this.tenantRequestHeader
                                + "' or add the parameter 'tenantIdentifier' to query string of request URL.");
            }

            String pathInfo = request.getRequestURI();
            boolean isReportRequest = false;
            if (pathInfo != null && pathInfo.contains("report")) {
                isReportRequest = true;
            }
            final FineractPlatformTenant tenant = this.basicAuthTenantDetailsService
                    .loadTenantById(tenantIdentifier, isReportRequest);

            ThreadLocalContextUtil.setTenant(tenant);
            String authToken = request.getHeader("Authorization");

            if (authToken != null && authToken.startsWith("bearer ")) {
                ThreadLocalContextUtil.setAuthToken(authToken.replaceFirst("bearer ", ""));
            }

            if (!firstRequestProcessed) {
                final String baseUrl = request.getRequestURL().toString().replace(request.getRequestURI(),
                        request.getContextPath() + apiUri);
                System.setProperty("baseUrl", baseUrl);

                final boolean ehcacheEnabled = this.configurationDomainService.isEhcacheEnabled();
                if (ehcacheEnabled) {
                    this.cacheWritePlatformService.switchToCache(CacheType.SINGLE_NODE);
                } else {
                    this.cacheWritePlatformService.switchToCache(CacheType.NO_CACHE);
                }
                TenantAwareTenantIdentifierFilter.firstRequestProcessed = true;
            }
            chain.doFilter(request, response);
        }
    } catch (final InvalidTenantIdentiferException e) {
        // deal with exception at low level
        SecurityContextHolder.getContext().setAuthentication(null);

        response.addHeader("WWW-Authenticate", "Basic realm=\"" + "Fineract Platform API" + "\"");
        response.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage());
    } finally {
        task.stop();
        final PlatformRequestLog log = PlatformRequestLog.from(task, request);
        logger.info(this.toApiJsonSerializer.serialize(log));
    }

}

From source file:com.auditbucket.test.functional.TestForceDuplicateRlx.java

@Test
public void uniqueChangeRLXUnderLoad() throws Exception {
    logger.info("uniqueChangeRLXUnderLoad started");
    SecurityContextHolder.getContext().setAuthentication(authMike);
    regService.registerSystemUser(new RegistrationBean("TestTrack", mike, "bah").setIsUnique(false));

    int auditMax = 10;
    int logMax = 10;
    int fortress = 1;
    String simpleJson = "{\"who\":";
    ArrayList<Long> list = new ArrayList<>();

    logger.info("FortressCount: " + fortressMax + " AuditCount: " + auditMax + " LogCount: " + logMax);
    logger.info(
            "We will be expecting a total of " + (auditMax * logMax * fortressMax) + " messages to be handled");

    StopWatch watch = new StopWatch();
    watch.start();
    double splitTotals = 0;
    long totalRows = 0;
    int auditSleepCount; // Discount all the time we spent sleeping

    DecimalFormat f = new DecimalFormat("##.000");

    while (fortress <= fortressMax) {
        String fortressName = "bulkloada" + fortress;
        int audit = 1;
        long requests = 0;
        auditSleepCount = 0;

        Fortress iFortress = fortressService.registerFortress(new FortressInputBean(fortressName, true));
        requests++;
        logger.info("Starting run for " + fortressName);
        while (audit <= auditMax) {
            MetaInputBean aib = new MetaInputBean(iFortress.getName(), fortress + "olivia@sunnybell.com",
                    "CompanyNode", new DateTime(), "ABC" + audit);
            TrackResultBean arb = trackEP.trackHeader(aib, null, null).getBody();
            requests++;
            int log = 1;
            while (log <= logMax) {
                createLog(simpleJson, aib, arb, log);
                requests++;
                log++;
            } // Logs created
            audit++;
        } // Audit headers finished with
        watch.split();
        double fortressRunTime = (watch.getSplitTime() - auditSleepCount) / 1000d;
        logger.info("*** " + iFortress.getName() + " took " + fortressRunTime + "  avg processing time for ["
                + requests + "] RPS= " + f.format(fortressRunTime / requests) + ". Requests per second "
                + f.format(requests / fortressRunTime));

        splitTotals = splitTotals + fortressRunTime;
        totalRows = totalRows + requests;
        watch.reset();
        watch.start();
        list.add(iFortress.getId());
        fortress++;
    }

    logger.info("*** Created data set in " + f.format(splitTotals) + " fortress avg = "
            + f.format(splitTotals / fortressMax) + " avg processing time per request "
            + f.format(splitTotals / totalRows) + ". Requests per second " + f.format(totalRows / splitTotals));
    watch.reset();
}
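
The test above leans on StopWatch's lap-timing support: split() marks a point in time, getSplitTime() reads the elapsed time at that mark, and reset() clears the watch before it is restarted for the next fortress. A minimal sketch of that pattern, assuming only the commons-lang API:

StopWatch watch = new StopWatch();
watch.start();
// ... first batch of work ...
watch.split();
long firstBatchMs = watch.getSplitTime(); // elapsed time at the split point
watch.reset();                            // clear state before the next batch
watch.start();
// ... second batch of work ...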

From source file:com.liferay.portal.search.solr.internal.SolrIndexSearcher.java

@Override
public Hits search(SearchContext searchContext, Query query) throws SearchException {

    StopWatch stopWatch = new StopWatch();

    stopWatch.start();

    try {
        int total = (int) searchCount(searchContext, query);

        int start = searchContext.getStart();
        int end = searchContext.getEnd();

        if ((start == QueryUtil.ALL_POS) && (end == QueryUtil.ALL_POS)) {
            start = 0;
            end = total;
        }

        int[] startAndEnd = SearchPaginationUtil.calculateStartAndEnd(start, end, total);

        start = startAndEnd[0];
        end = startAndEnd[1];

        Hits hits = doSearchHits(searchContext, query, start, end);

        hits.setStart(stopWatch.getStartTime());

        return hits;
    } catch (Exception e) {
        if (_log.isWarnEnabled()) {
            _log.warn(e, e);
        }

        if (!_swallowException) {
            throw new SearchException(e.getMessage(), e);
        }

        return new HitsImpl();
    } finally {
        if (_log.isInfoEnabled()) {
            stopWatch.stop();

            _log.info("Searching " + query.toString() + " took " + stopWatch.getTime() + " ms");
        }
    }
}

From source file:au.id.hazelwood.xmltvguidebuilder.grabber.Grabber.java

private void addListing(ChannelListings channelListings, Config config, ChannelConfig channelConfig,
        String channelTag, DateTime from, DateTime to) {
    StopWatch watch = new StopWatch();
    watch.start();
    List<Event> events = getEvents(config, channelTag, from, to);
    LOGGER.debug("Found {} events between {} and {}", events.size(), from, to);
    for (Event event : events) {
        ProgrammeDetail programmeDetails;
        LOGGER.debug("Processing event id {}", event.getEventId());
        DateTime scheduleDate = new DateTime(event.getScheduledDate());
        if (scheduleDate.plusMinutes(event.getDuration()).isAfterNow()
                && scheduleDate.isBefore(from.plusDays(config.getSynopsis()))) {
            LOGGER.debug("Fetching detailed synopsis for {}", event.getEventId());
            try {
                EventDetails eventDetails = getForObject(DETAILS_URI, EventDetails.class, event.getEventId(),
                        config.getRegionId());
                programmeDetails = createProgrammeDetails(eventDetails.getEvent());
            } catch (Exception e) {
                LOGGER.debug("Failed to get detailed synopsis. Using basic details instead.");
                programmeDetails = createProgrammeDetails(event);
            }
        } else {
            programmeDetails = createProgrammeDetails(event);
        }
        channelListings.addProgram(channelConfig.getId(), programmeDetails);
    }
    watch.stop();
    String numberOfPrograms = String.valueOf(channelListings.getNumberOfPrograms(channelConfig.getId()));
    LOGGER.info("{} {} events [took {}]", rightPad(channelConfig.getId() + " - " + channelConfig.getName(), 40),
            leftPad(numberOfPrograms, 4), formatDurationWords(watch.getTime()));
}

From source file:com.microsoft.exchange.integration.ImpersonationClientIntegrationTest.java

/**
 * Similar to {@link #testGetUserAvailability()}, but uses {@link FindItem}.
 *
 * @throws JAXBException
 */
@Test
public void testFindMoreDetailedItemCalendarType() throws JAXBException {
    initializeCredentials();
    FindItem request = constructFindItemRequest(DateHelper.makeDate("2012-10-12"),
            DateHelper.makeDate("2012-10-13"), emailAddress);
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    FindItemResponse response = ewsClient.findItem(request);
    String captured = capture(response);
    log.info("testFindMoreDetailedItemCalendarType response: " + captured);
    stopWatch.stop();
    log.debug("FindItem request completed in " + stopWatch);
    Assert.assertNotNull(response);
    Assert.assertEquals(expectedEventCount, response.getResponseMessages()
            .getCreateItemResponseMessagesAndDeleteItemResponseMessagesAndGetItemResponseMessages().size());
}

From source file:com.mothsoft.alexis.engine.predictive.OpenNLPMaxentModelTrainerTask.java

@Transactional
@Override
public void execute() {
    final Long modelId = findAndMark();

    if (modelId != null) {
        logger.info(String.format("Training model %d", modelId));

        final StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        final Model model = this.modelDao.get(modelId);
        train(model);
        stopWatch.stop();

        logger.info(String.format("Training model %d took: %s", modelId, stopWatch.toString()));
    }
}