Example usage for org.springframework.util.StopWatch: StopWatch()

Introduction

This page lists example usages of the org.springframework.util.StopWatch default constructor, StopWatch(), collected from open-source projects.

Prototype

public StopWatch() 

Document

Construct a new StopWatch.
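
As a quick orientation before the project examples, here is a minimal sketch of the no-argument constructor in isolation. The class name StopWatchSketch and the task names "load" and "process" are illustrative only and not taken from any of the source files; the StopWatch calls themselves (start, stop, getTotalTimeMillis, prettyPrint) are the standard Spring API used in the usages that follow.

import org.springframework.util.StopWatch;

public class StopWatchSketch {

    public static void main(String[] args) throws InterruptedException {
        // Construct a new StopWatch; nothing is timed until start() is called.
        StopWatch watch = new StopWatch();

        // Time a named task.
        watch.start("load");
        Thread.sleep(50); // stand-in for real work
        watch.stop();

        // Time a second task on the same watch.
        watch.start("process");
        Thread.sleep(20);
        watch.stop();

        // Report the total across all tasks and a per-task breakdown.
        System.out.println("Total ms: " + watch.getTotalTimeMillis());
        System.out.println(watch.prettyPrint());
    }
}

The usages below follow the same pattern: construct, start (optionally with a task name), stop, and then read the timings via getTotalTimeMillis(), getLastTaskTimeMillis(), shortSummary(), or prettyPrint().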

Usage

From source file:com.surevine.alfresco.audit.SpringAuditFilterBean.java

/**
 * {@inheritDoc}
 */
public void doFilter(final ServletRequest request, final ServletResponse response,
        final FilterChain filterChain) throws IOException, ServletException {

    HttpServletRequest httpServletRequest = null;
    BufferedHttpServletResponse httpServletResponse = null;

    if (request instanceof HttpServletRequest && response instanceof HttpServletResponse) {
        httpServletRequest = (HttpServletRequest) request;
    } else {
        throw new ServletException(
                new IllegalArgumentException("Invalid request or response parameter provided."));
    }

    String method = httpServletRequest.getMethod();

    // Only override current HttpServletRequest with custom implementation if post data
    // will be read.
    if (MULTI_READ_HTTP_METHODS.contains(method)) {
        httpServletRequest = new MultiReadHttpServletRequest(httpServletRequest);
    }

    // Now iterate over each of the installed listeners searching to see if, firstly the http methods match
    // and secondly that an event is fired.
    for (AuditEventListener listener : listeners) {

        if (listener.getMethod().equals(method) && listener.isEventFired(httpServletRequest)) {

            // Need to allow the output to be read twice from the response
            httpServletResponse = new BufferedHttpServletResponse((HttpServletResponse) response);

            List<Auditable> itemsToAudit = null;
            String username = null;
            try {

                HttpSession sess = httpServletRequest.getSession();
                SessionUser user = (SessionUser) sess.getAttribute(AuthenticationHelper.AUTHENTICATION_USER);
                if (user != null) {
                    username = user.getUserName();
                }

                // Used to track total processing time for the request being audited
                StopWatch timer = new StopWatch();

                // There are certain listener types where we have to construct the audit items prior to passing to
                // the filter
                // chain.
                if (listener instanceof GetAuditEventListener
                        || listener instanceof MultiDocumentDeleteAuditEventListener
                        || listener instanceof SafeMoveDocumentAuditEventListener
                        || listener instanceof UnlockDocumentAuditEventListener
                        || listener instanceof UndeleteAuditEventListener
                        || listener instanceof ImmediateArchiveAuditEventListener) {
                    itemsToAudit = listener.populateAuditItems(httpServletRequest, httpServletResponse);

                    // Continue with the filter chain
                    timer.start();
                    filterChain.doFilter(httpServletRequest, httpServletResponse);
                    timer.stop();

                    // Calling finish on the response will release the output stream back to the client.
                    httpServletResponse.finish();

                } else {
                    // Populate the audit item after the filter chain has been run
                    timer.start();
                    filterChain.doFilter(httpServletRequest, httpServletResponse);
                    timer.stop();

                    // Calling finish on the response will release the output stream back to the client.
                    httpServletResponse.finish();

                    itemsToAudit = listener.populateAuditItems(httpServletRequest, httpServletResponse);

                }

                for (Auditable audit : itemsToAudit) {
                    listener.decideSuccess(httpServletResponse, audit);
                    audit.setUser(username);
                    audit.setTimeSpent(timer.getTotalTimeMillis());
                    repository.audit(audit);
                }

            } catch (JSONException e) {
                logger.error("JSONException caught during audit, " + e.getMessage());
                throw new ServletException(e);
            } catch (AlfrescoRuntimeException alfrescoRuntime) {
                logger.error("AlfrescoRuntimeException caught during audit, " + alfrescoRuntime.getMessage());
                throw new ServletException(alfrescoRuntime);
            } catch (DataIntegrityViolationException e) {
                logger.error("Data Integrity Exception caught during audit, " + e.getMessage());
                throw new ServletException(
                        "A Data Integrity Violation occurred. Please see the logs for more information");

            } catch (Exception e) {

                logger.error("Exception caught during audit " + e.getMessage());
                throw new ServletException(e);
            }

            return;
        }
    }

    // If we fall out here there was no auditable event so simply complete the filter chain.
    // And we won't have tinkered with the response object so just add the servletresponse
    // as the parameter.
    filterChain.doFilter(httpServletRequest, response);
}

From source file:org.dd4t.core.filters.impl.DefaultLinkResolverFilter.java

protected void resolveComponentLinkField(ComponentLinkField componentLinkField) {

    StopWatch stopWatch = null;
    if (logger.isDebugEnabled()) {
        stopWatch = new StopWatch();
        stopWatch.start();
    }
    List<Object> compList = componentLinkField.getValues();

    for (Object component : compList) {
        resolveComponent((GenericComponent) component);
    }

    if (logger.isDebugEnabled()) {
        stopWatch.stop();
        logger.debug("Resolved componentLinkField '" + componentLinkField.getName() + "' in "
                + stopWatch.getTotalTimeMillis() + " ms.");
    }

}

From source file:edu.isistan.carcha.lsa.TraceabilityComparator.java

/**
 * Run.
 *
 * @param builder the vector builder used to construct vectors from the sspace
 * @param reqConcerns the requirement concerns
 * @param archConcerns the architectural concerns
 */
@SuppressWarnings("unused")
private void run(DocumentVectorBuilder builder, List<String> reqConcerns, List<String> archConcerns) {
    StopWatch sw = new StopWatch();
    sw.start("Start the traceability comparison");
    init();
    int i = 0;
    int count = reqConcerns.size();
    this.untracedCount = 0;
    for (String lineForVector1 : reqConcerns) {
        Entity req = Entity.buildFromString(lineForVector1, NodeType.CC);
        addNode(req);
        //create vector 1
        DoubleVector vector1 = new CompactSparseVector();
        vector1 = builder.buildVector(new BufferedReader(new StringReader(req.getFormattedLabel())), vector1);
        boolean hasTrace = false;
        for (String lineForVector2 : archConcerns) {
            Entity arch = Entity.buildFromString(lineForVector2, NodeType.DD);
            addNode(arch);
            //create vector 2
            DoubleVector vector2 = new CompactSparseVector();
            vector2 = builder.buildVector(new BufferedReader(new StringReader(arch.getFormattedLabel())),
                    vector2);

            //Math round is WAY faster than DoubleFormat
            Double linkWeight = ((double) Math.round(Similarity.cosineSimilarity(vector1, vector2) * 1000)
                    / 1000);

            //add the edge between the two nodes including the calculated weight
            if (linkWeight > threshold) {
                addEdge(req, arch, linkWeight);
                hasTrace = true;
            }
        }
        if (!hasTrace) {
            this.untracedCount++;
        }
    }
    sw.stop();
    logger.info(sw.shortSummary());
    String filename = saveGraph();
}

From source file:org.nebulaframework.grid.Grid.java

/**
 * Starts a {@link GridNode} with default settings, read from
 * default properties file.
 * 
 * @param useConfigDiscovery indicates whether to use information
 * from the configuration to discover the cluster
 * 
 * @return GridNode
 * 
 * @throws IllegalStateException if a Grid Member (Cluster / Node) has
 * already started within the current VM. Nebula supports only one Grid
 * Member per VM.
 */
public synchronized static GridNode startGridNode(boolean useConfigDiscovery) throws IllegalStateException {

    if (isInitialized()) {
        // A Grid Member has already started in this VM
        throw new IllegalStateException("A Grid Member Already Started in VM");
    }

    initializeDefaultExceptionHandler();

    StopWatch sw = new StopWatch();

    try {

        sw.start();

        // Set Security Manager
        System.setSecurityManager(new SecurityManager());

        // Detect Configuration
        Properties config = ConfigurationSupport.detectNodeConfiguration();

        log.info("GridNode Attempting Discovery...");

        // Discover Cluster If Needed
        GridNodeDiscoverySupport.discover(config, useConfigDiscovery);

        checkJMSBroker(config.getProperty(ConfigurationKeys.CLUSTER_SERVICE.value()));

        log.debug("Starting up Spring Container...");

        applicationContext = new NebulaApplicationContext(GRIDNODE_CONTEXT, config);

        log.debug("Spring Container Started");

        node = true;

        sw.stop();

        log.info("GridNode Started Up. " + sw.getLastTaskTimeMillis() + " ms");

        return (GridNode) applicationContext.getBean("localNode");
    } finally {
        if (sw.isRunning()) {
            sw.stop();
        }
    }
}

From source file:org.dd4t.core.filters.impl.DefaultLinkResolverFilter.java

protected void resolveXhtmlField(XhtmlField xhtmlField) {

    StopWatch stopWatch = null;
    if (logger.isDebugEnabled()) {
        stopWatch = new StopWatch();
        stopWatch.start();
    }

    List<Object> xhtmlValues = xhtmlField.getValues();
    List<String> newValues = new ArrayList<String>();

    if (useXslt) {
        // find all component links and try to resolve them
        for (Object xhtmlValue : xhtmlValues) {
            String result = xslTransformer.transformSourceFromFilesource(
                    "<ddtmproot>" + (String) xhtmlValue + "</ddtmproot>", "/resolveXhtmlWithLinks.xslt",
                    params);
            newValues.add(XSLTPattern.matcher(result).replaceAll(""));
        }
    } else {
        // find all component links and try to resolve them
        for (Object xhtmlValue : xhtmlValues) {

            Matcher m = RegExpPattern.matcher((String) xhtmlValue);

            StringBuffer sb = new StringBuffer();
            String resolvedLink = null;
            while (m.find()) {
                resolvedLink = getLinkResolver().resolve(m.group(1));
                // if not possible to resolve the link do nothing
                if (resolvedLink != null) {
                    m.appendReplacement(sb, "href=\"" + resolvedLink + "\"");
                }
            }
            m.appendTail(sb);
            newValues.add(sb.toString());
        }

    }

    xhtmlField.setTextValues(newValues);

    if (logger.isDebugEnabled()) {
        stopWatch.stop();
        logger.debug("Parsed rich text field '" + xhtmlField.getName() + "' in "
                + stopWatch.getTotalTimeMillis() + " ms.");
    }
}

From source file:com.auditbucket.client.Importer.java

static long processXMLFile(String file, AbRestClient abExporter, XmlMappable mappable, boolean simulateOnly)
        throws ParserConfigurationException, IOException, SAXException, JDOMException, DatagioException {
    try {
        long rows = 0;
        StopWatch watch = new StopWatch();
        StreamSource source = new StreamSource(file);
        XMLInputFactory xif = XMLInputFactory.newFactory();
        XMLStreamReader xsr = xif.createXMLStreamReader(source);
        mappable.positionReader(xsr);
        List<CrossReferenceInputBean> referenceInputBeans = new ArrayList<>();

        String docType = mappable.getDataType();
        watch.start();
        try {
            long then = new DateTime().getMillis();
            while (xsr.getLocalName().equals(docType)) {
                XmlMappable row = mappable.newInstance(simulateOnly);
                String json = row.setXMLData(xsr);
                MetaInputBean header = (MetaInputBean) row;
                if (!header.getCrossReferences().isEmpty()) {
                    referenceInputBeans.add(new CrossReferenceInputBean(header.getFortress(),
                            header.getCallerRef(), header.getCrossReferences()));
                    rows = rows + header.getCrossReferences().size();
                }
                LogInputBean logInputBean = new LogInputBean("system", new DateTime(header.getWhen()), json);
                header.setLog(logInputBean);
                //logger.info(json);
                xsr.nextTag();
                writeAudit(abExporter, header, mappable.getClass().getCanonicalName());
                rows++;
                if (rows % 500 == 0 && !simulateOnly)
                    logger.info("Processed {} elapsed seconds {}", rows,
                            (new DateTime().getMillis() - then) / 1000d);

            }
        } finally {
            abExporter.flush(mappable.getClass().getCanonicalName(), mappable.getABType());
        }
        if (!referenceInputBeans.isEmpty()) {
            logger.debug("Wrote [{}] cross references",
                    writeCrossReferences(abExporter, referenceInputBeans, "Cross References"));
        }
        return endProcess(watch, rows);

    } catch (XMLStreamException | JAXBException e1) {
        throw new IOException(e1);
    }
}

From source file:org.nebulaframework.grid.Grid.java

/**
 * Starts a Light-weight {@link GridNode} (a GridNode without
 * Job Execution Support, that is non-worker) with default
 * settings, read from default properties file.
 * 
 * @param useConfigDiscovery indicates whether to use information
 * from the configuration to discover the cluster
 * 
 * @param isGui indicates that the application is a GUI based
 * application and any disconnection notifications should be
 * done through message boxes.
 * 
 * @return GridNode
 * 
 * @throws IllegalStateException if a Grid Member (Cluster / Node) has
 * already started within the current VM. Nebula supports only one Grid
 * Member per VM.
 */
public synchronized static GridNode startLightGridNode(boolean useConfigDiscovery, final boolean isGui)
        throws IllegalStateException {

    if (isInitialized()) {
        // A Grid Member has already started in this VM
        throw new IllegalStateException("A Grid Member Already Started in VM");
    }

    initializeDefaultExceptionHandler();

    StopWatch sw = new StopWatch();

    try {
        sw.start();

        // Set Security Manager
        System.setSecurityManager(new SecurityManager());

        Properties config = ConfigurationSupport.detectNodeConfiguration();

        log.info("GridNode Attempting Discovery...");

        // Discover Cluster If Needed
        GridNodeDiscoverySupport.discover(config, useConfigDiscovery);

        checkJMSBroker(config.getProperty(ConfigurationKeys.CLUSTER_SERVICE.value()));

        // If we reach here, connection test succeeded

        log.debug("Starting up Spring Container...");

        applicationContext = new NebulaApplicationContext(GRIDNODE_LIGHT_CONTEXT, config);

        log.debug("Spring Container Started");

        node = true;
        lightweight = true;

        sw.stop();
        log.info("GridNode Started Up. " + sw.getLastTaskTimeMillis() + " ms");

        GridNode node = (GridNode) applicationContext.getBean("localNode");
        ServiceEventsSupport.addServiceHook(new ServiceHookCallback() {

            @Override
            public void onServiceEvent(ServiceMessage message) {

                log.warn("[GridNode] Disconnected from Cluster");
                log.warn("[GridNode] Shutting Down");

                if (isGui) {
                    JOptionPane.showMessageDialog(UISupport.activeWindow(),
                            "Disconnected from Cluster, terminating VM");
                }
                System.exit(0);
            }

        }, node.getClusterId().toString(), ServiceMessageType.NODE_DISCONNECTED);

        return node;

    } finally {
        if (sw.isRunning()) {
            sw.stop();
        }
    }
}

From source file:com.auditbucket.client.Importer.java

static long processCSVFile(String file, AbRestClient abExporter, DelimitedMappable mappable, int skipCount,
        boolean simulateOnly)
        throws IOException, IllegalAccessException, InstantiationException, DatagioException {

    StopWatch watch = new StopWatch();
    DelimitedMappable row = mappable.newInstance(simulateOnly);
    int rows = 0;

    BufferedReader br;
    br = new BufferedReader(new FileReader(file));
    try {
        CSVReader csvReader = new CSVReader(br, row.getDelimiter());

        String[] headerRow = null;
        String[] nextLine;
        if (mappable.hasHeader()) {
            while ((nextLine = csvReader.readNext()) != null) {
                if (!((nextLine[0].charAt(0) == '#') || nextLine[0].charAt(1) == '#')) {
                    headerRow = nextLine;
                    break;
                }
            }
        }
        watch.start();
        AbRestClient.type type = mappable.getABType();

        while ((nextLine = csvReader.readNext()) != null) {
            if (!nextLine[0].startsWith("#")) {
                rows++;
                if (rows >= skipCount) {
                    if (rows == skipCount)
                        logger.info("Starting to process from row {}", skipCount);
                    row = mappable.newInstance(simulateOnly);

                    String jsonData = row.setData(headerRow, nextLine);
                    //logger.info(jsonData);
                    if (type == AbRestClient.type.AUDIT) {
                        MetaInputBean header = (MetaInputBean) row;

                        if (!"".equals(jsonData)) {
                            jsonData = jsonData.replaceAll("[\\x00-\\x09\\x11\\x12\\x14-\\x1F\\x7F]", "");
                            LogInputBean logInputBean = new LogInputBean("system", new DateTime(), jsonData);
                            header.setLog(logInputBean);
                        } else {
                            // It's all Meta baby - no track information
                        }
                        writeAudit(abExporter, header, mappable.getClass().getCanonicalName());
                    } else {// Tag
                        if (!"".equals(jsonData)) {
                            TagInputBean tagInputBean = (TagInputBean) row;
                            logger.info(tagInputBean.toString());
                            writeTag(abExporter, tagInputBean, mappable.getClass().getCanonicalName());
                        }
                    }
                    if (rows % 500 == 0) {
                        if (!simulateOnly)
                            logger.info("Processed {} ", rows);
                    }
                }
            } else {
                if (rows % 500 == 0 && !simulateOnly)
                    logger.info("Skipping {} of {}", rows, skipCount);
            }
        }
    } finally {
        abExporter.flush(mappable.getClass().getCanonicalName(), mappable.getABType());
        br.close();
    }

    return endProcess(watch, rows);
}

From source file:com.github.totyumengr.minicubes.core.MiniCubeTest.java

@Test
public void test_5_1_Distinct_20140606() throws Throwable {

    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    Map<String, List<Integer>> filter = new HashMap<String, List<Integer>>(1);
    Map<Integer, RoaringBitmap> distinct = miniCube.distinct("postId", true, "tradeId", filter);
    stopWatch.stop();

    Assert.assertEquals(210, distinct.size());
    Assert.assertEquals(3089, distinct.get(1601).getCardinality());
    Assert.assertEquals(1825, distinct.get(1702).getCardinality());
    Assert.assertEquals(2058, distinct.get(-2).getCardinality());

    LOGGER.info(stopWatch.getTotalTimeSeconds() + " used for distinct result {}", distinct.toString());
}

From source file:com.ethlo.kfka.KfkaApplicationTests.java

@Test
public void testPerformance1() throws InterruptedException {
    kfkaManager.clearAll();

    final int count = 10_000;
    final StopWatch sw = new StopWatch();
    sw.start("Insert1");
    for (int i = 1; i <= count; i++) {
        kfkaManager.add(new CustomKfkaMessageBuilder().userId(321).payload("otherMessage" + i)
                .timestamp(System.currentTimeMillis()).topic("bar").type("mytype").build());
    }
    sw.stop();

    sw.start("Insert2");
    for (int i = 1; i <= count; i++) {
        kfkaManager.add(new CustomKfkaMessageBuilder().userId(123).payload("myMessage" + 1)
                .timestamp(System.currentTimeMillis()).topic("bar").type("mytype").build());
    }
    sw.stop();

    sw.start("Query");
    final CollectingListener collListener = new CollectingListener();
    kfkaManager.addListener(collListener,
            new KfkaPredicate().topic("bar").relativeOffset(-(count + 10)).addPropertyMatch("userId", 123));
    sw.stop();
    assertThat(collListener.getReceived()).hasSize(count);
    assertThat(collListener.getReceived().get(0).getId()).isEqualTo(count + 1);
    assertThat(collListener.getReceived().get(count - 1).getId()).isEqualTo(count + count);
    logger.info("Timings: {}", sw);
}