List of usage examples for org.springframework.util.StopWatch.prettyPrint()
public String prettyPrint()
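Every example below follows the same pattern: create a named StopWatch, bracket each timed step with start(taskName) and stop(), then log the summary string returned by prettyPrint(). The following is a minimal, self-contained sketch of that pattern; the class name, task names, and sleep durations are illustrative only and not taken from the examples below.

import org.springframework.util.StopWatch;

public class StopWatchPrettyPrintExample {
    public static void main(String[] args) throws InterruptedException {
        StopWatch stopWatch = new StopWatch("example");

        stopWatch.start("load data");    // first named task
        Thread.sleep(100);               // stand-in for real work
        stopWatch.stop();

        stopWatch.start("process data"); // second named task
        Thread.sleep(50);
        stopWatch.stop();

        // prettyPrint() returns a multi-line summary: the total running time
        // followed by a table with one row per task (time and share of total).
        System.out.println(stopWatch.prettyPrint());
    }
}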
From source file: fr.acxio.tools.agia.alfresco.configuration.SimpleNodeFactoryTest.java
@Test
public void testEvaluationGetNodes() throws Exception {
    FolderDefinition aDefaultNodeDefinition = NodeFactoryUtils.createFolderDefinition("test", null, null, null);
    FolderDefinition aFolder1NodeDefinition = NodeFactoryUtils.createFolderDefinition("@{#in['level1']}", null,
            "@{#in['level1'].equals('s1')}", "@{#in['customfoldertype']}");
    FolderDefinition aFolder2NodeDefinition = NodeFactoryUtils.createFolderDefinition("@{#in['level2']}", null,
            "@{#in['level1'].equals('s2')}", null);
    FolderDefinition aFolder3NodeDefinition = NodeFactoryUtils.createFolderDefinition("@{#in['date']}", null,
            "@{#in['level2'].equals('s2')}", null);
    aDefaultNodeDefinition.addFolder(aFolder1NodeDefinition);
    aDefaultNodeDefinition.addFolder(aFolder2NodeDefinition);
    aDefaultNodeDefinition.addFolder(aFolder3NodeDefinition);

    DocumentDefinition aDocument1Definition = NodeFactoryUtils.createDocumentDefinition("my_@{#in['content1']}",
            "Some @{#in['content1']} title", "@{#in['path1']}", "@{#in['encoding']}", "@{#in['mimetype']}",
            "@{#in['customdoctype']}");
    DocumentDefinition aDocument2Definition = NodeFactoryUtils.createDocumentDefinition("my_@{#in['content2']}",
            "Some @{#in['content2']} title", "file:/otherpath/@{#in['filename2']}", null, null, null);
    DocumentDefinition aDocument3Definition = NodeFactoryUtils.createDocumentDefinition("doc3", null, null, null,
            null, null);
    aFolder1NodeDefinition.addDocument(aDocument1Definition);
    aFolder1NodeDefinition.addDocument(aDocument2Definition);
    aFolder2NodeDefinition.addDocument(aDocument3Definition);

    StopWatch aStopWatch = new StopWatch("testEvaluationGetNodes");
    aStopWatch.start("Create a node list");
    SimpleNodeFactory aNodeFactory = new SimpleNodeFactory();
    aNodeFactory.setNamespaceContext(namespaceContext);
    aNodeFactory.setNodeDefinition(aDefaultNodeDefinition);
    NodeList aNodeList = aNodeFactory.getNodes(evaluationContext);
    aStopWatch.stop();

    assertEquals(5, aNodeList.size());
    assertEquals("/cm:test", ((Folder) aNodeList.get(0)).getPath());
    assertEquals("/cm:test/custom:s1", ((Folder) aNodeList.get(1)).getPath());
    assertEquals("/cm:test/custom:s1/custom:my_doc1", ((Document) aNodeList.get(2)).getPath());
    assertEquals("/cm:test/custom:s1/cm:my_doc2", ((Document) aNodeList.get(3)).getPath());
    assertEquals("/cm:test/cm:_x0032_012-07-05", ((Folder) aNodeList.get(4)).getPath());
    // TODO : check the encoding
    assertEquals("Some doc1 title", NodeFactoryUtils
            .getPropertyValues(aNodeList.get(2), "{http://www.alfresco.org/model/content/1.0}title").get(0));
    assertEquals("Some doc2 title", NodeFactoryUtils
            .getPropertyValues(aNodeList.get(3), "{http://www.alfresco.org/model/content/1.0}title").get(0));
    assertEquals("file:/somepath/content1.pdf", ((Document) aNodeList.get(2)).getContentPath());
    assertEquals("file:/otherpath/content2.pdf", ((Document) aNodeList.get(3)).getContentPath());
    assertEquals("UTF-8", ((Document) aNodeList.get(2)).getEncoding());
    assertEquals("application/pdf", ((Document) aNodeList.get(2)).getMimeType());
    assertEquals("{http://custom}doc", ((Document) aNodeList.get(2)).getType().toString());
    assertEquals("{http://custom}folder", ((Folder) aNodeList.get(1)).getType().toString());

    System.out.println(aStopWatch.prettyPrint());
}
From source file: sample.contact.ClientApplication.java
public void invokeContactService(Authentication authentication, int nrOfCalls) {
    StopWatch stopWatch = new StopWatch(nrOfCalls + " ContactService call(s)");
    Map<String, ContactService> contactServices = this.beanFactory.getBeansOfType(ContactService.class, true,
            true);

    SecurityContextHolder.getContext().setAuthentication(authentication);

    for (String beanName : contactServices.keySet()) {
        Object object = this.beanFactory.getBean("&" + beanName);

        try {
            System.out.println("Trying to find setUsername(String) method on: " + object.getClass().getName());
            Method method = object.getClass().getMethod("setUsername", new Class[] { String.class });
            System.out.println("Found; Trying to setUsername(String) to " + authentication.getPrincipal());
            method.invoke(object, authentication.getPrincipal());
        } catch (NoSuchMethodException ignored) {
            System.out.println("This client proxy factory does not have a setUsername(String) method");
        } catch (IllegalAccessException ignored) {
            ignored.printStackTrace();
        } catch (InvocationTargetException ignored) {
            ignored.printStackTrace();
        }

        try {
            System.out.println("Trying to find setPassword(String) method on: " + object.getClass().getName());
            Method method = object.getClass().getMethod("setPassword", new Class[] { String.class });
            method.invoke(object, authentication.getCredentials());
            System.out.println("Found; Trying to setPassword(String) to " + authentication.getCredentials());
        } catch (NoSuchMethodException ignored) {
            System.out.println("This client proxy factory does not have a setPassword(String) method");
        } catch (IllegalAccessException ignored) {
        } catch (InvocationTargetException ignored) {
        }

        ContactService remoteContactService = contactServices.get(beanName);
        System.out.println("Calling ContactService '" + beanName + "'");

        stopWatch.start(beanName);
        List<Contact> contacts = null;
        for (int i = 0; i < nrOfCalls; i++) {
            contacts = remoteContactService.getAll();
        }
        stopWatch.stop();

        if (contacts.size() != 0) {
            for (Contact contact : contacts) {
                System.out.println("Contact: " + contact);
            }
        } else {
            System.out.println("No contacts found which this user has permission to");
        }

        System.out.println();
        System.out.println(stopWatch.prettyPrint());
    }

    SecurityContextHolder.clearContext();
}
From source file: org.alfresco.repo.search.impl.solr.DbOrIndexSwitchingQueryLanguage.java
private ResultSet executeHybridQuery(SearchParameters searchParameters, ADMLuceneSearcherImpl admLuceneSearcher) {
    if (indexQueryLanguage == null || dbQueryLanguage == null) {
        throw new QueryModelException("Both index and DB query language required for hybrid search [index="
                + indexQueryLanguage + ", DB=" + dbQueryLanguage + "]");
    }

    StopWatch stopWatch = new StopWatch("hybrid search");
    if (logger.isDebugEnabled()) {
        logger.debug("Hybrid search, using SOLR query: " + dbQueryLanguage.getName() + " for " + searchParameters);
    }
    stopWatch.start("index query");
    ResultSet indexResults = indexQueryLanguage.executeQuery(searchParameters, admLuceneSearcher);
    stopWatch.stop();
    if (logger.isDebugEnabled()) {
        logger.debug("SOLR query returned " + indexResults.length() + " results in "
                + stopWatch.getLastTaskTimeMillis() + "ms");
    }

    // TODO: if the results are up-to-date, then nothing more to do - return the results.
    if (!(indexResults instanceof SolrJSONResultSet)) {
        if (logger.isWarnEnabled()) {
            logger.warn("Hybrid search can only use database when SOLR is also in use. "
                    + "Skipping DB search, returning results from index.");
        }
        return indexResults;
    }

    long lastTxId = ((SolrJSONResultSet) indexResults).getLastIndexedTxId();
    searchParameters.setSinceTxId(lastTxId);
    if (logger.isDebugEnabled()) {
        logger.debug("Hybrid search, using DB query: " + dbQueryLanguage.getName() + " for " + searchParameters);
    }
    stopWatch.start("database query");
    ResultSet dbResults = dbQueryLanguage.executeQuery(searchParameters, admLuceneSearcher);
    stopWatch.stop();
    if (logger.isDebugEnabled()) {
        logger.debug("DB query returned " + dbResults.length() + " results in "
                + stopWatch.getLastTaskTimeMillis() + "ms");
    }

    // Merge result sets
    List<ChildAssociationRef> childAssocs = new ArrayList<>();
    NodeParameters nodeParameters = new NodeParameters();
    nodeParameters.setFromTxnId(lastTxId + 1);
    // TODO: setToTxnId(null) when SolrDAO behaviour is fixed.
    nodeParameters.setToTxnId(Long.MAX_VALUE);

    stopWatch.start("get changed nodes");
    List<Node> changedNodeList = solrDao.getNodes(nodeParameters, null);
    stopWatch.stop();
    if (logger.isDebugEnabled()) {
        logger.debug("Nodes changed since last indexed transaction (ID " + lastTxId + ") = "
                + changedNodeList.size() + " (took " + stopWatch.getLastTaskTimeMillis() + "ms)");
    }

    stopWatch.start("merge result sets");
    Set<NodeRef> nodeRefs = new HashSet<>(changedNodeList.size());
    for (Node n : changedNodeList) {
        nodeRefs.add(n.getNodeRef());
    }
    // Only use the SOLR results for nodes that haven't changed since indexing.
    for (ChildAssociationRef car : indexResults.getChildAssocRefs()) {
        if (!nodeRefs.contains(car.getChildRef())) {
            childAssocs.add(car);
        }
    }
    // Merge in all the database results.
    childAssocs.addAll(dbResults.getChildAssocRefs());

    ResultSet results = new ChildAssocRefResultSet(nodeService, childAssocs);
    stopWatch.stop(); // merge result sets

    if (logger.isDebugEnabled()) {
        String stats = String.format("SOLR=%d, DB=%d, total=%d", indexResults.length(), dbResults.length(),
                results.length());
        logger.debug("Hybrid search returning combined results with counts: " + stats);
        logger.debug(stopWatch.prettyPrint());
    }

    return results;
}
From source file: org.hyperic.hq.ui.action.portlet.autoDisc.ViewAction.java
public ActionForward execute(ComponentContext context, ActionMapping mapping, ActionForm form,
        HttpServletRequest request, HttpServletResponse response) throws Exception {
    HttpSession session = request.getSession();
    WebUser user = RequestUtils.getWebUser(request);
    int sessionId = user.getSessionId().intValue();
    AIQueueForm queueForm = (AIQueueForm) form;

    PageControl page = new PageControl();
    DashboardConfig dashConfig = dashboardManager
            .findDashboard((Integer) session.getAttribute(Constants.SELECTED_DASHBOARD_ID), user, authzBoss);
    ConfigResponse dashPrefs = dashConfig.getConfig();
    page.setPagesize(Integer.parseInt(dashPrefs.getValue(".dashContent.autoDiscovery.range")));

    StopWatch watch = new StopWatch();
    if (log.isDebugEnabled()) {
        watch.start("getQueue");
    }

    // always show ignored platforms and already-processed platforms
    PageList<AIPlatformValue> aiQueue = aiBoss.getQueue(sessionId, true, false, true, page);
    if (log.isDebugEnabled()) {
        watch.stop();
        log.debug(watch.prettyPrint());
    }

    List<AIPlatformWithStatus> queueWithStatus = getStatuses(sessionId, aiQueue);
    context.putAttribute("resources", queueWithStatus);

    // If the queue is empty, check to see if there are ANY agents
    // defined in HQ inventory.
    if (aiQueue.size() == 0) {
        int agentCnt = appdefBoss.getAgentCount(sessionId);
        request.setAttribute("hasNoAgents", new Boolean(agentCnt == 0));
    }

    // check every box for queue
    Integer[] platformsToProcess = new Integer[aiQueue.size()];
    List<Integer> serversToProcess = new ArrayList<Integer>();
    AIPlatformValue aiPlatform;
    AIServerValue[] aiServers;

    for (int i = 0; i < platformsToProcess.length; i++) {
        aiPlatform = aiQueue.get(i);
        platformsToProcess[i] = aiPlatform.getId();

        // Add all non-virtual servers on this platform
        aiServers = aiPlatform.getAIServerValues();
        for (int j = 0; j < aiServers.length; j++) {
            if (!BizappUtils.isAutoApprovedServer(sessionId, appdefBoss, aiServers[j])) {
                serversToProcess.add(aiServers[j].getId());
            }
        }
    }

    queueForm.setPlatformsToProcess(platformsToProcess);
    queueForm.setServersToProcess(serversToProcess);

    // clean out the return path
    SessionUtils.resetReturnPath(request.getSession());

    // Check for previous error
    // First, check for ignore error.
    Object ignoreErr = request.getSession().getAttribute(Constants.IMPORT_IGNORE_ERROR_ATTR);
    if (ignoreErr != null) {
        ActionMessage err = new ActionMessage("dash.autoDiscovery.import.ignore.Error");
        RequestUtils.setError(request, err, ActionMessages.GLOBAL_MESSAGE);
        // Only show the error once
        request.getSession().setAttribute(Constants.IMPORT_IGNORE_ERROR_ATTR, null);
    }

    // Check for import exception
    Exception exc = (Exception) request.getSession().getAttribute(Constants.IMPORT_ERROR_ATTR);
    if (exc != null) {
        request.getSession().removeAttribute(Constants.IMPORT_ERROR_ATTR);
        log.error("Failed to approve AI report", exc);
        ActionMessage err = new ActionMessage("dash.autoDiscovery.import.Error", exc);
        RequestUtils.setError(request, err, ActionMessages.GLOBAL_MESSAGE);
    }

    return null;
}
From source file: org.jahia.bin.ErrorFileDumperTest.java
@Test
public void testHighLoadDeactivation() throws InterruptedException {
    logger.info("Starting testHighLoadDeactivation test...");

    RequestLoadAverage.RequestCountProvider requestCountProvider = new RequestLoadAverage.RequestCountProvider() {
        public long getRequestCount() {
            return 100;
        }
    };
    RequestLoadAverage requestLoadAverage = new RequestLoadAverage("requestLoadAverage", requestCountProvider);
    requestLoadAverage.start();

    logger.info("Waiting for load average to reach 10...");
    while (requestLoadAverage.getOneMinuteLoad() < 10.0) {
        Thread.sleep(500);
    }

    StopWatch stopWatch = new StopWatch("testHighLoadDeactivation");
    stopWatch.start(Thread.currentThread().getName() + " generating error dumps");

    int fileCountBeforeTest = 0;
    if (todaysDirectory.exists()) {
        File[] files = todaysDirectory.listFiles();
        fileCountBeforeTest = (files == null ? 0 : files.length);
    }

    ErrorFileDumper.setHighLoadBoundary(10.0);
    ErrorFileDumper.start();

    generateExceptions();

    stopWatch.stop();
    long totalTime = stopWatch.getTotalTimeMillis();
    double averageTime = ((double) totalTime) / ((double) LOOP_COUNT);
    logger.info("Milliseconds per exception = " + averageTime);
    logger.info(stopWatch.prettyPrint());

    ErrorFileDumper.shutdown(10000L);

    RequestLoadAverage.getInstance().stop();

    int fileCountAfterTest = 0;
    if (todaysDirectory.exists()) {
        File[] files = todaysDirectory.listFiles();
        fileCountAfterTest = (files == null ? 0 : files.length);
    }

    Assert.assertEquals("File count should stay the same because high load deactivates file dumping !",
            fileCountBeforeTest, fileCountAfterTest);

    requestLoadAverage = new RequestLoadAverage("requestLoadAverage");
}
From source file: org.jahia.bin.ErrorFileDumperTest.java
@Test
public void testDumperInSequence() throws InterruptedException {
    logger.info("Starting testDumperInSequence test...");

    StopWatch stopWatch = new StopWatch("testDumperInSequence");
    stopWatch.start(Thread.currentThread().getName() + " generating error dumps");

    ErrorFileDumper.start();

    generateExceptions();

    stopWatch.stop();
    long totalTime = stopWatch.getTotalTimeMillis();
    double averageTime = ((double) totalTime) / ((double) LOOP_COUNT);
    logger.info("Milliseconds per exception = " + averageTime);
    logger.info(stopWatch.prettyPrint());

    ErrorFileDumper.shutdown(10000L);

    Assert.assertTrue("Error dump directory does not exist !", todaysDirectory.exists());
    Assert.assertTrue("Error dump directory should have error files in it !",
            todaysDirectory.listFiles().length > 0);
}
From source file: org.jahia.bin.ErrorFileDumperTest.java
@Test
public void testDumpErrorsToFilesSetting() throws InterruptedException {
    logger.info("Starting testDumpErrorsToFilesSetting test...");

    StopWatch stopWatch = new StopWatch("testDumpErrorsToFilesSetting");
    stopWatch.start(Thread.currentThread().getName() + " generating error dumps");

    ErrorFileDumper.start();
    ErrorFileDumper.setFileDumpActivated(false);

    generateExceptions();

    stopWatch.stop();
    long totalTime = stopWatch.getTotalTimeMillis();
    double averageTime = ((double) totalTime) / ((double) LOOP_COUNT);
    logger.info("Milliseconds per exception = " + averageTime);
    logger.info(stopWatch.prettyPrint());

    ErrorFileDumper.shutdown(10000L);

    SettingsBean.getInstance().setDumpErrorsToFiles(true);

    Assert.assertFalse("Error dump directory should not exist !", todaysDirectory.exists());
}
From source file: org.jahia.bin.ErrorFileDumperTest.java
@Test
public void testDumperInParallel() throws IOException, InterruptedException {
    logger.info("Starting testDumperInParallel test...");

    StopWatch stopWatch = new StopWatch("testDumperInParallel");
    stopWatch.start(Thread.currentThread().getName() + " generating error dumps");

    ErrorFileDumper.start();

    threadSet.clear();
    for (int i = 0; i < THREAD_COUNT; i++) {
        Thread newThread = new Thread(new Runnable() {
            public void run() {
                generateExceptions();
            }
        }, "ErrorFileDumperTestThread" + i);
        threadSet.add(newThread);
        newThread.start();
    }

    logger.info("Waiting for dumps to be processed...");

    for (Thread curThread : threadSet) {
        curThread.join();
    }

    ErrorFileDumper.shutdown(10000L);

    stopWatch.stop();
    long totalTime = stopWatch.getTotalTimeMillis();
    double averageTime = ((double) totalTime) / ((double) LOOP_COUNT);
    logger.info("Milliseconds per exception = " + averageTime);
    logger.info(stopWatch.prettyPrint());

    Assert.assertTrue("Error dump directory does not exist !", todaysDirectory.exists());
    Assert.assertTrue("Error dump directory should have error files in it !",
            todaysDirectory.listFiles().length > 0);
}
From source file: org.jahia.bin.ErrorFileDumperTest.java
@Test
public void testOutputSystemInfoAllInParallel() throws InterruptedException {
    logger.info("Starting testOutputSystemInfoAllInParallel test...");

    StopWatch stopWatch = new StopWatch("testDumperInParallel");
    stopWatch.start(Thread.currentThread().getName() + " generating error dumps");

    ErrorFileDumper.start();

    threadSet.clear();
    final int[] dumpLengths = new int[(int) THREAD_COUNT];
    for (int i = 0; i < THREAD_COUNT; i++) {
        final int threadCounter = i;
        Thread newThread = new Thread(new Runnable() {
            public void run() {
                // this is the call made in errors.jsp file.
                StringWriter stringWriter = new StringWriter();
                ErrorFileDumper.outputSystemInfo(new PrintWriter(stringWriter));
                dumpLengths[threadCounter] = stringWriter.toString().length();
                stringWriter = null;
            }
        }, "ErrorFileDumperTestThread" + i);
        threadSet.add(newThread);
        newThread.start();
    }

    logger.info("Waiting for dumps to be processed...");

    for (Thread curThread : threadSet) {
        curThread.join();
    }

    ErrorFileDumper.shutdown(10000L);

    stopWatch.stop();
    long totalTime = stopWatch.getTotalTimeMillis();
    double averageTime = ((double) totalTime) / ((double) LOOP_COUNT);
    logger.info("Milliseconds per exception = " + averageTime);
    logger.info(stopWatch.prettyPrint());

    for (int dumpLength : dumpLengths) {
        Assert.assertTrue("System info dump is empty", dumpLength > 0);
    }
}
From source file: org.jahia.tools.jvm.ThreadMonitorTestIT.java
private void runParallelTest(String testName, Runnable runnable) throws InterruptedException {
    StopWatch stopWatch = new StopWatch(testName);
    stopWatch.start(Thread.currentThread().getName() + " dumping thread info");

    threadSet.clear();

    ThreadMonitor.getInstance().setActivated(true);
    ThreadMonitor.getInstance().setDebugLogging(enabledDebugLogging);
    ThreadMonitor.getInstance().setMinimalIntervalBetweenDumps(minimalIntervalBetweenDumps);

    for (int i = 0; i < THREAD_COUNT; i++) {
        Thread newThread = new Thread(runnable, testName + i);
        threadSet.add(newThread);
        Thread.yield();
        Thread.sleep(50);
        newThread.start();
    }

    logger.info("Waiting for test completion...");
    Thread.yield();
    while (ThreadMonitor.getInstance().isDumping()) {
        Thread.sleep(100);
    }

    for (Thread curThread : threadSet) {
        curThread.join();
    }

    ThreadMonitor.shutdownInstance();

    stopWatch.stop();
    logger.info(stopWatch.prettyPrint());

    Thread.sleep(minimalIntervalBetweenDumps * 2);
}