Usage examples for org.apache.commons.lang.time.StopWatch#start()
public void start()
Start the stopwatch.
This method starts a new timing session, clearing any previous values.
From source file:com.nridge.connector.fs.con_fs.core.FileCrawler.java
private void processCSVFile(Path aPath, BasicFileAttributes aFileAttributes, String aViewURL) throws IOException { String docId;// ww w.j av a 2s. c o m StopWatch stopWatch; Document fsDocument; Logger appLogger = mAppMgr.getLogger(this, "processCSVFile"); appLogger.trace(mAppMgr.LOGMSG_TRACE_ENTER); File fsFile = aPath.toFile(); String pathFileName = aPath.toAbsolutePath().toString(); appLogger.debug(String.format("Processing CSV File: %s", pathFileName)); CSVDocument csvDocument = new CSVDocument(mAppMgr, mBag); csvDocument.open(pathFileName); int row = 1; DataBag csvBag = csvDocument.extractNext(); while (csvBag != null) { stopWatch = new StopWatch(); stopWatch.start(); docId = csvBag.generateUniqueHash(true); appLogger.debug(String.format(" Expanding Row [%d]: %s", row++, docId)); csvBag.setValueByName("nsd_id", mIdValuePrefix + docId); csvBag.setValueByName("nsd_url", fsFile.toURI().toURL().toString()); csvBag.setValueByName("nsd_url_view", aViewURL); csvBag.setValueByName("nsd_url_display", aViewURL); csvBag.setValueByName("nsd_file_name", fsFile.getName()); csvBag.setValueByName("nsd_mime_type", Content.CONTENT_TYPE_TXT_CSV); FileTime creationTime = aFileAttributes.creationTime(); Date cDate = new Date(creationTime.toMillis()); csvBag.setValueByName("nsd_doc_created_ts", cDate); FileTime lastModifiedTime = aFileAttributes.lastModifiedTime(); Date lmDate = new Date(lastModifiedTime.toMillis()); csvBag.setValueByName("nsd_doc_modified_ts", lmDate); csvBag.setValueByName("nsd_crawl_type", mCrawlQueue.getCrawlType()); fsDocument = new Document(Constants.FS_DOCUMENT_TYPE, csvBag); csvBag.setValueByName("nsd_doc_hash", fsDocument.generateUniqueHash(false)); saveAddQueueDocument(fsDocument, stopWatch); csvBag = csvDocument.extractNext(); } csvDocument.close(); appLogger.trace(mAppMgr.LOGMSG_TRACE_DEPART); }
From source file:com.ecyrd.jspwiki.ReferenceManager.java
/** * Reads the serialized data from the disk back to memory. * Returns the date when the data was last written on disk *///from w ww . j a v a2 s . c om @SuppressWarnings("unchecked") private synchronized long unserializeFromDisk() throws IOException, ClassNotFoundException { ObjectInputStream in = null; long saved = 0L; try { StopWatch sw = new StopWatch(); sw.start(); File f = new File(m_engine.getWorkDir(), SERIALIZATION_FILE); in = new ObjectInputStream(new BufferedInputStream(new FileInputStream(f))); long ver = in.readLong(); if (ver != serialVersionUID) { throw new IOException("File format has changed; I need to recalculate references."); } saved = in.readLong(); m_refersTo = (Map) in.readObject(); m_referredBy = (Map) in.readObject(); in.close(); m_unmutableReferredBy = Collections.unmodifiableMap(m_referredBy); m_unmutableRefersTo = Collections.unmodifiableMap(m_refersTo); sw.stop(); log.debug("Read serialized data successfully in " + sw); } finally { if (in != null) in.close(); } return saved; }
From source file:com.ecyrd.jspwiki.ReferenceManager.java
/** * Serializes hashmaps to disk. The format is private, don't touch it. *///w w w .j a v a 2s . co m private synchronized void serializeAttrsToDisk(WikiPage p) { ObjectOutputStream out = null; StopWatch sw = new StopWatch(); sw.start(); try { File f = new File(m_engine.getWorkDir(), SERIALIZATION_DIR); if (!f.exists()) f.mkdirs(); // // Create a digest for the name // f = new File(f, getHashFileName(p.getName())); // FIXME: There is a concurrency issue here... Set entries = p.getAttributes().entrySet(); if (entries.size() == 0) { // Nothing to serialize, therefore we will just simply remove the // serialization file so that the next time we boot, we don't // deserialize old data. f.delete(); return; } out = new ObjectOutputStream(new BufferedOutputStream(new FileOutputStream(f))); out.writeLong(serialVersionUID); out.writeLong(System.currentTimeMillis()); // Timestamp out.writeUTF(p.getName()); out.writeLong(entries.size()); for (Iterator i = entries.iterator(); i.hasNext();) { Map.Entry e = (Map.Entry) i.next(); if (e.getValue() instanceof Serializable) { out.writeUTF((String) e.getKey()); out.writeObject(e.getValue()); } } out.close(); } catch (IOException e) { log.error("Unable to serialize!"); try { if (out != null) out.close(); } catch (IOException ex) { } } catch (NoSuchAlgorithmException e) { log.fatal("No MD5 algorithm!?!"); } finally { sw.stop(); log.debug("serialization for " + p.getName() + " done - took " + sw); } }
From source file:com.ecyrd.jspwiki.ReferenceManager.java
/** * Reads the serialized data from the disk back to memory. * Returns the date when the data was last written on disk *///from w ww .j av a2s.c om private synchronized long unserializeAttrsFromDisk(WikiPage p) throws IOException, ClassNotFoundException { ObjectInputStream in = null; long saved = 0L; try { StopWatch sw = new StopWatch(); sw.start(); // // Find attribute cache, and check if it exists // File f = new File(m_engine.getWorkDir(), SERIALIZATION_DIR); f = new File(f, getHashFileName(p.getName())); if (!f.exists()) { return 0L; } log.debug("Deserializing attributes for " + p.getName()); in = new ObjectInputStream(new BufferedInputStream(new FileInputStream(f))); long ver = in.readLong(); if (ver != serialVersionUID) { log.debug("File format has changed; cannot deserialize."); return 0L; } saved = in.readLong(); String name = in.readUTF(); if (!name.equals(p.getName())) { log.debug("File name does not match (" + name + "), skipping..."); return 0L; // Not here } long entries = in.readLong(); for (int i = 0; i < entries; i++) { String key = in.readUTF(); Object value = in.readObject(); p.setAttribute(key, value); log.debug(" attr: " + key + "=" + value); } in.close(); sw.stop(); log.debug("Read serialized data for " + name + " successfully in " + sw); p.setHasMetadata(); } catch (NoSuchAlgorithmException e) { log.fatal("No MD5!?!"); } finally { if (in != null) in.close(); } return saved; }
From source file:com.ecyrd.jspwiki.ReferenceManager.java
/** * Initializes the entire reference manager with the initial set of pages * from the collection./*ww w. j a v a 2s. c om*/ * * @param pages A collection of all pages you want to be included in the reference * count. * @since 2.2 * @throws ProviderException If reading of pages fail. */ public void initialize(Collection pages) throws ProviderException { log.debug("Initializing new ReferenceManager with " + pages.size() + " initial pages."); StopWatch sw = new StopWatch(); sw.start(); log.info("Starting cross reference scan of WikiPages"); // // First, try to serialize old data from disk. If that fails, // we'll go and update the entire reference lists (which'll take // time) // try { // // Unserialize things. The loop below cannot be combined with // the other loop below, simply because engine.getPage() has // side effects such as loading initializing the user databases, // which in turn want all of the pages to be read already... // // Yes, this is a kludge. We know. Will be fixed. // long saved = unserializeFromDisk(); for (Iterator it = pages.iterator(); it.hasNext();) { WikiPage page = (WikiPage) it.next(); unserializeAttrsFromDisk(page); } // // Now we must check if any of the pages have been changed // while we were in the electronic la-la-land, and update // the references for them. // Iterator it = pages.iterator(); while (it.hasNext()) { WikiPage page = (WikiPage) it.next(); if (page instanceof Attachment) { // Skip attachments } else { // Refresh with the latest copy page = m_engine.getPage(page.getName()); if (page.getLastModified() == null) { log.fatal("Provider returns null lastModified. Please submit a bug report."); } else if (page.getLastModified().getTime() > saved) { updatePageReferences(page); } } } } catch (Exception e) { log.info("Unable to unserialize old refmgr information, rebuilding database: " + e.getMessage()); buildKeyLists(pages); // Scan the existing pages from disk and update references in the manager. 
Iterator it = pages.iterator(); while (it.hasNext()) { WikiPage page = (WikiPage) it.next(); if (page instanceof Attachment) { // We cannot build a reference list from the contents // of attachments, so we skip them. } else { updatePageReferences(page); serializeAttrsToDisk(page); } } serializeToDisk(); } sw.stop(); log.info("Cross reference scan done in " + sw); WikiEventUtils.addWikiEventListener(m_engine.getPageManager(), WikiPageEvent.PAGE_DELETED, this); }
From source file:com.liferay.portal.security.permission.AdvancedPermissionChecker.java
protected boolean hasUserPermissionImpl(long groupId, String name, String primKey, String actionId, boolean checkAdmin) throws Exception { StopWatch stopWatch = null; if (_log.isDebugEnabled()) { stopWatch = new StopWatch(); stopWatch.start(); }//from ww w . j a v a2s .c om long companyId = user.getCompanyId(); boolean hasLayoutManagerPermission = true; // Check if the layout manager has permission to do this action for the // current portlet if ((Validator.isNotNull(name)) && (Validator.isNotNull(primKey)) && (primKey.indexOf(PortletConstants.LAYOUT_SEPARATOR) != -1)) { hasLayoutManagerPermission = PortletPermissionUtil.hasLayoutManagerPermission(name, actionId); } if (checkAdmin && (isCompanyAdminImpl(companyId) || (isGroupAdminImpl(groupId) && hasLayoutManagerPermission))) { return true; } logHasUserPermission(groupId, name, primKey, actionId, stopWatch, 1); if ((PropsValues.PERMISSIONS_USER_CHECK_ALGORITHM == 6) && ResourceBlockLocalServiceUtil.isSupported(name)) { ResourceBlockIdsBag resourceBlockIdsBag = getResourceBlockIdsBag(companyId, groupId, getUserId(), name); boolean value = ResourceBlockLocalServiceUtil.hasPermission(name, GetterUtil.getLong(primKey), actionId, resourceBlockIdsBag); logHasUserPermission(groupId, name, primKey, actionId, stopWatch, 2); return value; } List<Resource> resources = getResources(companyId, groupId, name, primKey, actionId); logHasUserPermission(groupId, name, primKey, actionId, stopWatch, 3); // Check if user has access to perform the action on the given // resource scopes. The resources are scoped to check first for an // individual class, then for the group that the class may belong // to, and then for the company that the class belongs to. PermissionCheckerBag bag = getUserBag(user.getUserId(), groupId); boolean value = PermissionLocalServiceUtil.hasUserPermissions(user.getUserId(), groupId, resources, actionId, bag); logHasUserPermission(groupId, name, primKey, actionId, stopWatch, 4); return value; }
From source file:com.liferay.portal.security.permission.AdvancedPermissionChecker.java
public boolean hasPermission(long groupId, String name, String primKey, String actionId) { StopWatch stopWatch = null; if (_log.isDebugEnabled()) { stopWatch = new StopWatch(); stopWatch.start(); }//www . j a v a 2s. c o m Group group = null; // If the current group is a staging group, check the live group. If the // current group is a scope group for a layout, check the original // group. try { if (groupId > 0) { group = GroupLocalServiceUtil.getGroup(groupId); if (group.isUser() && (group.getClassPK() == getUserId())) { group = GroupLocalServiceUtil.getGroup(getCompanyId(), GroupConstants.USER_PERSONAL_SITE); groupId = group.getGroupId(); } if (group.isLayout()) { Layout layout = LayoutLocalServiceUtil.getLayout(group.getClassPK()); groupId = layout.getGroupId(); group = GroupLocalServiceUtil.getGroup(groupId); } if (group.isStagingGroup()) { if (primKey.equals(String.valueOf(groupId))) { primKey = String.valueOf(group.getLiveGroupId()); } groupId = group.getLiveGroupId(); group = group.getLiveGroup(); } } } catch (Exception e) { _log.error(e, e); } Boolean value = PermissionCacheUtil.getPermission(user.getUserId(), signedIn, checkGuest, groupId, name, primKey, actionId); if (value == null) { try { value = Boolean.valueOf(hasPermissionImpl(groupId, name, primKey, actionId)); if (_log.isDebugEnabled()) { _log.debug("Checking permission for " + groupId + " " + name + " " + primKey + " " + actionId + " takes " + stopWatch.getTime() + " ms"); } } finally { if (value == null) { value = Boolean.FALSE; } PermissionCacheUtil.putPermission(user.getUserId(), signedIn, checkGuest, groupId, name, primKey, actionId, value); } } return value.booleanValue(); }
From source file:com.liferay.exportimport.controller.LayoutImportController.java
/**
 * Imports a LAR file for a group: validates the archive, wires source/target
 * group ids, reads the manifest, reconciles layout and layout-set prototype
 * UUIDs, imports permissions (when requested), expando tables, locks, the
 * group itself, and asset links, then marks the group as a site.
 *
 * @param portletDataContext the import context wrapping the LAR zip and parameter map
 * @param userId             the user performing the import (set on the service context)
 * @throws Exception if validation or any import step fails
 */
protected void doImportFile(PortletDataContext portletDataContext, long userId) throws Exception {
    Map<String, String[]> parameterMap = portletDataContext.getParameterMap();
    Group group = _groupLocalService.getGroup(portletDataContext.getGroupId());
    String layoutsImportMode = MapUtil.getString(parameterMap, PortletDataHandlerKeys.LAYOUTS_IMPORT_MODE,
            PortletDataHandlerKeys.LAYOUTS_IMPORT_MODE_MERGE_BY_LAYOUT_UUID);
    boolean permissions = MapUtil.getBoolean(parameterMap, PortletDataHandlerKeys.PERMISSIONS);
    // Prototype groups must not stay linked to a layout set prototype during import.
    if (group.isLayoutSetPrototype()) {
        parameterMap.put(PortletDataHandlerKeys.LAYOUT_SET_PROTOTYPE_LINK_ENABLED,
                new String[] { Boolean.FALSE.toString() });
    }
    if (_log.isDebugEnabled()) {
        _log.debug("Import permissions " + permissions);
    }
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    LayoutCache layoutCache = new LayoutCache();
    long companyId = portletDataContext.getCompanyId();
    // Run the whole import under a service context impersonating the importing user.
    ServiceContext serviceContext = ServiceContextThreadLocal.getServiceContext();
    if (serviceContext == null) {
        serviceContext = new ServiceContext();
    }
    serviceContext.setCompanyId(companyId);
    serviceContext.setSignedIn(false);
    serviceContext.setUserId(userId);
    ServiceContextThreadLocal.pushServiceContext(serviceContext);

    // LAR validation
    validateFile(companyId, portletDataContext.getGroupId(), parameterMap, portletDataContext.getZipReader());

    // Source and target group id
    Map<Long, Long> groupIds = (Map<Long, Long>) portletDataContext.getNewPrimaryKeysMap(Group.class);
    groupIds.put(portletDataContext.getSourceGroupId(), portletDataContext.getGroupId());

    // Manifest
    ManifestSummary manifestSummary = _exportImportHelper.getManifestSummary(portletDataContext);
    portletDataContext.setManifestSummary(manifestSummary);

    // Layout and layout set prototype
    Element rootElement = portletDataContext.getImportDataRootElement();
    Element headerElement = rootElement.element("header");
    String layoutSetPrototypeUuid = headerElement.attributeValue("layout-set-prototype-uuid");
    String larType = headerElement.attributeValue("type");
    portletDataContext.setType(larType);
    // Reconcile the local prototype UUID with the one carried in the LAR, so
    // layouts created from the prototype keep their linkage after import.
    if (group.isLayoutPrototype() && larType.equals("layout-prototype")) {
        parameterMap.put(PortletDataHandlerKeys.DELETE_MISSING_LAYOUTS,
                new String[] { Boolean.FALSE.toString() });
        LayoutPrototype layoutPrototype = _layoutPrototypeLocalService.getLayoutPrototype(group.getClassPK());
        String layoutPrototypeUuid = GetterUtil.getString(headerElement.attributeValue("type-uuid"));
        LayoutPrototype existingLayoutPrototype = null;
        if (Validator.isNotNull(layoutPrototypeUuid)) {
            try {
                existingLayoutPrototype = _layoutPrototypeLocalService
                        .getLayoutPrototypeByUuidAndCompanyId(layoutPrototypeUuid, companyId);
            } catch (NoSuchLayoutPrototypeException nslpe) {
                // LPS-52675: a missing prototype is expected; fall through and adopt the UUID.
                if (_log.isDebugEnabled()) {
                    _log.debug(nslpe, nslpe);
                }
            }
        }
        if (existingLayoutPrototype == null) {
            // Adopt the imported UUID and re-point all derived layouts at it.
            List<Layout> layouts = _layoutLocalService
                    .getLayoutsByLayoutPrototypeUuid(layoutPrototype.getUuid());
            layoutPrototype.setUuid(layoutPrototypeUuid);
            _layoutPrototypeLocalService.updateLayoutPrototype(layoutPrototype);
            for (Layout layout : layouts) {
                layout.setLayoutPrototypeUuid(layoutPrototypeUuid);
                _layoutLocalService.updateLayout(layout);
            }
        }
    } else if (group.isLayoutSetPrototype() && larType.equals("layout-set-prototype")) {
        // Same reconciliation, but for layout SET prototypes and their layout sets.
        parameterMap.put(PortletDataHandlerKeys.LAYOUT_SET_PROTOTYPE_SETTINGS,
                new String[] { Boolean.TRUE.toString() });
        LayoutSetPrototype layoutSetPrototype = _layoutSetPrototypeLocalService
                .getLayoutSetPrototype(group.getClassPK());
        String importedLayoutSetPrototypeUuid = GetterUtil.getString(headerElement.attributeValue("type-uuid"));
        LayoutSetPrototype existingLayoutSetPrototype = null;
        if (Validator.isNotNull(importedLayoutSetPrototypeUuid)) {
            try {
                existingLayoutSetPrototype = _layoutSetPrototypeLocalService
                        .getLayoutSetPrototypeByUuidAndCompanyId(importedLayoutSetPrototypeUuid, companyId);
            } catch (NoSuchLayoutSetPrototypeException nslspe) {
                // LPS-52675: a missing prototype is expected; fall through and adopt the UUID.
                if (_log.isDebugEnabled()) {
                    _log.debug(nslspe, nslspe);
                }
            }
        }
        if (existingLayoutSetPrototype == null) {
            List<LayoutSet> layoutSets = _layoutSetLocalService
                    .getLayoutSetsByLayoutSetPrototypeUuid(layoutSetPrototype.getUuid());
            layoutSetPrototype.setUuid(importedLayoutSetPrototypeUuid);
            _layoutSetPrototypeLocalService.updateLayoutSetPrototype(layoutSetPrototype);
            for (LayoutSet curLayoutSet : layoutSets) {
                curLayoutSet.setLayoutSetPrototypeUuid(importedLayoutSetPrototypeUuid);
                _layoutSetLocalService.updateLayoutSet(curLayoutSet);
            }
        }
    } else if (larType.equals("layout-set-prototype")) {
        parameterMap.put(PortletDataHandlerKeys.LAYOUT_SET_PROTOTYPE_SETTINGS,
                new String[] { Boolean.TRUE.toString() });
        layoutSetPrototypeUuid = GetterUtil.getString(headerElement.attributeValue("type-uuid"));
    }
    if (Validator.isNotNull(layoutSetPrototypeUuid)) {
        portletDataContext.setLayoutSetPrototypeUuid(layoutSetPrototypeUuid);
    }
    List<Element> portletElements = fetchPortletElements(rootElement);
    // Roles referenced by portlet permissions must exist before data import.
    if (permissions) {
        for (Element portletElement : portletElements) {
            String portletPath = portletElement.attributeValue("path");
            Document portletDocument = SAXReaderUtil
                    .read(portletDataContext.getZipEntryAsString(portletPath));
            _permissionImporter.checkRoles(layoutCache, companyId, portletDataContext.getGroupId(), userId,
                    portletDocument.getRootElement());
        }
        _permissionImporter.readPortletDataPermissions(portletDataContext);
    }
    if (!layoutsImportMode.equals(PortletDataHandlerKeys.LAYOUTS_IMPORT_MODE_CREATED_FROM_PROTOTYPE)) {
        _portletImportController.readExpandoTables(portletDataContext);
    }
    _portletImportController.readLocks(portletDataContext);

    // Import the group
    Element groupsElement = portletDataContext.getImportDataGroupElement(StagedGroup.class);
    for (Element groupElement : groupsElement.elements()) {
        StagedModelDataHandlerUtil.importStagedModel(portletDataContext, groupElement);
    }

    // Asset links
    _portletImportController.importAssetLinks(portletDataContext);

    // Site
    _groupLocalService.updateSite(portletDataContext.getGroupId(), true);
    if (_log.isInfoEnabled()) {
        _log.info("Importing layouts takes " + stopWatch.getTime() + " ms");
    }
    // NOTE(review): zipReader is only closed on the success path; an exception
    // above appears to leak it — confirm whether a caller closes it on failure.
    ZipReader zipReader = portletDataContext.getZipReader();
    zipReader.close();
}
From source file:com.ecyrd.jspwiki.filters.SpamFilter.java
/** * Checks against the akismet system.//w ww . j av a 2 s . co m * * @param context * @param change * @throws RedirectException */ private void checkAkismet(WikiContext context, Change change) throws RedirectException { if (m_akismetAPIKey != null) { if (m_akismet == null) { log.info("Initializing Akismet spam protection."); m_akismet = new Akismet(m_akismetAPIKey, context.getEngine().getBaseURL()); if (!m_akismet.verifyAPIKey()) { log.error("Akismet API key cannot be verified. Please check your config."); m_akismetAPIKey = null; m_akismet = null; } } HttpServletRequest req = context.getHttpRequest(); // // Akismet will mark all empty statements as spam, so we'll just // ignore them. // if (change.m_adds == 0 && change.m_removals > 0) { return; } if (req != null && m_akismet != null) { log.debug("Calling Akismet to check for spam..."); StopWatch sw = new StopWatch(); sw.start(); String ipAddress = req.getRemoteAddr(); String userAgent = req.getHeader("User-Agent"); String referrer = req.getHeader("Referer"); String permalink = context.getViewURL(context.getPage().getName()); String commentType = context.getRequestContext().equals(WikiContext.COMMENT) ? "comment" : "edit"; String commentAuthor = context.getCurrentUser().getName(); String commentAuthorEmail = null; String commentAuthorURL = null; boolean isSpam = m_akismet.commentCheck(ipAddress, userAgent, referrer, permalink, commentType, commentAuthor, commentAuthorEmail, commentAuthorURL, change.toString(), null); sw.stop(); log.debug("Akismet request done in: " + sw); if (isSpam) { // Host host = new Host( ipAddress, null ); // m_temporaryBanList.add( host ); String uid = log(context, REJECT, REASON_AKISMET, change.toString()); log.info("SPAM:Akismet (" + uid + "). Akismet thinks this change is spam; added host to temporary ban list."); checkStrategy(context, REASON_AKISMET, "Akismet tells Herb you're a spammer, Herb trusts Akismet, and I trust Herb! (Incident code " + uid + ")"); } } } }
From source file:edu.internet2.middleware.psp.grouper.PspChangeLogConsumer.java
/** * Call the method of the {@link EventType} enum which matches the {@link ChangeLogEntry} category and action (the * change log type).//from w w w . j a v a 2 s. co m * * @param changeLogEntry the change log entry * @throws Exception if an error occurs processing the change log entry */ public void processChangeLogEntry(ChangeLogEntry changeLogEntry) throws Exception { try { // find the method to run via the enum String enumKey = changeLogEntry.getChangeLogType().getChangeLogCategory() + "__" + changeLogEntry.getChangeLogType().getActionName(); EventType ldappcEventType = EventType.valueOf(enumKey); if (ldappcEventType == null) { LOG.debug("PSP Consumer '{}' - Change log entry '{}' Unsupported category and action.", name, toString(changeLogEntry)); } else { // process the change log event LOG.info("PSP Consumer '{}' - Change log entry '{}'", name, toStringDeep(changeLogEntry)); StopWatch stopWatch = new StopWatch(); stopWatch.start(); ldappcEventType.process(this, changeLogEntry); stopWatch.stop(); LOG.info("PSP Consumer '{}' - Change log entry '{}' Finished processing. Elapsed time {}", new Object[] { name, toString(changeLogEntry), stopWatch, }); if (LOG.isDebugEnabled()) { for (String stats : PspCLI.getAllCacheStats()) { LOG.debug(stats); } } } } catch (IllegalArgumentException e) { LOG.debug("PSP Consumer '{}' - Change log entry '{}' Unsupported category and action.", name, toString(changeLogEntry)); } }