List of usage examples for the org.apache.commons.lang.time.StopWatch constructor StopWatch()
public StopWatch()
Constructor.
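Before the full examples, a minimal sketch of the lifecycle that the no-argument constructor sets up: create the watch (unstarted), start it around the work being measured, optionally suspend and resume it, stop it, then read the elapsed time with toString() or getTime(). The Thread.sleep call is just a stand-in for the work being timed.

import org.apache.commons.lang.time.StopWatch;

public class StopWatchDemo {
    public static void main(String[] args) throws InterruptedException {
        StopWatch watch = new StopWatch(); // created in an unstarted state
        watch.start();
        Thread.sleep(250);                 // stand-in for the work being timed
        watch.suspend();                   // pause without losing elapsed time
        watch.resume();
        watch.stop();
        System.out.println("elapsed: " + watch.toString()); // e.g. 0:00:00.250
        System.out.println("millis:  " + watch.getTime());
    }
}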
From source file:org.kuali.ole.docstore.process.RebuildIndexesHandler.java
private void indexDocs(List<RequestDocument> docs, long records, long recCount,
        List<ReIndexingBatchStatus> batchStatusList, ReIndexingBatchStatus reIndexingBatchStatus) {
    try {
        StopWatch indexTimer = new StopWatch();
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss:SSS");
        Date startDate = new Date();
        reIndexingBatchStatus.setBatchStartTime(dateFormat.format(startDate));
        indexTimer.start();
        reIndexingBatchStatus.setStatus("Indexing");
        reIndexingBatchStatus.setBatchIndexingTime(indexTimer.toString());
        reIndexingBatchStatus.setRecordsProcessed(records);
        reIndexingBatchStatus.setBatchEndTime(" ");
        batchStatusList.add(reIndexingBatchStatus);
        ReIndexingStatus.getInstance().getDocTypeList().setReIndBatStatusList(batchStatusList);
        IndexerService indexerService = BeanLocator.getDocstoreFactory().getDocumentIndexManager(
                docs.get(0).getCategory(), docs.get(0).getType(), docs.get(0).getFormat());
        String result = indexerService.indexDocuments(docs, false);
        logger.debug(result);
        indexTimer.stop();
        Date endDate = new Date();
        reIndexingBatchStatus.setBatchEndTime(dateFormat.format(endDate));
        reIndexingBatchStatus.setBatchIndexingTime(indexTimer.toString());
        reIndexingBatchStatus.setRecordsProcessed(records);
        reIndexingBatchStatus.setStatus("Done");
        reIndexingBatchStatus.setRecordsRemaining(recCount - records);
        ReIndexingStatus.getInstance().getDocTypeList().setReIndBatStatusList(batchStatusList);
        docs.clear();
    } catch (Exception e) {
        logger.error("Rebuild Indexes Processed(" + (records - docs.size()) + "), Failed @ batch("
                + docs.size() + "): Cause: " + e + "\n\tContinuous", e);
    }
}
From source file:org.kuali.ole.docstore.process.RebuildIndexesHandler.java
private void workBibMarcAndDublinAll(String docCategory, String docType, String docFormat)
        throws SolrServerException, IOException {
    long totalCount = 0;
    long nodeCount = 0;
    int start = 0;
    String sqlQuery = null;
    long startTime = 0;
    long commitEndTime = 0;
    long commitStartTime = 0;
    int batchSize = 50000; // ProcessParameters.BULK_PROCESSOR_SPLIT_SIZE;
    int commitSize = 50000;
    long endIndexBatch = 0;
    String dbVendor = ConfigContext.getCurrentContextConfig().getProperty("db.vendor");
    List<BibTree> bibTrees = new ArrayList<BibTree>();
    //Map<String, BibTree> bibMap = new HashMap<String, BibTree>();
    try {
        String prefix = DocumentUniqueIDPrefix.getPrefix(docCategory, docType, docFormat);
        Map prefixMap = new HashMap(0);
        prefixMap.put("uniqueIdPrefix", prefix);
        startTime = System.currentTimeMillis(); // t1
        BusinessObjectService businessObjectService = KRADServiceLocator.getBusinessObjectService();
        int bibCount = businessObjectService.countMatching(BibRecord.class, prefixMap);
        Connection connection = null;
        PreparedStatement preparedStatement = null;
        if (bibCount > 0) {
            connection = getConnection();
            if (dbVendor.equalsIgnoreCase("mysql")) {
                sqlQuery = "select * from ole_ds_bib_t b ORDER BY b.bib_id LIMIT ?,?";
            } else {
                sqlQuery = "select * from (select b.*,ROWNUM r from OLE_DS_BIB_T b) where r between ? and ?";
            }
            preparedStatement = connection.prepareStatement(sqlQuery);
        }
        List<ReIndexingBatchStatus> batchStatusList = new ArrayList<ReIndexingBatchStatus>();
        StopWatch loadTimer = new StopWatch();
        StopWatch batchTimer = new StopWatch();
        loadTimer.start();
        batchTimer.start();
        for (int i = 0; i < bibCount; i++) {
            if (bibTrees.size() == batchSize) {
                if (!isStop()) {
                    ReIndexingBatchStatus reIndexingBatchStatus = indexBeforeParams(loadTimer);
                    indexBibDocs(bibTrees, totalCount, nodeCount, batchStatusList, reIndexingBatchStatus);
                    indexAfterParams(batchTimer, reIndexingBatchStatus, batchStatusList);
                    resetTimers(batchTimer, loadTimer);
                    totalCount = 0;
                    logger.info("Rebuild");
                    i = start;
                    if (start % commitSize == 0) {
                        commitStartTime = System.currentTimeMillis(); // t2
                        logger.info("Time elapsed since start ====>>>>>> " + (commitStartTime - startTime)); // t2-t1
                        logger.info("Time elapsed since last commit ====>>>>>> "
                                + (commitStartTime - commitEndTime)); // t2-t3
                        logger.info("commit started ====>>>>>> " + commitStartTime);
                        SolrServer solr = SolrServerManager.getInstance().getSolrServer();
                        solr.commit();
                        logger.info("No..of records committed ====>>>>>> " + start);
                        commitEndTime = System.currentTimeMillis(); // t3
                        logger.info("Time Taken for commit ====>>>>>> " + (commitEndTime - commitStartTime)); // t3-t2
                    }
                } else {
                    return;
                }
            } else {
                if (start < bibCount) {
                    long b2time = System.currentTimeMillis();
                    if (dbVendor.equalsIgnoreCase("mysql")) {
                        preparedStatement.setInt(1, start);
                        preparedStatement.setInt(2, batchSize);
                    } else {
                        preparedStatement.setInt(1, start + 1);
                        preparedStatement.setInt(2, start + batchSize);
                    }
                    ResultSet resultSet = preparedStatement.executeQuery();
                    logger.info("time taking for getting records from DB end======>>>>>"
                            + (System.currentTimeMillis() - b2time));
                    while (resultSet.next()) {
                        BibTree bibTree = new BibTree();
                        Bib bib = new BibMarc();
                        bib.setCategory(docCategory);
                        bib.setType(docType);
                        bib.setFormat(docFormat);
                        bib.setCreatedBy(resultSet.getString("CREATED_BY"));
                        bib.setCreatedOn(resultSet.getString("DATE_CREATED"));
                        bib.setStaffOnly(resultSet.getString("STAFF_ONLY").equalsIgnoreCase("Y")
                                ? Boolean.TRUE : Boolean.FALSE);
                        bib.setContent(resultSet.getString("CONTENT"));
                        bib.setUpdatedBy(resultSet.getString("UPDATED_BY"));
                        bib.setUpdatedOn(resultSet.getString("DATE_UPDATED"));
                        bib.setLastUpdated(resultSet.getString("DATE_UPDATED"));
                        bib.setStatus(resultSet.getString("STATUS"));
                        bib.setStatusUpdatedBy(resultSet.getString("STATUS_UPDATED_BY"));
                        bib.setStatusUpdatedOn(resultSet.getString("STATUS_UPDATED_DATE"));
                        String uuid = DocumentUniqueIDPrefix.getPrefixedId(
                                resultSet.getString("UNIQUE_ID_PREFIX"), resultSet.getString(1));
                        bib.setId(uuid);
                        bib.setLocalId(uuid);
                        bibTree.setBib(bib);
                        start++;
                        totalCount++;
                        bibTrees.add(bibTree);
                    }
                    resultSet.close();
                }
            }
        }
        if (bibTrees.size() > 0 && !isStop()) {
            ReIndexingBatchStatus reIndexingBatchStatus = indexBeforeParams(loadTimer);
            indexBibDocs(bibTrees, totalCount, nodeCount, batchStatusList, reIndexingBatchStatus);
            indexAfterParams(batchTimer, reIndexingBatchStatus, batchStatusList);
            commitStartTime = System.currentTimeMillis();
            logger.info("commit started : ----> " + commitStartTime);
            SolrServer solr = SolrServerManager.getInstance().getSolrServer();
            solr.commit();
            logger.info("No..of records committed : ----> " + start);
            commitEndTime = System.currentTimeMillis();
            logger.info("Time Taken for commit ======>>> " + (commitEndTime - commitStartTime));
        }
        endIndexBatch = System.currentTimeMillis(); // t1
        logger.info("Time elapsed since end ====>>>>>> " + endIndexBatch);
    } catch (Exception e) {
        logger.error("Rebuild Indexes Process(" + docCategory + " : " + docType + " : " + docFormat
                + ") Processed(" + (totalCount - bibTrees.size()) + "), Failed @ batch("
                + bibTrees.size() + "): Cause: " + e, e);
    } finally {
        if (isStop) {
            ReIndexingStatus.getInstance().getDocTypeList().setStatus("Stopped");
        } else {
            ReIndexingStatus.getInstance().getDocTypeList().setStatus("Done");
        }
    }
}
From source file:org.kuali.ole.docstore.process.RebuildIndexesHandler.java
private void workInstanceOLEML(String docCategory, String docType, String docFormat) {
    long totalCount = 0;
    long nodeCount = 0;
    List<RequestDocument> docs = new ArrayList<RequestDocument>();
    InstanceOlemlRecordProcessor instanceOlemlRecordProcessor = new InstanceOlemlRecordProcessor();
    try {
        RequestDocument rd = new RequestDocument();
        rd.setCategory(docCategory);
        rd.setType(docType);
        rd.setFormat(docFormat);
        List<ReIndexingBatchStatus> batchStatusList = new ArrayList<ReIndexingBatchStatus>();
        BusinessObjectService businessObjectService = KRADServiceLocator.getBusinessObjectService();
        List<InstanceRecord> instanceRecords = (List<InstanceRecord>) businessObjectService
                .findAll(InstanceRecord.class);
        StopWatch loadTimer = new StopWatch();
        StopWatch batchTimer = new StopWatch();
        loadTimer.start();
        batchTimer.start();
        for (int i = 0; i < instanceRecords.size(); i++) {
            if (docs.size() == ProcessParameters.BULK_PROCESSOR_SPLIT_SIZE) {
                if (!isStop()) {
                    ReIndexingBatchStatus reIndexingBatchStatus = indexBeforeParams(loadTimer);
                    indexDocs(docs, totalCount, nodeCount, batchStatusList, reIndexingBatchStatus);
                    indexAfterParams(batchTimer, reIndexingBatchStatus, batchStatusList);
                    resetTimers(batchTimer, loadTimer);
                    totalCount = 0;
                    logger.info("Rebuild");
                } else {
                    return;
                }
            } else {
                InstanceRecord instanceRecord = instanceRecords.get(i);
                String uuid = DocumentUniqueIDPrefix.getPrefixedId(instanceRecord.getUniqueIdPrefix(),
                        instanceRecord.getInstanceId());
                RequestDocument requestDocument = buildRequestDocumentForCheckout(docCategory, docType,
                        docFormat, uuid);
                ResponseDocument responseDocument = RdbmsWorkInstanceDocumentManager.getInstance()
                        .checkoutContent(requestDocument, businessObjectService);
                String content = responseDocument.getContent().getContent();
                RequestDocument requestDocumentForIndex = (RequestDocument) rd.clone();
                requestDocumentForIndex.setAdditionalAttributes(responseDocument.getAdditionalAttributes());
                requestDocumentForIndex.setId(uuid);
                requestDocumentForIndex.setUuid(uuid);
                InstanceCollection instanceCollection = instanceOlemlRecordProcessor.fromXML(content);
                // logger.debug("REBUILD_INDEXING_LINKING " + ProcessParameters.REBUILD_INDEXING_LINKING);
                // if (!ProcessParameters.REBUILD_INDEXING_LINKING) {
                //     instanceCollection.getInstance().get(0).getResourceIdentifier().clear();
                // }
                content = instanceOlemlRecordProcessor.toXML(instanceCollection);
                Content contentObj = new Content();
                contentObj.setContent(content);
                contentObj.setContentObject(instanceCollection);
                requestDocumentForIndex.setContent(contentObj);
                docs.add(requestDocumentForIndex);
                totalCount++;
            }
        }
        if (docs.size() > 0 && !isStop()) {
            ReIndexingBatchStatus reIndexingBatchStatus = indexBeforeParams(loadTimer);
            indexDocs(docs, totalCount, nodeCount, batchStatusList, reIndexingBatchStatus);
            indexAfterParams(batchTimer, reIndexingBatchStatus, batchStatusList);
        }
    } catch (Exception e) {
        logger.error("Rebuild Indexes Process(" + docCategory + " : " + docType + " : " + docFormat
                + ") Processed(" + (totalCount - docs.size()) + "), Failed @ batch(" + docs.size()
                + "): Cause: " + e, e);
    } finally {
        if (isStop) {
            ReIndexingStatus.getInstance().getDocTypeList().setStatus("Stopped");
        } else {
            ReIndexingStatus.getInstance().getDocTypeList().setStatus("Done");
        }
    }
}
From source file:org.kuali.ole.docstore.process.RebuildIndexesHandler.java
private void workLicense(String docCategory, String docType, String docFormat) {
    Session session = null;
    long totalCount = 0;
    long nodeCount = 0;
    List<RequestDocument> docs = new ArrayList<RequestDocument>();
    try {
        session = RepositoryManager.getRepositoryManager().getSession(ProcessParameters.BULK_DEFAULT_USER,
                ProcessParameters.BULK_DEFUALT_ACTION);
        RequestDocument rd = new RequestDocument();
        rd.setCategory(docCategory);
        rd.setType(docType);
        rd.setFormat(docFormat);
        DocumentIngester docIngester = new DocumentIngester();
        Node nodeFormat = docIngester.getStaticFormatNode(rd, session);
        NodeIterator nodesL1 = nodeFormat.getNodes();
        List<ReIndexingBatchStatus> batchStatusList = new ArrayList<ReIndexingBatchStatus>();
        StopWatch loadTimer = new StopWatch();
        StopWatch batchTimer = new StopWatch();
        loadTimer.start();
        RepositoryBrowser repositoryBrowser = new RepositoryBrowser();
        while (nodesL1.hasNext()) {
            Node nodeL1 = nodesL1.nextNode();
            NodeIterator nodesFile = nodeL1.getNodes();
            nodeCount = nodesFile.getSize();
            batchTimer.start();
            while (nodesFile.hasNext()) {
                if (docs.size() == ProcessParameters.BULK_PROCESSOR_SPLIT_SIZE && !isStop()) {
                    if (!isStop()) {
                        ReIndexingBatchStatus reIndexingBatchStatus = indexBeforeParams(loadTimer);
                        indexDocs(docs, totalCount, nodeCount, batchStatusList, reIndexingBatchStatus);
                        indexAfterParams(batchTimer, reIndexingBatchStatus, batchStatusList);
                        resetTimers(batchTimer, loadTimer);
                        totalCount = 0;
                        logger.info("Rebuild");
                    } else {
                        return;
                    }
                } else {
                    Node fileNode = nodesFile.nextNode();
                    String content = null;
                    if (docFormat.equals(DocFormat.ONIXPL.getCode())) {
                        content = checkoutManager.getData(fileNode);
                    } else if (docFormat.equals(DocFormat.PDF.getCode())
                            || docFormat.equals(DocFormat.DOC.getCode())) {
                        content = checkoutManager.checkOutBinary(fileNode.getIdentifier(),
                                ProcessParameters.BULK_DEFAULT_USER, ProcessParameters.BULK_DEFUALT_ACTION,
                                docFormat);
                    }
                    RequestDocument reqDoc = (RequestDocument) rd.clone();
                    reqDoc.setId(fileNode.getIdentifier());
                    reqDoc.setUuid(fileNode.getIdentifier());
                    Content contentObj = new Content();
                    contentObj.setContent(content);
                    reqDoc.setContent(contentObj);
                    docs.add(reqDoc);
                    totalCount++;
                }
            }
        }
        if (docs.size() > 0 && !isStop()) {
            ReIndexingBatchStatus reIndexingBatchStatus = indexBeforeParams(loadTimer);
            indexDocs(docs, totalCount, nodeCount, batchStatusList, reIndexingBatchStatus);
            indexAfterParams(batchTimer, reIndexingBatchStatus, batchStatusList);
        }
    } catch (Exception e) {
        logger.error("Rebuild Indexes Process(" + docCategory + " : " + docType + " : " + docFormat
                + ") Processed(" + (totalCount - docs.size()) + "), Failed @ batch(" + docs.size()
                + "): Cause: " + e, e);
    } finally {
        try {
            if (isStop) {
                ReIndexingStatus.getInstance().getDocTypeList().setStatus("Stopped");
            } else {
                ReIndexingStatus.getInstance().getDocTypeList().setStatus("Done");
            }
            RepositoryManager.getRepositoryManager().logout(session);
        } catch (OleException e) {
            logger.error(e.getMessage(), e);
        }
    }
}
From source file:org.kuali.ole.docstore.service.DocumentIngester.java
/**
 * Method to ingest a Bib RequestDocument using Btree manager.
 *
 * @param reqDocs
 * @param session
 * @param formatNode
 * @return
 * @throws Exception
 */
protected synchronized List<Node> ingestBibDocumentUsingBTreeMgr(List<RequestDocument> reqDocs,
        Session session, Node formatNode) throws Exception {
    List<Node> fileNodes = null;
    fileNodes = new ArrayList<Node>();
    try {
        /*String file = "file";
        if (DocFormat.MARC.isEqualTo(reqDoc.getFormat()))
            file = FILE_MARC;
        else
            file = reqDoc.getFormat() + FILE;
        Node bibFormatNode = null;
        if (formatNode == null)
            bibFormatNode = getStaticFormatNode(reqDoc, session);
        else
            bibFormatNode = formatNode;
        Node l3 = null;
        synchronized (nodeHandler) {
            Node l1 = nodeHandler.initLevelNode(NODE_LEVEL1, bibFormatNode, false, session);
            Node l2 = nodeHandler.initLevelNode(NODE_LEVEL2, l1, false, session);
            l3 = nodeHandler.initLevelNode(NODE_LEVEL3, l2, false, session);
        } */
        StopWatch btreeTimer = new StopWatch();
        DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss:SSS");
        Date date = new Date();
        btreeTimer.start();
        treeManager = new BTreeManager(formatNode, 500, 1000, Rank.<String>comparableComparator(), true);
        // Create a new NodeSequence with that tree manager
        nodeSequence = ItemSequence.createNodeSequence(treeManager);
        btreeTimer.stop();
        logger.info("Time taken for initializing btree manager sequence=" + btreeTimer.toString());
        StopWatch btreeAddNodeTimer = new StopWatch();
        Node node = null;
        btreeAddNodeTimer.start();
        Random generator = new Random(19580427);
        Format formatter = new SimpleDateFormat("dd-MM-yyyy HH-mm-ss");
        Date date1 = null;
        for (RequestDocument reqDoc : reqDocs) {
            node = null;
            date1 = new Date();
            String dateStr = formatter.format(date1);
            node = nodeSequence.addNode(dateStr + "-" + generator.nextInt(), NodeType.NT_UNSTRUCTURED);
            nodeHandler.initFileNode(node, reqDoc, FILE_MARC, null, session);
            fileNodes.add(node);
            //i++;
        }
        btreeAddNodeTimer.stop();
        logger.info("Time taken for adding " + reqDocs.size() + " nodes to btree: "
                + btreeAddNodeTimer.toString());
    } catch (Exception e) {
        logger.error("Ingest failed for RequestDocument: ", e);
        throw new Exception(e);
    }
    return fileNodes;
}
From source file:org.kuali.ole.docstore.service.IngestNIndexHandlerService.java
/**
 * Method to ingest and index bulk Request.
 *
 * @param request
 * @return
 */
public List<String> bulkIngestNIndex(Request request, Session session) {
    //RequestDocument requestDocument = request.getRequestDocuments().get(0);
    //DocumentManager documentManager = BeanLocator.getDocumentManagerFactory().getDocumentManager(requestDocument);
    BatchIngestStatistics batchStatistics = BulkIngestStatistics.getInstance().getCurrentBatch();
    BulkIngestStatistics bulkLoadStatistics = BulkIngestStatistics.getInstance();
    long commitSize = ProcessParameters.BULK_INGEST_COMMIT_SIZE;
    logger.debug("commitSize = " + commitSize);
    logger.debug("bulkIngestNIndex(" + request.getRequestDocuments().size() + ") START");
    logger.debug("BULK_INGEST_IS_LINKING_ENABLED=" + ProcessParameters.BULK_INGEST_IS_LINKING_ENABLED);
    //Session session = null;
    List<String> docUUIDs = new ArrayList<String>();
    StopWatch ingestTimer = new StopWatch();
    StopWatch indexTimer = new StopWatch();
    StopWatch totalTimer = new StopWatch();
    StopWatch createNodesTimer = new StopWatch();
    StopWatch sessionSaveTimer = new StopWatch();
    StopWatch solrOptimizeTimer = new StopWatch();
    long recCount = request.getRequestDocuments().size();
    boolean isCommit = false;
    totalTimer.start();
    try {
        ingestTimer.start();
        createNodesTimer.start();
        //session = RepositoryManager.getRepositoryManager().getSession(request.getUser(), request.getOperation());
        List<RequestDocument> reqDocs = request.getRequestDocuments();
        if (prevRequestDocs == null) {
            prevRequestDocs = new ArrayList<RequestDocument>();
        }
        prevRequestDocs.addAll(request.getRequestDocuments());
        logger.info("prevRequestDocs" + prevRequestDocs.size());
        docUUIDs.addAll(documentIngester.ingestRequestDocumentsForBulk(reqDocs, session));
        //docUUIDs.addAll(documentIngester.ingestRequestDocumentsForBulkUsingBTreeMgr(reqDocs, session));
        //documentManager.store(reqDocs,session);
        createNodesTimer.stop();
        try {
            ingestTimer.suspend();
            indexTimer.start();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        bulkLoadStatistics.setCommitRecCount(bulkLoadStatistics.getCommitRecCount() + recCount);
        if (bulkLoadStatistics.getCommitRecCount() == commitSize || bulkLoadStatistics.isLastBatch()) {
            isCommit = true;
        }
        documentIndexer.indexDocumentsForBulk(reqDocs, isCommit);
        //documentManager.index(reqDocs,isCommit);
        try {
            indexTimer.suspend();
            ingestTimer.resume();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        if (isCommit) {
            sessionSaveTimer.start();
            logger.info("Bulk ingest: Repository commit started. Number of records being committed : "
                    + bulkLoadStatistics.getCommitRecCount());
            session.save();
            bulkLoadStatistics.setCommitRecCount(0);
            prevRequestDocs = null;
            sessionSaveTimer.stop();
        }
        try {
            ingestTimer.stop();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        // Documents processed can be different from records processed as in the case of Instance data.
        logger.debug("Documents processed:" + recCount);
        bulkLoadStatistics.setFileRecCount(bulkLoadStatistics.getFileRecCount() + recCount);
        logger.info("Bulk ingest: Records processed in the current file :"
                + bulkLoadStatistics.getFileRecCount());
    } catch (Exception e) {
        bulkLoadStatistics.setCommitRecCount(0);
        try {
            ingestTimer.resume();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        //documentIngester.rollbackDocStoreIngestedData(session, request.getRequestDocuments());
        documentIngester.rollbackDocStoreIngestedData(session, prevRequestDocs);
        ingestTimer.stop();
        try {
            indexTimer.resume();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        //documentIndexer.rollbackIndexedData(request.getRequestDocuments());
        //prevRequestDocs = prevRequestDocs.subList(0, prevRequestDocs.size() - request.getRequestDocuments().size());
        //logger.info("prevRequestDocs before remove INDEXES = " + prevRequestDocs.size());
        documentIndexer.rollbackIndexedData(prevRequestDocs);
        prevRequestDocs = null;
        try {
            indexTimer.stop();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        logger.error("Document Ingest & Index Failed, Cause: " + e.getMessage(), e);
        try {
            totalTimer.stop();
        } catch (Exception e2) {
            logger.error(e2.getMessage(), e2);
        }
        logger.debug("Time Consumptions...:\tcreatingNodes(" + docUUIDs.size() + "):" + createNodesTimer
                + "\tSessionSave(" + docUUIDs.size() + "):" + sessionSaveTimer + "\tIngest("
                + docUUIDs.size() + "):" + ingestTimer + "\tIndexing(" + docUUIDs.size() + "):"
                + indexTimer + "\tTotal Time: " + totalTimer);
        docUUIDs.clear();
    } finally {
        /*if (session != null) {
            try {
                RepositoryManager.getRepositoryManager().logout(session);
            } catch (OleException e) {
            }
        } */
    }
    try {
        totalTimer.stop();
    } catch (Exception exe) {
        logger.error(exe.getMessage(), exe);
    }
    logger.debug("Time Consumptions...:\tcreatingNodes(" + docUUIDs.size() + "):" + createNodesTimer
            + "\tSessionSave(" + docUUIDs.size() + "):" + sessionSaveTimer + "\tIngest(" + docUUIDs.size()
            + "):" + ingestTimer + "\tIndexing(" + docUUIDs.size() + "):" + indexTimer + "\tTotal Time: "
            + totalTimer);
    logger.debug("bulkIngestNIndex(" + request.getRequestDocuments().size() + ") END");
    batchStatistics.setTimeToCreateNodesInJcr(createNodesTimer.getTime());
    batchStatistics.setTimeToSaveJcrSession(sessionSaveTimer.getTime());
    batchStatistics.setIngestingTime(ingestTimer.getTime());
    batchStatistics.setIndexingTime(indexTimer.getTime());
    batchStatistics.setIngestNIndexTotalTime(totalTimer.getTime());
    updateProcessTimer(docUUIDs.size(), ingestTimer, indexTimer, totalTimer);
    solrOptimizeTimer.start();
    optimizeSolr(docUUIDs.size());
    solrOptimizeTimer.stop();
    batchStatistics.setTimeToSolrOptimize(solrOptimizeTimer.getTime());
    return docUUIDs;
}
From source file:org.kuali.rice.kns.web.struts.form.pojo.PojoFormBase.java
/**
 * Populates the form with values from the current request. Uses instances of Formatter to convert strings
 * to the Java types of the properties to which they are bound. Values that can't be converted are cached
 * in a map of unconverted values. Returns an ActionErrors containing ActionMessage instances for each
 * conversion error that occurred, if any.
 */
@Override
public void populate(HttpServletRequest request) {
    StopWatch watch = null;
    if (LOG.isDebugEnabled()) {
        watch = new StopWatch();
        watch.start();
        LOG.debug(WATCH_NAME + ": started");
    }
    unconvertedValues.clear();
    unknownKeys = new ArrayList();
    addRequiredNonEditableProperties();
    Map params = request.getParameterMap();
    String contentType = request.getContentType();
    String method = request.getMethod();
    if ("POST".equalsIgnoreCase(method) && contentType != null
            && contentType.startsWith("multipart/form-data")) {
        Map fileElements = (HashMap) request.getAttribute(KRADConstants.UPLOADED_FILE_REQUEST_ATTRIBUTE_KEY);
        Enumeration names = Collections.enumeration(fileElements.keySet());
        while (names.hasMoreElements()) {
            String name = (String) names.nextElement();
            params.put(name, fileElements.get(name));
        }
    }
    postprocessRequestParameters(params);
    /**
     * Iterate through request parameters; if a parameter matches a form variable, get the property type
     * and formatter and convert; if not, add it to the unknownKeys map.
     */
    Comparator<String> nestedPathComparator = new Comparator<String>() {
        public int compare(String prop1, String prop2) {
            Integer i1 = new Integer(prop1.split("\\.").length);
            Integer i2 = new Integer(prop2.split("\\.").length);
            return (i1.compareTo(i2));
        }
    };
    List<String> pathKeyList = new ArrayList<String>(params.keySet());
    Collections.sort(pathKeyList, nestedPathComparator);
    for (String keypath : pathKeyList) {
        if (shouldPropertyBePopulatedInForm(keypath, request)) {
            Object param = params.get(keypath);
            //LOG.debug("(keypath,paramType)=(" + keypath + "," + param.getClass().getName() + ")");
            populateForProperty(keypath, param, params);
        }
    }
    this.registerIsNewForm(false);
    if (LOG.isDebugEnabled()) {
        watch.stop();
        LOG.debug(WATCH_NAME + ": " + watch.toString());
    }
}
From source file:org.kuali.rice.krad.service.impl.DocumentServiceImpl.java
/**
 * Creates a new document by document type name. The principal name passed in will be used as the document
 * initiator. If the initiatorPrincipalNm is null or blank, the current user will be used.
 *
 * @see org.kuali.rice.krad.service.DocumentService#getNewDocument(String, String)
 */
@Override
public Document getNewDocument(String documentTypeName, String initiatorPrincipalNm)
        throws WorkflowException {
    // argument validation
    String watchName = "DocumentServiceImpl.getNewDocument";
    StopWatch watch = new StopWatch();
    watch.start();
    if (LOG.isDebugEnabled()) {
        LOG.debug(watchName + ": started");
    }
    if (StringUtils.isBlank(documentTypeName)) {
        throw new IllegalArgumentException("invalid (blank) documentTypeName");
    }
    if (GlobalVariables.getUserSession() == null) {
        throw new IllegalStateException(
                "GlobalVariables must be populated with a valid UserSession before a new document can be created");
    }
    // get the class for this docTypeName
    Class<? extends Document> documentClass = getDocumentClassByTypeName(documentTypeName);
    // get the initiator
    Person initiator = null;
    if (StringUtils.isBlank(initiatorPrincipalNm)) {
        initiator = GlobalVariables.getUserSession().getPerson();
    } else {
        initiator = KimApiServiceLocator.getPersonService().getPersonByPrincipalName(initiatorPrincipalNm);
        if (initiator == null) {
            initiator = GlobalVariables.getUserSession().getPerson();
        }
    }
    // get the authorization
    DocumentAuthorizer documentAuthorizer = getDocumentDictionaryService()
            .getDocumentAuthorizer(documentTypeName);
    DocumentPresentationController documentPresentationController = getDocumentDictionaryService()
            .getDocumentPresentationController(documentTypeName);
    // make sure this person is authorized to initiate
    if (LOG.isDebugEnabled()) {
        LOG.debug("calling canInitiate from getNewDocument(" + documentTypeName + ","
                + initiatorPrincipalNm + ")");
    }
    if (!documentPresentationController.canInitiate(documentTypeName)
            || !documentAuthorizer.canInitiate(documentTypeName, initiator)) {
        throw new DocumentAuthorizationException(initiator.getPrincipalName(), "initiate", documentTypeName);
    }
    // initiate new workflow entry, get the workflow doc
    WorkflowDocument workflowDocument = getWorkflowDocumentService()
            .createWorkflowDocument(documentTypeName, initiator);
    UserSessionUtils.addWorkflowDocument(GlobalVariables.getUserSession(), workflowDocument);
    // create a new document header object
    DocumentHeader documentHeader = new DocumentHeader();
    documentHeader.setWorkflowDocument(workflowDocument);
    documentHeader.setDocumentNumber(workflowDocument.getDocumentId());
    // build Document of specified type
    Document document = null;
    try {
        // all maintenance documents have same class
        if (MaintenanceDocumentBase.class.isAssignableFrom(documentClass)) {
            Class<?>[] defaultConstructor = new Class[] { String.class };
            Constructor<? extends Document> cons = documentClass.getConstructor(defaultConstructor);
            if (cons == null) {
                throw new ConfigurationException(
                        "Could not find constructor with document type name parameter needed for Maintenance Document Base class");
            }
            document = cons.newInstance(documentTypeName);
        } else {
            // non-maintenance document
            document = documentClass.newInstance();
        }
    } catch (IllegalAccessException e) {
        throw new RuntimeException("Error instantiating Document", e);
    } catch (InstantiationException e) {
        throw new RuntimeException("Error instantiating Document", e);
    } catch (SecurityException e) {
        throw new RuntimeException("Error instantiating Maintenance Document", e);
    } catch (NoSuchMethodException e) {
        throw new RuntimeException(
                "Error instantiating Maintenance Document: No constructor with String parameter found", e);
    } catch (IllegalArgumentException e) {
        throw new RuntimeException("Error instantiating Maintenance Document", e);
    } catch (InvocationTargetException e) {
        throw new RuntimeException("Error instantiating Maintenance Document", e);
    }
    document.setDocumentHeader(documentHeader);
    document.setDocumentNumber(documentHeader.getDocumentNumber());
    watch.stop();
    if (LOG.isDebugEnabled()) {
        LOG.debug(watchName + ": " + watch.toString());
    }
    return document;
}
From source file:org.kuali.rice.krad.workflow.service.impl.WorkflowDocumentServiceImpl.java
@Override
public WorkflowDocument createWorkflowDocument(String documentTypeName, Person person) {
    String watchName = "createWorkflowDocument";
    StopWatch watch = null;
    if (LOG.isDebugEnabled()) {
        watch = new StopWatch();
        watch.start();
        LOG.debug(watchName + ": started");
    }
    if (StringUtils.isBlank(documentTypeName)) {
        throw new IllegalArgumentException("invalid (blank) documentTypeName");
    }
    if (person == null) {
        throw new IllegalArgumentException("invalid (null) person");
    }
    if (StringUtils.isBlank(person.getPrincipalName())) {
        throw new IllegalArgumentException("invalid (empty) PrincipalName");
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("creating workflowDoc(" + documentTypeName + "," + person.getPrincipalName() + ")");
    }
    WorkflowDocument document = WorkflowDocumentFactory.createDocument(person.getPrincipalId(),
            documentTypeName);
    if (watch != null) {
        watch.stop();
        LOG.debug(watchName + ": " + watch.toString());
    }
    return document;
}
From source file:org.kuali.rice.test.ClearDatabaseLifecycle.java
protected void clearTables(final PlatformTransactionManager transactionManager, final DataSource dataSource) {
    Assert.assertNotNull("DataSource could not be located.", dataSource);
    try {
        StopWatch s = new StopWatch();
        s.start();
        new TransactionTemplate(transactionManager).execute(new TransactionCallback() {
            public Object doInTransaction(final TransactionStatus status) {
                verifyTestEnvironment(dataSource);
                return new JdbcTemplate(dataSource).execute(new StatementCallback() {
                    public Object doInStatement(Statement statement) throws SQLException {
                        String schemaName = statement.getConnection().getMetaData().getUserName()
                                .toUpperCase();
                        LOG.info("Clearing tables for schema " + schemaName);
                        if (StringUtils.isBlank(schemaName)) {
                            Assert.fail("Empty schema name given");
                        }
                        final List<String> reEnableConstraints = new ArrayList<String>();
                        DatabaseMetaData metaData = statement.getConnection().getMetaData();
                        Map<String, List<String[]>> exportedKeys = indexExportedKeys(metaData, schemaName);
                        final ResultSet resultSet = metaData.getTables(null, schemaName, null,
                                new String[] { "TABLE" });
                        final StringBuilder logStatements = new StringBuilder();
                        while (resultSet.next()) {
                            String tableName = resultSet.getString("TABLE_NAME");
                            if (shouldTableBeCleared(tableName)) {
                                if (!isUsingDerby(metaData) && isUsingOracle(metaData)) {
                                    List<String[]> exportedKeyNames = exportedKeys.get(tableName);
                                    if (exportedKeyNames != null) {
                                        for (String[] exportedKeyName : exportedKeyNames) {
                                            final String fkName = exportedKeyName[0];
                                            final String fkTableName = exportedKeyName[1];
                                            final String disableConstraint = "ALTER TABLE " + fkTableName
                                                    + " DISABLE CONSTRAINT " + fkName;
                                            logStatements.append("Disabling constraints using statement ->"
                                                    + disableConstraint + "<-\n");
                                            statement.addBatch(disableConstraint);
                                            reEnableConstraints.add("ALTER TABLE " + fkTableName
                                                    + " ENABLE CONSTRAINT " + fkName);
                                        }
                                    }
                                } else if (isUsingMySQL(metaData)) {
                                    statement.addBatch("SET FOREIGN_KEY_CHECKS = 0");
                                }
                                String deleteStatement = "DELETE FROM " + tableName;
                                logStatements.append("Clearing contents using statement ->"
                                        + deleteStatement + "<-\n");
                                statement.addBatch(deleteStatement);
                            }
                        }
                        for (final String constraint : reEnableConstraints) {
                            logStatements.append("Enabling constraints using statement ->" + constraint
                                    + "<-\n");
                            statement.addBatch(constraint);
                        }
                        if (isUsingMySQL(metaData)) {
                            statement.addBatch("SET FOREIGN_KEY_CHECKS = 1");
                        }
                        LOG.info(logStatements);
                        int[] results = statement.executeBatch();
                        for (int index = 0; index < results.length; index++) {
                            if (results[index] == Statement.EXECUTE_FAILED) {
                                Assert.fail("Execution of database clear statement failed.");
                            }
                        }
                        resultSet.close();
                        LOG.info("Tables successfully cleared for schema " + schemaName);
                        return null;
                    }
                });
            }
        });
        s.stop();
        LOG.info("Time to clear tables: " + DurationFormatUtils.formatDurationHMS(s.getTime()));
    } catch (Exception e) {
        LOG.error(e);
        throw new RuntimeException(e);
    }
}