List of usage examples for org.apache.shiro SecurityUtils getSubject
public static Subject getSubject()
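SecurityUtils.getSubject() returns the Subject (the security-specific "current user") bound to the calling thread. The Ark examples below call it mainly to read the logged-in principal and to pull study/module identifiers out of the Shiro Session. Before the project-specific examples, here is a minimal, self-contained sketch of that pattern; the DefaultSecurityManager bootstrap and the literal "STUDY_CONTEXT_ID" key are illustrative stand-ins (the real examples use constants from au.org.theark.core.Constants):

import org.apache.shiro.SecurityUtils;
import org.apache.shiro.mgt.DefaultSecurityManager;
import org.apache.shiro.session.Session;
import org.apache.shiro.subject.Subject;

public class GetSubjectSketch {
    public static void main(String[] args) {
        // A SecurityManager must be in place before getSubject() is called;
        // web applications normally configure this via shiro.ini or Spring.
        SecurityUtils.setSecurityManager(new DefaultSecurityManager());

        // Subject bound to the current thread (anonymous until a login occurs).
        Subject currentUser = SecurityUtils.getSubject();

        // The principal identifies the user; it is null before authentication.
        Object principal = currentUser.getPrincipal();

        // The Subject's Session can carry application state, e.g. a study-in-context id.
        Session session = currentUser.getSession();
        session.setAttribute("STUDY_CONTEXT_ID", 1L);
        Long studyId = (Long) session.getAttribute("STUDY_CONTEXT_ID");

        System.out.println("principal=" + principal + ", studyId=" + studyId);
    }
}

In the web-tier examples below, the Shiro filter typically binds the Subject to the request thread, so the same getSubject(), getPrincipal() and getSession() calls work without this manual bootstrap.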
From source file:au.org.theark.lims.web.component.subjectlims.subject.SubjectContainerPanel.java
License:Open Source License
protected WebMarkupContainer initialiseSearchResults() {
    searchResultListPanel = new SearchResultListPanel("searchResults", arkContextMarkup, containerForm,
            arkCrudContainerVO, studyNameMarkup, studyLogoMarkup, treeModel);
    subjectProvider = new ArkDataProvider2<LimsVO, LinkSubjectStudy>() {

        private static final long serialVersionUID = 1L;

        public int size() {
            // Restrict search if Study selected in Search form
            List<Study> studyList = new ArrayList<Study>(0);
            Long sessionStudyId = (Long) SecurityUtils.getSubject().getSession()
                    .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
            if (sessionStudyId != null) {
                Study study = iArkCommonService.getStudy(sessionStudyId);
                if (criteriaModel.getObject().getStudy() != null
                        && criteriaModel.getObject().getStudy().getId() != null) {
                    // Restrict search to study in drop-down
                    studyList.add(criteriaModel.getObject().getStudy());
                } else {
                    if (study.getParentStudy() != null && study.getParentStudy().equals(study)) {
                        studyList = criteriaModel.getObject().getStudyList();
                        if (studyList.isEmpty()) {
                            studyList = containerForm.getStudyListForUser();
                        }
                    } else {
                        studyList.add(study);
                    }
                }
            }
            return iLimsSubjectService.getSubjectCount(criteriaModel.getObject(), studyList);
        }

        public Iterator<LinkSubjectStudy> iterator(int first, int count) {
            List<LinkSubjectStudy> listSubjects = new ArrayList<LinkSubjectStudy>(0);
            // Restrict search if Study selected in Search form
            List<Study> studyList = new ArrayList<Study>(0);
            Long sessionStudyId = (Long) SecurityUtils.getSubject().getSession()
                    .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
            if (sessionStudyId != null) {
                Study study = iArkCommonService.getStudy(sessionStudyId);
                if (criteriaModel.getObject().getStudy() != null
                        && criteriaModel.getObject().getStudy().getId() != null) {
                    // Restrict search to study in drop-down
                    studyList.add(criteriaModel.getObject().getStudy());
                } else {
                    if (study.getParentStudy() != null && study.getParentStudy().equals(study)) {
                        studyList = criteriaModel.getObject().getStudyList();
                        if (studyList.isEmpty()) {
                            studyList = containerForm.getStudyListForUser();
                        }
                    } else {
                        studyList.add(study);
                    }
                }
            }
            listSubjects = iLimsSubjectService.searchPageableSubjects(criteriaModel.getObject(), studyList,
                    first, count);
            return listSubjects.iterator();
        }
    };
    subjectProvider.setCriteriaModel(this.cpModel);

    dataView = searchResultListPanel.buildDataView(subjectProvider);
    dataView.setItemsPerPage(iArkCommonService.getRowsPerPage());

    AjaxPagingNavigator pageNavigator = new AjaxPagingNavigator("navigator", dataView) {
        private static final long serialVersionUID = 1L;

        @Override
        protected void onAjaxEvent(AjaxRequestTarget target) {
            target.add(arkCrudContainerVO.getSearchResultPanelContainer());
        }
    };
    searchResultListPanel.add(pageNavigator);
    searchResultListPanel.add(dataView);
    arkCrudContainerVO.getSearchResultPanelContainer().add(searchResultListPanel);
    return arkCrudContainerVO.getSearchResultPanelContainer();
}
From source file:au.org.theark.phenotypic.job.PhenoDataUploadExecutor.java
License:Open Source License
public void run() throws Exception {
    SchedulerFactory sf = new StdSchedulerFactory();
    Scheduler sched = sf.getScheduler();
    Log.warn("executor " + uidsToUpload.size());

    JobDetail phenoDataUploadJob = newJob(PhenoDataUploadJob.class).withIdentity("PhenoDataUploadJob", "group2")
            .build();

    // pass initialization parameters into the job
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.IARKCOMMONSERVICE, iArkCommonService);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.IPHENOSERVICE, iPhenoService);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.UPLOADID, uploadId);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.STUDY_ID, studyId);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.REPORT, report);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.FILE_FORMAT, fileFormat);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.INPUT_STREAM, inputStream);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.DELIMITER, delimiter);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.SIZE, size);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.LIST_OF_UIDS_TO_UPDATE, uidsToUpload);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.PHENO_COLLECTION, phenoCollection);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.PHENO_FIELD_GROUP, phenoDataSetGroup);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.OVERWRITE_EXISTING, overwriteExisting);
    phenoDataUploadJob.getJobDataMap().put(PhenoDataUploadJob.USERNAME,
            SecurityUtils.getSubject().getPrincipal().toString());

    Date startTime = nextGivenSecondDate(null, 1);
    SimpleTrigger trigger1 = newTrigger().withIdentity("PhenoDataUploadJobTrigger", "group1").startAt(startTime)
            .withSchedule(simpleSchedule()).build();

    sched.scheduleJob(phenoDataUploadJob, trigger1);
    // log.warn(studyUploadJob.getKey() + " will run at: " + scheduleTime1 + " and repeat: " + trigger1.getRepeatCount() + " times, every " + trigger1.getRepeatInterval() / 1000 + " seconds");

    // All of the jobs have been added to the scheduler, but none of the jobs will run until the scheduler has been started
    sched.start();
    //sched.triggerJob(phenoDataUploadJob.getKey());
}
From source file:au.org.theark.phenotypic.model.dao.PhenotypicDao.java
License:Open Source License
public void createAuditHistory(AuditHistory auditHistory, String userId, Study study) {
    Date date = new Date(System.currentTimeMillis());
    if (userId == null) {
        // if not forcing a userId manually, use the current user
        Subject currentUser = SecurityUtils.getSubject();
        auditHistory.setArkUserId((String) currentUser.getPrincipal());
    } else {
        auditHistory.setArkUserId(userId);
    }
    if (study == null) {
        Long sessionStudyId = (Long) SecurityUtils.getSubject().getSession()
                .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
        if (sessionStudyId != null && auditHistory.getStudyStatus() == null) {
            auditHistory.setStudyStatus(getStudy(sessionStudyId).getStudyStatus());
        } else {
            if (auditHistory.getEntityType().equalsIgnoreCase(au.org.theark.core.Constants.ENTITY_TYPE_STUDY)) {
                Study studyFromDB = getStudy(auditHistory.getEntityId());
                if (studyFromDB != null) {
                    auditHistory.setStudyStatus(studyFromDB.getStudyStatus());
                }
            }
        }
    } else {
        auditHistory.setStudyStatus(study.getStudyStatus());
    }
    auditHistory.setDateTime(date);
    getSession().save(auditHistory);
}
From source file:au.org.theark.phenotypic.util.PhenoDataSetFieldImporter.java
License:Open Source License
/**
 * PhenotypicImport constructor
 *
 * @param study
 *           the study in context
 * @param arkFunction
 *           the function that this CustomField import should attach to
 * @param iArkCommonService
 *           the common service for dao
 * @param fileFormat
 *           format of the file uploaded
 * @param delimiterChar
 *           delimiter of the file data (comma, tab etc)
 */
public PhenoDataSetFieldImporter(Study study, ArkFunction arkFunction, IArkCommonService<Void> iArkCommonService,
        IPhenotypicService iPhenotypicService, String fileFormat, char delimiterChar) {
    this.study = study;
    this.iArkCommonService = iArkCommonService;
    this.iPhenotypicService = iPhenotypicService;
    this.fileFormat = fileFormat;
    this.phenotypicDelimChr = delimiterChar;
    this.arkFunction = arkFunction;
    Long sessionModuleId = (Long) SecurityUtils.getSubject().getSession()
            .getAttribute(au.org.theark.core.Constants.ARK_MODULE_KEY);
    this.arkModule = iArkCommonService.getArkModuleById(sessionModuleId);
}
From source file:au.org.theark.phenotypic.util.PhenoDataSetFieldImportValidator.java
License:Open Source License
/**
 * CustomFieldImportValidator constructor
 *
 * @param iArkCommonService
 * @param uploadVo
 */
public PhenoDataSetFieldImportValidator(IArkCommonService<Void> iArkCommonService,
        IPhenotypicService iPhenotypicService, UploadVO uploadVo) {
    this.iArkCommonService = iArkCommonService;
    this.iPhenotypicService = iPhenotypicService;
    this.arkFunction = uploadVo.getUpload().getArkFunction();

    // Set study in context
    Long studyId = (Long) SecurityUtils.getSubject().getSession()
            .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
    if (studyId != null) {
        Study study = iArkCommonService.getStudy(studyId);
        this.study = study;
    }

    this.fileValidationMessages = new ArrayList<String>();
    this.dataValidationMessages = new ArrayList<String>();

    String filename = uploadVo.getFileUpload().getClientFileName();
    this.fileFormat = filename.substring(filename.lastIndexOf('.') + 1).toUpperCase();
    this.delimChr = uploadVo.getUpload().getDelimiterType().getDelimiterCharacter();
}
From source file:au.org.theark.phenotypic.util.PhenoDataUploadValidator.java
License:Open Source License
public PhenoDataUploadValidator() {
    super();
    Subject currentUser = SecurityUtils.getSubject();
    studyId = (Long) currentUser.getSession().getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
    this.study = iArkCommonService.getStudy(studyId);
    this.existantSubjectUIDRows = new HashSet<Integer>();
    this.nonExistantUIDs = new HashSet<Integer>();
    this.errorCells = new HashSet<ArkGridCell>();
    this.warningRows = new HashSet<Integer>();
    this.insertCells = new HashSet<ArkGridCell>();
    simpleDateFormat.setLenient(false);
}
From source file:au.org.theark.phenotypic.util.PhenoDataUploadValidator.java
License:Open Source License
@SuppressWarnings("unchecked")
public PhenoDataUploadValidator(IArkCommonService iArkCommonService, IPhenotypicService iPhenotypicService) {
    super();
    this.iArkCommonService = iArkCommonService;
    this.iPhenotypicService = iPhenotypicService;
    Subject currentUser = SecurityUtils.getSubject();
    studyId = (Long) currentUser.getSession().getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
    this.study = iArkCommonService.getStudy(studyId);
    this.existantSubjectUIDRows = new HashSet<Integer>();
    this.nonExistantUIDs = new HashSet<Integer>();
    this.errorCells = new HashSet<ArkGridCell>();
    this.warningRows = new HashSet<Integer>();
    this.insertCells = new HashSet<ArkGridCell>();
    simpleDateFormat.setLenient(false);
}
From source file:au.org.theark.phenotypic.web.component.phenodatacategory.form.SearchForm.java
License:Open Source License
/**
 * @param id
 */
public SearchForm(String id, CompoundPropertyModel<PhenoDataSetCategoryVO> cpModel, FeedbackPanel feedBackPanel,
        ArkCrudContainerVO arkCrudContainerVO) {
    super(id, cpModel, feedBackPanel, arkCrudContainerVO);
    this.cpModel = cpModel;
    this.feedbackPanel = feedBackPanel;
    this.arkCrudContainerVO = arkCrudContainerVO;
    initialiseFieldForm();

    Long sessionStudyId = (Long) SecurityUtils.getSubject().getSession()
            .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
    disableSearchForm(sessionStudyId, "There is no study in context. Please select a study");
}
From source file:au.org.theark.phenotypic.web.component.phenodatacategory.form.SearchForm.java
License:Open Source License
/**
 * Get custom field category collection from model.
 * @return
 */
/*private Collection<PhenoDataSetCategory> getParentCategoryCollectionFromModel() {
    PhenoDataSetCategory phenoDataSetCategory = cpModel.getObject().getPhenoDataSetCategory();
    Study study = phenoDataSetCategory.getStudy();
    ArkFunction arkFunction = phenoDataSetCategory.getArkFunction();
    try {
        //phenoDataSetCategoryCollection = iPhenotypicService.getPhenoParentCategoryList(study, arkFunction);
    } catch (ArkSystemException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return phenoDataSetCategoryCollection;
}*/

@Override
protected void onSearch(AjaxRequestTarget target) {
    target.add(feedbackPanel);
    final Long sessionStudyId = (Long) SecurityUtils.getSubject().getSession()
            .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
    // Get a list of all Fields for the Study in context
    Study study = iArkCommonService.getStudy(sessionStudyId);
    getModelObject().getPhenoDataSetCategory().setStudy(study);
    long count = iPhenotypicService.getPhenoDataSetCategoryCount(getModelObject().getPhenoDataSetCategory());
    if (count <= 0L) {
        this.info("No records match the specified criteria.");
        target.add(feedbackPanel);
    }
    // Make the WebMarkupContainer that houses the search results visible
    arkCrudContainerVO.getSearchResultPanelContainer().setVisible(true);
    target.add(arkCrudContainerVO.getSearchResultPanelContainer());
}
From source file:au.org.theark.phenotypic.web.component.phenodatacategory.PhenoDataCategoryContainerPanel.java
License:Open Source License
protected void prerenderContextCheck() {
    // Get the Study and Module out of context
    Long sessionStudyId = (Long) SecurityUtils.getSubject().getSession()
            .getAttribute(au.org.theark.core.Constants.STUDY_CONTEXT_ID);
    Long sessionModuleId = (Long) SecurityUtils.getSubject().getSession()
            .getAttribute(au.org.theark.core.Constants.ARK_MODULE_KEY);

    if ((sessionStudyId != null) && (sessionModuleId != null)) {
        Study study = null;
        study = iArkCommonService.getStudy(sessionStudyId);
        ArkModule arkModule = iArkCommonService.getArkModuleById(sessionModuleId);
        if (study != null && arkModule != null) {
            cpModel.getObject().getPhenoDataSetCategory().setStudy(study);
            // TODO: Maybe check that the primary function supplied is of the same module?
        }
    }
}