List of usage examples for org.apache.commons.lang ArrayUtils getLength
public static int getLength(Object array)
Returns the length of the specified array. The method handles both Object arrays and primitive arrays; a null array is treated as length 0, and an IllegalArgumentException is thrown if the argument is not an array.
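Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) illustrating the null-safe behaviour those snippets rely on:

import org.apache.commons.lang.ArrayUtils;

public class GetLengthDemo {
    public static void main(String[] args) {
        System.out.println(ArrayUtils.getLength(new String[] { "a", "b", "c" })); // 3
        System.out.println(ArrayUtils.getLength(new int[0]));                     // 0 (primitive arrays are supported)
        System.out.println(ArrayUtils.getLength(null));                           // 0 (null is treated as an empty array)
        // ArrayUtils.getLength("not an array") would throw IllegalArgumentException
    }
}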
From source file:org.apache.jetspeed.portlets.custom.CustomConfigModePortlet.java
private boolean hasEqualItems(String[] array, List<String> list) {
    if (ArrayUtils.isEmpty(array)) {
        return (list == null || list.isEmpty());
    } else if (list == null) {
        return ArrayUtils.isEmpty(array);
    } else if (ArrayUtils.getLength(array) == list.size()) {
        for (String item : array) {
            if (!list.contains(item)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
From source file:org.geoserver.security.iride.entity.identity.IrideIdentityValidator.java
/**
 * Check that the given tokens length equals the expected length, which should be {@link IrideIdentityToken#values()} length
 * (i.e.: the number of tokens defined in {@link IrideIdentityToken} enum).<br />
 * {@code null} tokens array is considered of length 0.<p>
 * If not so, an {@link IllegalArgumentException} is thrown, detailing the given tokens length vs the expected one.
 *
 * @param tokens the given tokens array to check for valid length
 */
private void checkTokens(String[] tokens) {
    final int expectedTokenLength = IrideIdentityToken.values().length;
    if (ArrayUtils.getLength(tokens) != expectedTokenLength) {
        throw new IllegalArgumentException("Tokens array length is " + ArrayUtils.getLength(tokens)
                + " instead of the expected mandatory length of " + expectedTokenLength + " elements.");
    }
}
From source file:org.geoserver.security.iride.entity.IrideRole.java
/**
 * Utility method to parse an <code>IRIDE</code> Role mnemonic string representation.<br />
 * It accepts a mnemonic string representation, expressed with the following format:
 * <code>"role code{@link #SEPARATOR}domain code"</code>.
 *
 * @param mnemonic <code>IRIDE</code> Role mnemonic string representation
 * @return an <code>IRIDE</code> Role entity object
 * @throws IllegalArgumentException if the given mnemonic string representation is not in the expected format
 */
public static IrideRole parseRole(String mnemonic) {
    final String[] tokens = StringUtils.splitByWholeSeparator(mnemonic, SEPARATOR);
    if (ArrayUtils.getLength(tokens) != 2) {
        throw new IllegalArgumentException(mnemonic);
    }
    return new IrideRole(tokens[0], tokens[1]);
}
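The two-token check above is null-safe as well: StringUtils.splitByWholeSeparator(null, ...) returns null, and ArrayUtils.getLength(null) returns 0, so a null mnemonic simply fails the != 2 test. A hypothetical sketch of the same pattern in isolation (class name and separator value are illustrative, not taken from the GeoServer source):

import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;

public class MnemonicCheckDemo {
    private static final String SEPARATOR = "/"; // illustrative separator, not the actual IrideRole#SEPARATOR

    static boolean looksLikeRoleMnemonic(String mnemonic) {
        // getLength(null) == 0, so a null mnemonic is rejected without a NullPointerException
        String[] tokens = StringUtils.splitByWholeSeparator(mnemonic, SEPARATOR);
        return ArrayUtils.getLength(tokens) == 2;
    }

    public static void main(String[] args) {
        System.out.println(looksLikeRoleMnemonic("ADMIN/IPA")); // true
        System.out.println(looksLikeRoleMnemonic("ADMIN"));     // false
        System.out.println(looksLikeRoleMnemonic(null));        // false
    }
}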
From source file:org.gravidence.gravifon.validation.AbstractValidator.java
/**
 * Validates that required <code>Authorization</code> header is actually presented and technically valid.
 *
 * @param headers request headers
 * @throws GravifonException in case required <code>Authorization</code> header is not found or invalid
 *
 * @see AuthUtils#extractCredentials(javax.ws.rs.core.MultivaluedMap)
 */
protected void checkRequiredAuthorizationHeader(MultivaluedMap<String, String> headers) throws GravifonException {
    String[] credentials = RequestUtils.extractCredentials(headers);
    if (ArrayUtils.getLength(credentials) != 2 || StringUtils.isBlank(credentials[0])
            || StringUtils.isBlank(credentials[1])) {
        throw new GravifonException(GravifonError.NOT_AUTHORIZED,
                "Authorization details are not presented or invalid.", null, false);
    }
}
From source file:org.openmrs.module.clinicalsummary.task.DateCreatedEvaluatorProcessor.java
public void processSummary() {
    // remove the concept cache every night
    CacheUtils.clearConceptCache();
    CacheUtils.clearEncounterTypeCache();
    // location is clustered, clusters are separated by comma
    String clusterNames = Context.getAdministrationService()
            .getGlobalProperty(TaskParameters.LOCATION_GROUP_LIST);
    if (clusterNames != null) {
        String[] clusterName = StringUtils.split(clusterNames, TaskParameters.CLUSTER_SEPARATOR);
        GlobalProperty globalProperty = Context.getAdministrationService()
                .getGlobalPropertyObject(TaskParameters.PROCESSOR_COUNTER);
        // start with the first cluster (offset = 0) when the counter is not a number
        Integer clusterOffset = NumberUtils.toInt(globalProperty.getPropertyValue(), 0);
        if (clusterOffset >= 0 && clusterOffset < ArrayUtils.getLength(clusterName)) {
            GlobalProperty initProperty = Context.getAdministrationService()
                    .getGlobalPropertyObject(TaskParameters.PROCESSOR_INITIALIZED);
            String currentCluster = clusterName[clusterOffset];
            // check whether all cluster have been initialized or not
            Boolean initialized = BooleanUtils.toBoolean(initProperty.getPropertyValue());
            Cohort cohort;
            String[] locationIds = StringUtils.split(currentCluster);
            for (int i = 0; i < ArrayUtils.getLength(locationIds); i++) {
                log.info("Processing location with id: " + locationIds[i]);
                // default return to -1 because no such location with id -1
                Location location = Context.getLocationService()
                        .getLocation(NumberUtils.toInt(locationIds[i], -1));
                if (!initialized) {
                    cohort = Context.getService(CoreService.class).getDateCreatedCohort(location, null, null);
                } else {
                    // regenerate when there's new obs
                    Calendar calendar = Calendar.getInstance();
                    calendar.add(Calendar.DATE, -(clusterName.length + 1));
                    Date date = calendar.getTime();
                    cohort = Context.getService(CoreService.class).getDateCreatedCohort(location, date, new Date());
                }
                evaluate(cohort);
            }
            clusterOffset++;
            if (clusterOffset == ArrayUtils.getLength(clusterName)) {
                clusterOffset = 0;
                initialized = true;
            }
            globalProperty.setPropertyValue(String.valueOf(clusterOffset));
            Context.getAdministrationService().saveGlobalProperty(globalProperty);
            initProperty.setPropertyValue(String.valueOf(initialized));
            Context.getAdministrationService().saveGlobalProperty(initProperty);
        }
    }
}
From source file:org.openmrs.module.clinicalsummary.task.DateCreatedReminderProcessor.java
public void processReminder() {
    // location is clustered, clusters are separated by comma
    String clusterNames = Context.getAdministrationService()
            .getGlobalProperty(TaskParameters.LOCATION_GROUP_LIST);
    if (clusterNames != null) {
        String[] clusterName = StringUtils.split(clusterNames, TaskParameters.CLUSTER_SEPARATOR);
        String processorCounter = Context.getAdministrationService()
                .getGlobalProperty(TaskParameters.PROCESSOR_COUNTER);
        // start with the first cluster (offset = 0) when the counter is not a number
        Integer clusterOffset = NumberUtils.toInt(processorCounter, 0);
        if (clusterOffset >= 0 && clusterOffset < ArrayUtils.getLength(clusterName)) {
            String initProperty = Context.getAdministrationService()
                    .getGlobalProperty(TaskParameters.PROCESSOR_INITIALIZED);
            String currentCluster = clusterName[clusterOffset];
            // check whether all cluster have been initialized or not
            Boolean initialized = BooleanUtils.toBoolean(initProperty);
            Cohort cohort;
            String[] locationIds = StringUtils.split(currentCluster);
            for (int i = 0; i < ArrayUtils.getLength(locationIds); i++) {
                log.info("Processing location with id: " + locationIds[i]);
                // default return to -1 because no such location with id -1
                Location location = Context.getLocationService()
                        .getLocation(NumberUtils.toInt(locationIds[i], -1));
                if (!initialized) {
                    cohort = Context.getService(CoreService.class).getDateCreatedCohort(location, null, null);
                } else {
                    // regenerate when there's new obs
                    Calendar calendar = Calendar.getInstance();
                    calendar.add(Calendar.DATE, -(clusterName.length + 1));
                    Date date = calendar.getTime();
                    cohort = Context.getService(CoreService.class).getDateCreatedCohort(location, date, new Date());
                }
                evaluate(cohort);
            }
        }
    }
}
From source file:org.openmrs.module.clinicalsummary.task.OrderedObsProcessor.java
public void processObservations() {
    // location is clustered, clusters are separated by comma
    String clusterNames = Context.getAdministrationService()
            .getGlobalProperty(TaskParameters.LOCATION_GROUP_LIST);
    if (clusterNames != null) {
        String[] clusterName = StringUtils.split(clusterNames, TaskParameters.CLUSTER_SEPARATOR);
        String processorCounter = Context.getAdministrationService()
                .getGlobalProperty(TaskParameters.PROCESSOR_COUNTER);
        // start with the first cluster (offset = 0) when the counter is not a number
        Integer clusterOffset = NumberUtils.toInt(processorCounter, 0);
        if (clusterOffset >= 0 && clusterOffset < ArrayUtils.getLength(clusterName)) {
            String initProperty = Context.getAdministrationService()
                    .getGlobalProperty(TaskParameters.PROCESSOR_INITIALIZED);
            String currentCluster = clusterName[clusterOffset];
            // check whether all cluster have been initialized or not
            Boolean initialized = BooleanUtils.toBoolean(initProperty);
            Cohort cohort;
            String[] locationIds = StringUtils.split(currentCluster);
            for (int i = 0; i < ArrayUtils.getLength(locationIds); i++) {
                log.info("Processing location with id: " + locationIds[i]);
                // default return to -1 because no such location with id -1
                Location location = Context.getLocationService()
                        .getLocation(NumberUtils.toInt(locationIds[i], -1));
                if (!initialized) {
                    cohort = Context.getService(CoreService.class).getDateCreatedCohort(location, null, null);
                } else {
                    // regenerate when there's new obs
                    Calendar calendar = Calendar.getInstance();
                    calendar.add(Calendar.DATE, -(clusterName.length + 1));
                    Date date = calendar.getTime();
                    cohort = Context.getService(CoreService.class).getDateCreatedCohort(location, date, new Date());
                }
                // this processing is similar with the flow-sheet processing but we also include the duplicate processing here
                CoreService coreService = Context.getService(CoreService.class);
                for (Map<String, List<String>> parameter : parameters) {
                    // process each parameter
                    List<String> conceptNames = parameter.get(EvaluableConstants.OBS_CONCEPT);
                    List<String> valueCodedNames = parameter.get(EvaluableConstants.OBS_VALUE_CODED);
                    if (CollectionUtils.isNotEmpty(conceptNames) && CollectionUtils.isNotEmpty(valueCodedNames)) {
                        // prepare the concept restriction
                        Collection<OpenmrsObject> concepts = new ArrayList<OpenmrsObject>();
                        for (String conceptName : conceptNames) {
                            Concept concept = CacheUtils.getConcept(conceptName);
                            if (concept != null)
                                concepts.add(concept);
                        }
                        // test ordered concept
                        Concept testOrderedConcept = CacheUtils.getConcept(TaskParameters.TESTS_ORDERED);
                        Collection<OpenmrsObject> testedConcepts = new ArrayList<OpenmrsObject>();
                        testedConcepts.add(testOrderedConcept);
                        // prepare the value coded restriction
                        Collection<OpenmrsObject> valueCodeds = new ArrayList<OpenmrsObject>();
                        for (String valueCodedName : valueCodedNames) {
                            Concept concept = CacheUtils.getConcept(valueCodedName);
                            if (concept != null)
                                valueCodeds.add(concept);
                        }
                        Map<String, Collection<OpenmrsObject>> restrictions = new HashMap<String, Collection<OpenmrsObject>>();
                        for (Integer patientId : cohort.getMemberIds()) {
                            // search for the results
                            restrictions.put(EvaluableConstants.OBS_CONCEPT, concepts);
                            List<Obs> testResultObservations = coreService.getPatientObservations(patientId,
                                    restrictions, new FetchRestriction());
                            // remove and then save the duplicates
                            testResultObservations = stripDuplicate(testResultObservations);
                            // search for the tests
                            restrictions.put(EvaluableConstants.OBS_CONCEPT, testedConcepts);
                            restrictions.put(EvaluableConstants.OBS_VALUE_CODED, valueCodeds);
                            List<Obs> testOrderedObservations = coreService.getPatientObservations(patientId,
                                    restrictions, new FetchRestriction());
                            // remove and then save the duplicates
                            testOrderedObservations = stripDuplicate(testOrderedObservations);
                            // try to pair the obs and then save the un-pair-able obs
                            pair(testOrderedObservations, testResultObservations);
                        }
                    }
                }
            }
        }
    }
}
From source file:org.openmrs.module.clinicalsummary.task.ReturnDateEvaluatorProcessor.java
public void processSummary() {
    // remove the concept cache every night
    CacheUtils.clearConceptCache();
    CacheUtils.clearEncounterTypeCache();
    // location is clustered, clusters are separated by comma
    String clusterNames = Context.getAdministrationService()
            .getGlobalProperty(TaskParameters.LOCATION_GROUP_LIST);
    if (clusterNames != null) {
        String[] clusterName = StringUtils.split(clusterNames, TaskParameters.CLUSTER_SEPARATOR);
        String processorCounter = Context.getAdministrationService()
                .getGlobalProperty(TaskParameters.PROCESSOR_COUNTER);
        // start with the first cluster (offset = 0) when the counter is not a number
        Integer clusterOffset = NumberUtils.toInt(processorCounter, 0);
        if (clusterOffset >= 0 && clusterOffset < ArrayUtils.getLength(clusterName)) {
            String initProperty = Context.getAdministrationService()
                    .getGlobalProperty(TaskParameters.PROCESSOR_INITIALIZED);
            String currentCluster = clusterName[clusterOffset];
            // check whether all cluster have been initialized or not
            Boolean initialized = BooleanUtils.toBoolean(initProperty);
            Cohort cohort;
            String[] locationIds = StringUtils.split(currentCluster);
            for (int i = 0; i < ArrayUtils.getLength(locationIds); i++) {
                log.info("Processing location with id: " + locationIds[i]);
                // default return to -1 because no such location with id -1
                Location location = Context.getLocationService()
                        .getLocation(NumberUtils.toInt(locationIds[i], -1));
                if (!initialized) {
                    cohort = Context.getService(CoreService.class).getReturnDateCohort(location, null, null);
                } else {
                    // regenerate when there's new obs
                    Calendar calendar = Calendar.getInstance();
                    calendar.add(Calendar.DATE, clusterName.length * 5);
                    Date date = calendar.getTime();
                    cohort = Context.getService(CoreService.class).getReturnDateCohort(location, new Date(), date);
                }
                evaluate(cohort);
            }
        }
    }
}
From source file:org.openmrs.module.clinicalsummary.task.ReturnDateReminderProcessor.java
public void processReminder() {
    // location is clustered, clusters are separated by comma
    String clusterNames = Context.getAdministrationService()
            .getGlobalProperty(TaskParameters.LOCATION_GROUP_LIST);
    if (clusterNames != null) {
        String[] clusterName = StringUtils.split(clusterNames, TaskParameters.CLUSTER_SEPARATOR);
        String processorCounter = Context.getAdministrationService()
                .getGlobalProperty(TaskParameters.PROCESSOR_COUNTER);
        // start with the first cluster (offset = 0) when the counter is not a number
        Integer clusterOffset = NumberUtils.toInt(processorCounter, 0);
        if (clusterOffset >= 0 && clusterOffset < ArrayUtils.getLength(clusterName)) {
            String initProperty = Context.getAdministrationService()
                    .getGlobalProperty(TaskParameters.PROCESSOR_INITIALIZED);
            String currentCluster = clusterName[clusterOffset];
            // check whether all cluster have been initialized or not
            Boolean initialized = BooleanUtils.toBoolean(initProperty);
            Cohort cohort;
            String[] locationIds = StringUtils.split(currentCluster);
            for (int i = 0; i < ArrayUtils.getLength(locationIds); i++) {
                log.info("Processing location with id: " + locationIds[i]);
                // default return to -1 because no such location with id -1
                Location location = Context.getLocationService()
                        .getLocation(NumberUtils.toInt(locationIds[i], -1));
                if (!initialized) {
                    cohort = Context.getService(CoreService.class).getDateCreatedCohort(location, null, null);
                } else {
                    // regenerate when there's new obs
                    Calendar calendar = Calendar.getInstance();
                    calendar.add(Calendar.DATE, clusterName.length * 2);
                    Date date = calendar.getTime();
                    cohort = Context.getService(CoreService.class).getReturnDateCohort(location, new Date(), date);
                }
                evaluate(cohort);
            }
        }
    }
}
From source file:org.pdfsam.console.business.pdf.handlers.ConcatCmdExecutor.java
public void execute(AbstractParsedCommand parsedCommand) throws ConsoleException {
    if ((parsedCommand != null) && (parsedCommand instanceof ConcatParsedCommand)) {
        ConcatParsedCommand inputCommand = (ConcatParsedCommand) parsedCommand;
        setPercentageOfWorkDone(0);
        // xml or csv parsing
        PdfFile[] fileList = inputCommand.getInputFileList();
        if (fileList == null || !(fileList.length > 0)) {
            File listFile = inputCommand.getInputCvsOrXmlFile();
            if (listFile != null && listFile.exists()) {
                fileList = parseListFile(listFile);
            } else if (inputCommand.getInputDirectory() != null) {
                fileList = getPdfFiles(inputCommand.getInputDirectory());
            }
        }
        // no input file found
        if (fileList == null || !(fileList.length > 0)) {
            throw new ConcatException(ConcatException.CMD_NO_INPUT_FILE);
        }
        // init
        int pageOffset = 0;
        ArrayList master = new ArrayList();
        Document pdfDocument = null;
        int totalProcessedPages = 0;
        try {
            String[] pageSelections = inputCommand.getPageSelections();
            File tmpFile = FileUtility.generateTmpFile(inputCommand.getOutputFile());
            int length = ArrayUtils.getLength(pageSelections);
            for (int i = 0; i < fileList.length; i++) {
                String currentPageSelection = ValidationUtility.ALL_STRING;
                int currentDocumentPages = 0;
                // i < length guards the array access below (i <= length would read past the end)
                if (!ArrayUtils.isEmpty(pageSelections) && i < length) {
                    currentPageSelection = pageSelections[i].toLowerCase();
                }
                String[] selectionGroups = StringUtils.split(currentPageSelection, ",");
                pdfReader = PdfUtility.readerFor(fileList[i]);
                pdfReader.removeUnusedObjects();
                pdfReader.consolidateNamedDestinations();
                int pdfNumberOfPages = pdfReader.getNumberOfPages();
                BookmarksProcessor bookmarkProcessor = new BookmarksProcessor(
                        SimpleBookmark.getBookmark(pdfReader), pdfNumberOfPages);
                List boundsList = getBounds(pdfNumberOfPages, selectionGroups);
                ValidationUtility.assertNotIntersectedBoundsList(boundsList);
                String boundsString = "";
                for (Iterator iter = boundsList.iterator(); iter.hasNext();) {
                    Bounds bounds = (Bounds) iter.next();
                    boundsString += (boundsString.length() > 0) ? "," + bounds.toString() : bounds.toString();
                    // bookmarks
                    List bookmarks = bookmarkProcessor.processBookmarks(bounds.getStart(), bounds.getEnd(),
                            pageOffset);
                    if (bookmarks != null) {
                        master.addAll(bookmarks);
                    }
                    int relativeOffset = (bounds.getEnd() - bounds.getStart()) + 1;
                    currentDocumentPages += relativeOffset;
                    pageOffset += relativeOffset;
                }
                // add pages
                LOG.info(fileList[i].getFile().getAbsolutePath() + ": " + currentDocumentPages
                        + " pages to be added.");
                if (pdfWriter == null) {
                    if (inputCommand.isCopyFields()) {
                        // step 1: we create a writer
                        pdfWriter = new PdfCopyFieldsConcatenator(new FileOutputStream(tmpFile),
                                inputCommand.isCompress());
                        LOG.debug("PdfCopyFieldsConcatenator created.");
                        // output document version
                        if (inputCommand.getOutputPdfVersion() != null) {
                            pdfWriter.setPdfVersion(inputCommand.getOutputPdfVersion().charValue());
                        }
                        HashMap meta = pdfReader.getInfo();
                        meta.put("Creator", ConsoleServicesFacade.CREATOR);
                    } else {
                        // step 1: creation of a document-object
                        pdfDocument = new Document(pdfReader.getPageSizeWithRotation(1));
                        // step 2: we create a writer that listens to the document
                        pdfWriter = new PdfSimpleConcatenator(pdfDocument, new FileOutputStream(tmpFile),
                                inputCommand.isCompress());
                        LOG.debug("PdfSimpleConcatenator created.");
                        // output document version
                        if (inputCommand.getOutputPdfVersion() != null) {
                            pdfWriter.setPdfVersion(inputCommand.getOutputPdfVersion().charValue());
                        }
                        // step 3: we open the document
                        pdfDocument.addCreator(ConsoleServicesFacade.CREATOR);
                        pdfDocument.open();
                    }
                }
                // step 4: we add content
                pdfReader.selectPages(boundsString);
                pdfWriter.addDocument(pdfReader);
                // fix 03/07
                // pdfReader = null;
                pdfReader.close();
                pdfWriter.freeReader(pdfReader);
                totalProcessedPages += currentDocumentPages;
                LOG.info(currentDocumentPages + " pages processed correctly.");
                setPercentageOfWorkDone(((i + 1) * WorkDoneDataModel.MAX_PERGENTAGE) / fileList.length);
            }
            if (master.size() > 0) {
                pdfWriter.setOutlines(master);
            }
            LOG.info("Total processed pages: " + totalProcessedPages + ".");
            if (pdfDocument != null) {
                pdfDocument.close();
            }
            // rotations
            if (inputCommand.getRotations() != null && inputCommand.getRotations().length > 0) {
                LOG.info("Applying pages rotation.");
                File rotatedTmpFile = applyRotations(tmpFile, inputCommand);
                FileUtility.deleteFile(tmpFile);
                FileUtility.renameTemporaryFile(rotatedTmpFile, inputCommand.getOutputFile(),
                        inputCommand.isOverwrite());
            } else {
                FileUtility.renameTemporaryFile(tmpFile, inputCommand.getOutputFile(),
                        inputCommand.isOverwrite());
            }
            LOG.debug("File " + inputCommand.getOutputFile().getCanonicalPath() + " created.");
        } catch (ConsoleException consoleException) {
            throw consoleException;
        } catch (Exception e) {
            throw new ConcatException(e);
        } finally {
            setWorkCompleted();
        }
    } else {
        throw new ConsoleException(ConsoleException.ERR_BAD_COMMAND);
    }
}