List of usage examples for org.apache.commons.collections.Transformer
From source file:org.dspace.discovery.SolrServiceImpl.java
@Override public List<Item> getRelatedItems(Context context, Item item, DiscoveryMoreLikeThisConfiguration mltConfig) { List<Item> results = new ArrayList<Item>(); try {// w w w. j a va 2 s . c om SolrQuery solrQuery = new SolrQuery(); //Set the query to handle since this is unique solrQuery.setQuery("handle: " + item.getHandle()); //Add the more like this parameters ! solrQuery.setParam(MoreLikeThisParams.MLT, true); //Add a comma separated list of the similar fields @SuppressWarnings("unchecked") java.util.Collection<String> similarityMetadataFields = CollectionUtils .collect(mltConfig.getSimilarityMetadataFields(), new Transformer() { @Override public Object transform(Object input) { //Add the mlt appendix ! return input + "_mlt"; } }); solrQuery.setParam(MoreLikeThisParams.SIMILARITY_FIELDS, StringUtils.join(similarityMetadataFields, ',')); solrQuery.setParam(MoreLikeThisParams.MIN_TERM_FREQ, String.valueOf(mltConfig.getMinTermFrequency())); solrQuery.setParam(MoreLikeThisParams.DOC_COUNT, String.valueOf(mltConfig.getMax())); solrQuery.setParam(MoreLikeThisParams.MIN_WORD_LEN, String.valueOf(mltConfig.getMinWordLength())); QueryResponse rsp = getSolr().query(solrQuery); NamedList mltResults = (NamedList) rsp.getResponse().get("moreLikeThis"); if (mltResults != null && mltResults.get(item.getType() + "-" + item.getID()) != null) { SolrDocumentList relatedDocs = (SolrDocumentList) mltResults .get(item.getType() + "-" + item.getID()); for (Object relatedDoc : relatedDocs) { SolrDocument relatedDocument = (SolrDocument) relatedDoc; DSpaceObject relatedItem = findDSpaceObject(context, relatedDocument); if (relatedItem.getType() == Constants.ITEM) { results.add((Item) relatedItem); } } } } catch (Exception e) { log.error(LogManager.getHeader(context, "Error while retrieving related items", "Handle: " + item.getHandle()), e); } return results; }
From source file:org.eclipse.wb.internal.layout.group.gef.GroupLayoutEditPolicy2.java
@Override protected Command getAddCommand(final ChangeBoundsRequest request) { return new EditCommand(getJavaInfo()) { @Override//from ww w .j a v a 2s.com protected void executeEdit() throws Exception { List<EditPart> editParts = request.getEditParts(); @SuppressWarnings("unchecked") List<AbstractComponentInfo> models = (List<AbstractComponentInfo>) CollectionUtils .collect(editParts, new Transformer() { public Object transform(Object input) { return ((EditPart) input).getModel(); } }); m_layout.command_add(models); } }; }
From source file:org.exoplatform.services.jcr.impl.core.query.lucene.SearchIndex.java
/** * {@inheritDoc}/*from w w w . j a v a 2s . c o m*/ */ public ChangesHolder getChanges(Iterator<String> remove, Iterator<NodeData> add) { final Map<String, NodeData> aggregateRoots = new HashMap<String, NodeData>(); final Set<String> removedNodeIds = new HashSet<String>(); final Set<String> addedNodeIds = new HashSet<String>(); Collection<String> docIdsToRemove = IteratorUtils.toList(new TransformIterator(remove, new Transformer() { public Object transform(Object input) { String uuid = ((String) input); removedNodeIds.add(uuid); return uuid; } })); Collection<Document> docsToAdd = IteratorUtils.toList(new TransformIterator(add, new Transformer() { public Object transform(Object input) { NodeData state = (NodeData) input; if (state == null) { return null; } String uuid = state.getIdentifier(); addedNodeIds.add(uuid); removedNodeIds.remove(uuid); Document doc = null; try { doc = createDocument(state, getNamespaceMappings(), index.getIndexFormatVersion()); retrieveAggregateRoot(state, aggregateRoots); } catch (RepositoryException e) { log.warn("Exception while creating document for node: " + state.getIdentifier() + ": " + e.toString(), e); } return doc; } })); // remove any aggregateRoot nodes that are new // and therefore already up-to-date aggregateRoots.keySet().removeAll(addedNodeIds); // based on removed UUIDs get affected aggregate root nodes retrieveAggregateRoot(removedNodeIds, aggregateRoots); // update aggregates if there are any affected if (aggregateRoots.size() > 0) { Collection modified = TransformedCollection.decorate(new ArrayList(), new Transformer() { public Object transform(Object input) { NodeData state = (NodeData) input; try { return createDocument(state, getNamespaceMappings(), index.getIndexFormatVersion()); } catch (RepositoryException e) { log.warn("Exception while creating document for node: " + state.getIdentifier() + ": " + e.toString()); } return null; } }); modified.addAll(aggregateRoots.values()); 
docIdsToRemove.addAll(aggregateRoots.keySet()); docsToAdd.addAll(modified); } if (docIdsToRemove.isEmpty() && docsToAdd.isEmpty()) { return null; } return new ChangesHolder(docIdsToRemove, docsToAdd); }
From source file:org.fao.geonet.kernel.mef.Importer.java
/**
 * Imports one or more metadata records from a MEF v1/v2 archive or a plain XML
 * file, visiting each record with an anonymous IMEFVisitor that collects
 * metadata, feature catalogues and info.xml settings, then inserts them into
 * the database and the search index.
 *
 * NOTE(review): the md/fc/id lists are index-positional and mutated from the
 * visitor callbacks; callback invocation order is assumed to follow the index
 * argument — confirm against MEFLib.visit before restructuring.
 *
 * @param params     request parameters (used for the "validate" flag)
 * @param dto        import settings: file type, category, stylesheet
 * @param context    service context providing DB and user session
 * @param mefFile    the uploaded file to import
 * @param stylePath  directory containing optional import stylesheets
 * @param indexGroup when true, index via indexMetadataGroup instead of indexMetadata
 * @return the list of database ids of the imported records
 * @throws Exception on bad file type, unknown schema, or any import failure
 */
public static List<String> doImport(final Element params, final ImportMetadataDTO dto,
        final ServiceContext context, File mefFile, final String stylePath, final boolean indexGroup)
        throws Exception {
    final GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
    final DataManager dm = gc.getDataManager();
    // Load preferred schema and set to iso19139 by default
    final String preferredSchema = (gc.getHandlerConfig().getMandatoryValue("preferredSchema") != null
            ? gc.getHandlerConfig().getMandatoryValue("preferredSchema")
            : "iso19139");
    final Dbms dbms = (Dbms) context.getResourceManager().open(Geonet.Res.MAIN_DB);
    // Index-positional accumulators filled by the visitor callbacks below.
    final List<String> id = new ArrayList<String>();
    final List<Element> md = new ArrayList<Element>();
    final List<Element> fc = new ArrayList<Element>();
    // Try to define MEF version from mef file not from parameter
    String fileType = dto.getFileType();
    if (fileType.equals("mef")) {
        MEFLib.Version version = MEFLib.getMEFVersion(mefFile);
        if (version.equals(MEFLib.Version.V2))
            fileType = "mef2";
    }
    // Pick the visitor matching the (possibly corrected) file type.
    IVisitor visitor;
    if (fileType.equals("single"))
        visitor = new XmlVisitor();
    else if (fileType.equals("mef"))
        visitor = new MEFVisitor();
    else if (fileType.equals("mef2"))
        visitor = new MEF2Visitor();
    else
        throw new BadArgumentException("Bad file type parameter.");
    // --- import metadata from MEF, Xml, ZIP files
    MEFLib.visit(mefFile, visitor, new IMEFVisitor() {
        @Override
        public void handleMetadata(Element metadata, int index) throws Exception {
            Log.debug(Geonet.MEF, "Collecting metadata:\n" + Xml.getString(metadata));
            md.add(index, metadata);
        }

        @Override
        public void handleMetadataFiles(File[] Files, int index) throws Exception {
            Log.debug(Geonet.MEF, "Multiple metadata files");
            Element metadataValidForImport = null;
            for (File file : Files) {
                if (file != null && !file.isDirectory()) {
                    Element metadata = Xml.loadFile(file);
                    String metadataSchema = dm.autodetectSchema(metadata);
                    // If local node doesn't know metadata
                    // schema try to load next xml file.
                    if (metadataSchema == null) {
                        continue;
                    }
                    // If schema is preferred local node schema
                    // load that file.
                    if (metadataSchema.equals(preferredSchema)) {
                        Log.debug(Geonet.MEF, "Found metadata file " + file.getName()
                                + " with preferred schema (" + preferredSchema + ").");
                        handleMetadata(metadata, index);
                        return;
                    } else {
                        Log.debug(Geonet.MEF, "Found metadata file " + file.getName()
                                + " with known schema (" + metadataSchema + ").");
                        metadataValidForImport = metadata;
                    }
                }
            }
            // Import a valid metadata if not one found
            // with preferred schema.
            if (metadataValidForImport != null) {
                Log.debug(Geonet.MEF, "Importing metadata with valide schema but not preferred one.");
                handleMetadata(metadataValidForImport, index);
            } else
                throw new BadFormatEx("No valid metadata file found.");
        }

        // --------------------------------------------------------------------
        @Override
        public void handleFeatureCat(Element featureCat, int index) throws Exception {
            if (featureCat != null) {
                Log.debug(Geonet.MEF, "Collecting feature catalog:\n" + Xml.getString(featureCat));
            }
            fc.add(index, featureCat);
        }

        // --------------------------------------------------------------------
        /**
         * Record is not a template by default. No category attached to
         * record by default. No stylesheet used by default. If no site
         * identifier provided, use current node id by default. No
         * validation by default.
         *
         * If record is a template and not a MEF file always generate a new
         * UUID.
         */
        @Override
        @SuppressWarnings("unchecked")
        public void handleInfo(Element info, int index) throws Exception {
            String FS = File.separator;
            String uuid = null;
            String createDate = null;
            String changeDate = null;
            String source;
            String sourceName = null;
            // Schema in info.xml is not used anymore.
            // as we use autodetect schema to define
            // metadata schema.
            // String schema = null;
            String isTemplate = "n";
            String localId = null;
            String rating = null;
            String popularity = null;
            // Category
            Category category = null;
            boolean validate = false;
            Element metadata = md.get(index);
            String schema = dm.autodetectSchema(metadata);
            // NOTE(review): schema is null here, so the message always prints
            // "null" — consider reporting the record index/uuid instead.
            if (schema == null)
                throw new Exception("Unknown schema format : " + schema);
            // Handle non MEF files insertion (no info.xml content present).
            if (info.getChildren().size() == 0) {
                source = gc.getSiteId();
                category = dto.getCategory();
                String style = "None";
                if (dto.getStylesheet() != null) {
                    style = dto.getStylesheet().getName();
                }
                // Apply a stylesheet transformation if requested
                if (!style.equals("None"))
                    md.add(index, Xml.transform(md.get(index), stylePath + FS + style));
                // Get the Metadata uuid if it's not a template.
                if (isTemplate.equals("n"))
                    uuid = dm.extractUUID(schema, md.get(index));
                validate = Util.getParam(params, Params.VALIDATE, "off").equals("on");
            } else {
                // MEF import: read settings from info.xml.
                Element categsElt = info.getChild("categories");
                // Collect the name attribute of every <category> child.
                Collection<String> categNames = CollectionUtils.collect(categsElt.getChildren("category"),
                        new Transformer() {
                            @Override
                            public Object transform(Object input) {
                                return ((Element) input).getAttributeValue("name");
                            }
                        });
                CategoryManager cm = new CategoryManager(dbms);
                // FIXME Category may be null ... NPE!!
                // (category.getId() below would then throw)
                category = mapLocalCategory(cm.getAllCategories(), categNames);
                Element general = info.getChild("general");
                uuid = general.getChildText("uuid");
                createDate = general.getChildText("createDate");
                changeDate = general.getChildText("changeDate");
                source = general.getChildText("siteId");
                sourceName = general.getChildText("siteName");
                localId = general.getChildText("localId");
                isTemplate = "n";
                rating = general.getChildText("rating");
                popularity = general.getChildText("popularity");
            }
            if (validate)
                dm.validate(schema, metadata);
            String uuidAction = Params.NOTHING;
            importRecord(uuid, localId, uuidAction, md, schema, index, source, sourceName, context, id,
                    createDate, changeDate, isTemplate, category.getId());
            // Import the feature catalogue attached to this record, if any.
            if (fc.size() != 0 && fc.get(index) != null) {
                // UUID is set as @uuid in root element
                uuid = UUID.randomUUID().toString();
                fc.add(index, dm.setUUID("iso19110", uuid, fc.get(index)));
                String fcId = dm.insertMetadataExt(dbms, "iso19110", fc.get(index), source, createDate,
                        changeDate, uuid, context.getUserSession().getUsername());
                Log.debug(Geonet.MEF, "Adding Feature catalog with uuid: " + uuid);
                // Create database relation between metadata and feature
                // catalog
                String mdId = id.get(index);
                String query = "INSERT INTO Relations (id, relatedId) VALUES (?, ?)";
                dbms.execute(query, Integer.parseInt(mdId), Integer.parseInt(fcId));
                id.add(fcId);
                // TODO : privileges not handled for feature
                // catalog ...
            }
            int iId = Integer.parseInt(id.get(index));
            if (rating != null)
                dbms.execute("UPDATE Metadata SET rating=? WHERE id=?", new Integer(rating), iId);
            if (popularity != null)
                dbms.execute("UPDATE Metadata SET popularity=? WHERE id=?", new Integer(popularity), iId);
            dm.setTemplateExt(dbms, uuid, isTemplate, null);
            dm.setHarvestedExt(dbms, uuid, null);
            // Create the public/private resource directories for this record.
            String pubDir = Lib.resource.getDir(context, "public", id.get(index));
            String priDir = Lib.resource.getDir(context, "private", id.get(index));
            new File(pubDir).mkdirs();
            new File(priDir).mkdirs();
            if (indexGroup) {
                dm.indexMetadataGroup(dbms, uuid, null);
            } else {
                dm.indexMetadata(dbms, uuid, null);
            }
        }

        // --------------------------------------------------------------------
        @Override
        public void handlePublicFile(String file, String changeDate, InputStream is, int index)
                throws IOException {
            Log.debug(Geonet.MEF, "Adding public file with name=" + file);
            saveFile(context, id.get(index), "public", file, changeDate, is);
        }

        // --------------------------------------------------------------------
        @Override
        public void handlePrivateFile(String file, String changeDate, InputStream is, int index)
                throws IOException {
            Log.debug(Geonet.MEF, "Adding private file with name=" + file);
            saveFile(context, id.get(index), "private", file, changeDate, is);
        }
    });
    return id;
}
From source file:org.fao.geonet.lib.ResourceLib.java
/** * Check that the operation is allowed for current user. See * {@link AccessManager#getOperations(ServiceContext, String, String)}. * /*from w w w . j av a2s . c om*/ * @param context * @param id The metadata identifier * @param operation See {@link AccessManager}. * @throws Exception */ @SuppressWarnings("unchecked") public void checkPrivilege(ServiceContext context, String id, int operation) throws Exception { // Always grant VIEW privileges for non authenticated users. if (OperationEnum.VIEW.getId() == operation) { return; } Dbms dbms = (Dbms) context.getResourceManager().open(Geonet.Res.MAIN_DB); AccessManager accessMan = new AccessManager(dbms); UserSession us = context.getUserSession(); List<Group> groups = null; GroupManager gm = new GroupManager(dbms); IDataPolicyManager dpm = new DataPolicyManager(dbms); if (us.isAuthenticated()) { if (accessMan.isOwner(context, id)) { groups = gm.getAllGroups(); } else { groups = gm.getAllUserGroups(us.getUserId()); } } Collection<Operation> operations = dpm.getAllOperationAllowedByMetadataId(id, groups); Collection<Integer> operationsEnum = CollectionUtils.collect(operations, new Transformer() { @Override public Object transform(Object arg0) { return ((Operation) arg0).getId(); } }); if (!operationsEnum.contains(operation)) { throw new OperationNotAllowedEx(); } }
From source file:org.fenixedu.academic.dto.guide.reimbursementGuide.InfoReimbursementGuide.java
/**
 * Copies the state of the given domain object into this info bean, including
 * the guide, its entries and its situations as nested info objects.
 * A null argument only triggers the superclass copy.
 */
public void copyFromDomain(ReimbursementGuide reimbursementGuide) {
    super.copyFromDomain(reimbursementGuide);
    if (reimbursementGuide == null) {
        return;
    }
    setCreationDate(reimbursementGuide.getCreationDate());
    setInfoGuide(InfoGuideWithPersonAndExecutionDegreeAndDegreeCurricularPlanAndDegree
            .newInfoFromDomain(reimbursementGuide.getGuide()));
    setNumber(reimbursementGuide.getNumber());
    // Wrap each domain entry in its info counterpart.
    List entryInfos = (List) CollectionUtils.collect(reimbursementGuide.getReimbursementGuideEntriesSet(),
            new Transformer() {
                @Override
                public Object transform(Object element) {
                    return InfoReimbursementGuideEntry
                            .newInfoFromDomain((ReimbursementGuideEntry) element);
                }
            });
    setInfoReimbursementGuideEntries(entryInfos);
    // Same conversion for the guide situations.
    List situationInfos = (List) CollectionUtils.collect(
            reimbursementGuide.getReimbursementGuideSituationsSet(), new Transformer() {
                @Override
                public Object transform(Object element) {
                    return InfoReimbursementGuideSituation
                            .newInfoFromDomain((ReimbursementGuideSituation) element);
                }
            });
    setInfoReimbursementGuideSituations(situationInfos);
}
From source file:org.fenixedu.academic.presentationTier.Action.gep.TeachingStaffDispatchAction.java
/**
 * Returns the curricular-plan info beans for every execution degree of the
 * given execution year.
 *
 * @param executionYearID external id of the execution year
 * @return list of InfoDegreeCurricularPlan
 * @throws FenixServiceException with key "nullDegree" when the year resolves
 *         to no execution degrees
 */
protected List readActiveDegreeCurricularPlansByExecutionYear(String executionYearID)
        throws FenixServiceException {
    ExecutionYear executionYear = FenixFramework.getDomainObject(executionYearID);
    Collection executionDegrees = (executionYear == null) ? null : executionYear.getExecutionDegreesSet();
    if (executionDegrees == null) {
        throw new FenixServiceException("nullDegree");
    }
    // Convert each execution degree to the info bean of its curricular plan.
    return (List) CollectionUtils.collect(executionDegrees, new Transformer() {
        @Override
        public Object transform(Object element) {
            ExecutionDegree executionDegree = (ExecutionDegree) element;
            return InfoDegreeCurricularPlan.newInfoFromDomain(executionDegree.getDegreeCurricularPlan());
        }
    });
}
From source file:org.fenixedu.academic.service.services.commons.ReadActiveDegreeCurricularPlansByDegreeType.java
private static Collection<InfoDegreeCurricularPlan> getActiveDegreeCurricularPlansByDegreeType( Predicate<DegreeType> degreeType, AccessControlPredicate<Object> permission) { List<DegreeCurricularPlan> degreeCurricularPlans = new ArrayList<DegreeCurricularPlan>(); for (DegreeCurricularPlan dcp : DegreeCurricularPlan.readByDegreeTypeAndState(degreeType, DegreeCurricularPlanState.ACTIVE)) { if (permission != null) { if (!permission.evaluate(dcp.getDegree())) { continue; }//from w ww . j a v a2 s . co m } degreeCurricularPlans.add(dcp); } return CollectionUtils.collect(degreeCurricularPlans, new Transformer() { @Override public Object transform(Object arg0) { DegreeCurricularPlan degreeCurricularPlan = (DegreeCurricularPlan) arg0; return InfoDegreeCurricularPlan.newInfoFromDomain(degreeCurricularPlan); } }); }
From source file:org.fenixedu.academic.service.services.commons.student.ReadStudentsFromDegreeCurricularPlan.java
protected List run(String degreeCurricularPlanID) throws FenixServiceException { // Read the Students DegreeCurricularPlan degreeCurricularPlan = FenixFramework.getDomainObject(degreeCurricularPlanID); Collection students = degreeCurricularPlan.getStudentCurricularPlansSet(); if ((students == null) || (students.isEmpty())) { throw new NonExistingServiceException(); }/*from w ww . j a va 2s. co m*/ return (List) CollectionUtils.collect(students, new Transformer() { @Override public Object transform(Object arg0) { StudentCurricularPlan studentCurricularPlan = (StudentCurricularPlan) arg0; return InfoStudentCurricularPlan.newInfoFromDomain(studentCurricularPlan); } }); }
From source file:org.fenixedu.academic.service.services.resourceAllocationManager.SearchExecutionCourses.java
/**
 * Converts each execution course into an InfoExecutionCourse and fills in its
 * occupancy level: attending students as a percentage of the smallest
 * non-zero per-shift-type capacity, or -1 when no shift has capacity.
 *
 * @param academicInterval unused here; kept for interface compatibility
 * @param executionCourses courses to convert
 * @return the converted info beans, in iteration order of the input
 */
private List<InfoExecutionCourse> fillInfoExecutionCourses(final AcademicInterval academicInterval,
        List<ExecutionCourse> executionCourses) {
    List<InfoExecutionCourse> result;
    result = (List<InfoExecutionCourse>) CollectionUtils.collect(executionCourses, new Transformer() {
        @Override
        public Object transform(Object arg0) {
            return getOccupancyLevels(arg0);
        }

        // Computes the info bean plus its occupancy for one course.
        private InfoExecutionCourse getOccupancyLevels(Object arg0) {
            ExecutionCourse executionCourse = (ExecutionCourse) arg0;
            // Per-shift-type capacity accumulators. Plain ints replace the
            // original boxed Integer.valueOf(x.intValue() + ...) chains —
            // same arithmetic, no accidental boxing.
            int theoretical = 0;
            int theoPractical = 0;
            int practical = 0;
            int lab = 0;
            int doubts = 0;
            int reserve = 0;
            int seminary = 0;
            int problems = 0;
            int field = 0;
            int training = 0;
            int tutorial = 0;
            // A shift counts only toward the FIRST type it matches, in the
            // priority order below (preserved from the original chain).
            for (Shift shift : executionCourse.getAssociatedShifts()) {
                int lotacao = shift.getLotacao().intValue();
                if (shift.containsType(ShiftType.TEORICA)) {
                    theoretical += lotacao;
                } else if (shift.containsType(ShiftType.TEORICO_PRATICA)) {
                    theoPractical += lotacao;
                } else if (shift.containsType(ShiftType.DUVIDAS)) {
                    doubts += lotacao;
                } else if (shift.containsType(ShiftType.LABORATORIAL)) {
                    lab += lotacao;
                } else if (shift.containsType(ShiftType.PRATICA)) {
                    practical += lotacao;
                } else if (shift.containsType(ShiftType.RESERVA)) {
                    reserve += lotacao;
                } else if (shift.containsType(ShiftType.SEMINARY)) {
                    seminary += lotacao;
                } else if (shift.containsType(ShiftType.PROBLEMS)) {
                    problems += lotacao;
                } else if (shift.containsType(ShiftType.FIELD_WORK)) {
                    field += lotacao;
                } else if (shift.containsType(ShiftType.TRAINING_PERIOD)) {
                    training += lotacao;
                } else if (shift.containsType(ShiftType.TUTORIAL_ORIENTATION)) {
                    tutorial += lotacao;
                }
            }
            InfoExecutionCourse infoExecutionCourse = InfoExecutionCourse.newInfoFromDomain(executionCourse);
            // Effective capacity = minimum over the non-zero accumulators
            // (equivalent to the original Collections.min over a filtered list).
            int total = 0;
            int[] capacities = { theoretical, theoPractical, doubts, lab, practical, reserve, seminary,
                    problems, field, training, tutorial };
            for (int capacity : capacities) {
                if (capacity != 0 && (total == 0 || capacity < total)) {
                    total = capacity;
                }
            }
            if (total == 0) {
                // No capacity information available for this course.
                infoExecutionCourse.setOccupancy(Double.valueOf(-1));
            } else {
                // Keep the original float-based expression so rounding behavior
                // is unchanged.
                infoExecutionCourse.setOccupancy(NumberUtils.formatNumber(Double.valueOf(
                        (Double.valueOf(executionCourse.getAttendsSet().size()).floatValue() * 100 / total)),
                        1));
            }
            return infoExecutionCourse;
        }
    });
    return result;
}