Usage examples for org.apache.commons.collections.SequencedHashMap
Constructor: public SequencedHashMap()
From source file:org.apache.openjpa.persistence.jdbc.common.apps.HorizRelation.java
public Map getCollections() { Map map = new SequencedHashMap(); map.put("HorizA", cHorizA); map.put("HorizB", cHorizB); map.put("HorizC", cHorizC); map.put("HorizD", cHorizD); map.put("HorizE", cHorizE); map.put("HorizF", cHorizF); map.put("HorizG", cHorizG); map.put("HorizH", cHorizH); map.put("HorizI", cHorizI); map.put("HorizJ", cHorizJ); map.put("HorizK", cHorizK); map.put("HorizL", cHorizL); map.put("HorizM", cHorizM); map.put("HorizN", cHorizN); map.put("HorizO", cHorizO); map.put("HorizAppSingleA", cHorizAppSingleA); map.put("HorizAppSingleB", cHorizAppSingleB); map.put("HorizAppSingleC", cHorizAppSingleC); map.put("HorizAppSingleD", cHorizAppSingleD); map.put("HorizAppMultiA", cHorizAppMultiA); map.put("HorizAppMultiB", cHorizAppMultiB); map.put("HorizAppMultiC", cHorizAppMultiC); map.put("HorizAppMultiD", cHorizAppMultiD); map.put("HorizInterFlatA", cHorizInterFlatA); map.put("HorizInterFlatB", cHorizInterFlatB); map.put("HorizInterFlatC", cHorizInterFlatC); map.put("HorizInterFlatD", cHorizInterFlatD); map.put("HorizInterVerticalA", cHorizInterVerticalA); map.put("HorizInterVerticalB", cHorizInterVerticalB); map.put("HorizInterVerticalC", cHorizInterVerticalC); map.put("HorizInterVerticalD", cHorizInterVerticalD); return map;//from w w w . java 2 s .c o m }
From source file:org.infoscoop.dao.KeywordLogDAO.java
/** * Get the map to add up keyword-ranking. * /* ww w .ja v a 2s . c o m*/ * @param startDate * @param endDate * @param keywordLogType * @return */ public Map getCountMap(final String startDate, final String endDate, final Integer keywordLogType) { Map countMap = (Map) super.getHibernateTemplate().execute(new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { Map countMap = new SequencedHashMap(); Criteria cri = session.createCriteria(Keyword.class); SimpleExpression se = Expression.eq("Type", keywordLogType); LogicalExpression le = Expression.and(Expression.ge("Date", startDate), Expression.le("Date", endDate)); LogicalExpression le2 = Expression.and(se, le); cri.add(le2); Projection projection = Projections.projectionList().add(Projections.property("Keyword")) .add(Projections.count("Keyword").as("KwdCount")).add(Projections.groupProperty("Keyword")); cri.setProjection(projection); cri.addOrder(Order.desc("KwdCount")); try { Object[] resultObjs; for (Iterator ite = cri.list().iterator(); ite.hasNext();) { resultObjs = (Object[]) ite.next(); String keyword = (String) resultObjs[0]; Integer count = (Integer) resultObjs[1]; countMap.put(keyword, count); } } catch (Exception e) { logger.error("parsing error", e); throw new RuntimeException(); } if (log.isInfoEnabled()) log.info("getCountMap successfully. : startDate=" + startDate + ", endDate=" + endDate + ", keywordLogType=" + keywordLogType); return countMap; } }); return countMap; }
From source file:org.infoscoop.service.PortalAdminsService.java
/** * @param adminsList/*from w w w .j a va 2s. c om*/ * @throws Exception * @throws Exception */ @SuppressWarnings("unchecked") public synchronized void updatePortalAdmins(Map adminsMap) throws Exception { if (adminsMap == null) return; boolean myIdExists = true; ISPrincipal p = SecurityController.getPrincipalByType("UIDPrincipal"); List getNotAllowDeleteRoleIds = adminRoleDAO.getNotAllowDeleteRoleIds(); adminRoleDAO.delete(); List rolesList = (List) adminsMap.get("roles"); List roleIdList = new ArrayList(); // insert roles for (Iterator ite = rolesList.iterator(); ite.hasNext();) { Map map = (Map) ite.next(); String roleId = (String) map.get("id"); String name = (String) map.get("name"); String permission = (String) map.get("permission"); adminRoleDAO.insert(roleId, name, permission, !getNotAllowDeleteRoleIds.contains(roleId)); roleIdList.add(roleId); } Map adminsData = new SequencedHashMap(); List adminsList = (List) adminsMap.get("admins"); String myRoleId = ""; for (Iterator ite = adminsList.iterator(); ite.hasNext();) { Map map = (Map) ite.next(); String uid = (String) map.get("uid"); String roleId = (String) map.get("roleId"); if (p.getName().equals(uid)) { myRoleId = roleId; myIdExists = true; } if (uid != null && roleId != null) { adminsData.put(uid, roleId); } } if (!myIdExists) throw new Exception("Same ID as oneself cannot be deleted."); portalAdminsDAO.delete(); // insert admins for (Iterator ite = adminsData.keySet().iterator(); ite.hasNext();) { String uid = (String) ite.next(); String roleId = (String) adminsData.get(uid); roleId = (roleIdList.contains(roleId)) ? roleId : null; portalAdminsDAO.insert(uid, roleId); } if (!roleIdList.contains(myRoleId) || !roleIdList.containsAll(getNotAllowDeleteRoleIds)) { throw new Exception("The roll that cannot be deleted is contained."); } }
From source file:org.infoscoop.service.TabLayoutService.java
/**
 * Return a map of customization information related to role information.
 * Returns default customization information if the role cannot be found.
 *
 * @return Map
 * <UL>
 * <LI>key : tabId</LI>
 * <LI>value : layout</LI>
 * </UL>
 *
 * @throws DataResourceException
 * @throws ClassNotFoundException
 * @throws Exception
 */
public Map<String, TabLayout> getMyTabLayoutHTML() throws ClassNotFoundException, Exception {
    Map source = getMyTabLayout();
    // Copy into a SequencedHashMap so the tab ordering is preserved.
    Map customized = new SequencedHashMap();
    for (Iterator it = source.entrySet().iterator(); it.hasNext();) {
        Map.Entry entry = (Map.Entry) it.next();
        String tabId = (String) entry.getKey();
        TabLayout layout = (TabLayout) entry.getValue();
        customized.put(tabId, layout);
    }
    return customized;
}
From source file:org.infoscoop.service.TabLayoutService.java
/**
 * Sorts the given tabId-to-TabLayout map by each layout's tab number
 * (ascending) and returns the result as an insertion-ordered map.
 *
 * @param map map of tabId to TabLayout
 * @return a SequencedHashMap with the entries in tab-number order
 */
private Map sortMapBySortId(Map map) {
    ArrayList entries = new ArrayList(map.entrySet());
    Collections.sort(entries, new Comparator() {
        public int compare(Object o1, Object o2) {
            Map.Entry e1 = (Map.Entry) o1;
            Map.Entry e2 = (Map.Entry) o2;
            TabLayout x1 = (TabLayout) e1.getValue();
            TabLayout x2 = (TabLayout) e2.getValue();
            int i = 0;
            int j = 0;
            // A missing tab number counts as 0; an unparsable one pushes
            // the entry toward the end (kept from the original logic).
            try {
                i = x1.getTabnumber() != null ? x1.getTabnumber().intValue() : 0;
            } catch (NumberFormatException e) {
                return 1;
            }
            try {
                j = x2.getTabnumber() != null ? x2.getTabnumber().intValue() : 0;
            } catch (NumberFormatException e) {
                return 0;
            }
            // BUG FIX: the original returned (i > j) ? 1 : 0, which is not
            // antisymmetric and violates the Comparator contract - it can
            // yield a wrong order or an IllegalArgumentException from
            // TimSort. Return a proper three-way comparison instead.
            return (i < j) ? -1 : ((i == j) ? 0 : 1);
        }
    });
    Iterator ite = entries.iterator();
    Map sortedMap = new SequencedHashMap();
    while (ite.hasNext()) {
        Map.Entry e1 = (Map.Entry) ite.next();
        sortedMap.put(e1.getKey(), e1.getValue());
    }
    return sortedMap;
}
From source file:org.infoscoop.util.Xml2Json.java
/**
 * Converts the given node list to a JSON object. The base XPath is derived
 * from the parent of the first node and stored in {@code this.basePath}.
 *
 * @param nodes the nodes to convert; may be null or empty
 * @return the resulting JSONObject, or null when there is nothing to convert
 * @throws Exception if the conversion fails
 */
public JSONObject xml2jsonObj(NodeList nodes) throws Exception {
    this.basePath = null;
    if (nodes == null || nodes.getLength() == 0) {
        return null;
    }
    Node parent = nodes.item(0).getParentNode();
    if (parent == null) {
        return null;
    }
    this.basePath = getXPath((Element) parent);
    // SequencedHashMap keeps the document order of the converted nodes.
    Map json = new SequencedHashMap();
    nodelist2json(json, nodes);
    return new JSONObject(json);
}
From source file:org.infoscoop.util.Xml2Json.java
/**
 * Converts a single element to its JSON representation: either a bare text
 * value (for elements registered as "singles") or a JSONObject built from
 * the element's attributes and children.
 *
 * @param element the element to convert
 * @return a text value for single-valued elements, otherwise a JSONObject
 * @throws Exception if the conversion fails
 */
private Object node2json(Element element) throws Exception {
    String xpath = getXPath(element);
    // Single-valued elements collapse to their text content.
    if (singles.contains(xpath)) {
        Node first = element.getFirstChild();
        return (first != null) ? listner.text(first.getNodeValue()) : "";
    }
    // Attributes first, in document order.
    Map json = new SequencedHashMap();
    NamedNodeMap attrs = element.getAttributes();
    for (int i = 0; i < attrs.getLength(); i++) {
        Node attr = attrs.item(i);
        json.put(attr.getNodeName(), listner.text(attr.getNodeValue()));
    }
    // Then the child nodes.
    nodelist2json(json, element.getChildNodes());
    return new JSONObject(json);
}
From source file:phex.common.AlternateLocationContainer.java
/**
 * Lazily creates the alternate-location map on first use.
 */
private void initMap() {
    if (altLocationMap != null) {
        return; // already initialized
    }
    altLocationMap = new SequencedHashMap();
}
From source file:xdoclet.modules.ojb.constraints.ModelConstraints.java
/**
 * Ensures that the foreign keys required by the given collection are present in the element class.
 *
 * @param modelDef The model
 * @param collDef The collection
 * @throws ConstraintException If there is a problem with the foreign keys
 */
private void ensureReferencedFKs(ModelDef modelDef, CollectionDescriptorDef collDef) throws ConstraintException {
    String elementClassName = collDef.getProperty(PropertyHelper.OJB_PROPERTY_ELEMENT_CLASS_REF);
    ClassDescriptorDef elementClassDef = modelDef.getClass(elementClassName);
    String fkFieldNames = collDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
    ArrayList missingFields = new ArrayList();
    // SequencedHashMap keeps the foreignkey fields in declaration order.
    SequencedHashMap fkFields = new SequencedHashMap();

    // first we gather all field names; a field not found in the element
    // class is recorded as missing but still put into the map (as null)
    // so its position in the ordering is reserved.
    for (CommaListIterator it = new CommaListIterator(fkFieldNames); it.hasNext();) {
        String fieldName = (String) it.next();
        FieldDescriptorDef fieldDef = elementClassDef.getField(fieldName);

        if (fieldDef == null) {
            missingFields.add(fieldName);
        }
        fkFields.put(fieldName, fieldDef);
    }
    // next we traverse all sub types and gather fields as we go; a field
    // found in a sub type replaces its null placeholder in fkFields.
    // Note: the index is only advanced when nothing was removed, because
    // remove(idx) shifts the remaining entries left.
    for (Iterator it = elementClassDef.getAllExtentClasses(); it.hasNext() && !missingFields.isEmpty();) {
        ClassDescriptorDef subTypeDef = (ClassDescriptorDef) it.next();

        for (int idx = 0; idx < missingFields.size();) {
            FieldDescriptorDef fieldDef = subTypeDef.getField((String) missingFields.get(idx));

            if (fieldDef != null) {
                fkFields.put(fieldDef.getName(), fieldDef);
                missingFields.remove(idx);
            } else {
                idx++;
            }
        }
    }
    if (!missingFields.isEmpty()) {
        throw new ConstraintException("Cannot find field " + missingFields.get(0).toString()
                + " in the hierarchy with root type " + elementClassDef.getName()
                + " which is used as foreignkey in collection " + collDef.getName() + " in "
                + collDef.getOwner().getName());
    }
    // copy the found fields into the element class
    ensureFields(elementClassDef, fkFields.values());
}
From source file:xdoclet.modules.ojb.constraints.ModelConstraints.java
/**
 * Gathers the pk fields from the hierarchy of the given class, and copies them into the class.
 *
 * @param classDef The root of the hierarchy
 * @throws ConstraintException If there is a conflict between the pk fields
 */
private void ensurePKsFromHierarchy(ClassDescriptorDef classDef) throws ConstraintException {
    // SequencedHashMap keeps the primary keys in discovery order.
    SequencedHashMap pks = new SequencedHashMap();

    for (Iterator it = classDef.getAllExtentClasses(); it.hasNext();) {
        ClassDescriptorDef subTypeDef = (ClassDescriptorDef) it.next();

        ArrayList subPKs = subTypeDef.getPrimaryKeys();

        // check against already present PKs: a pk of the same name that
        // was found earlier in another sub type must have an equal
        // definition, otherwise the pull-up is ambiguous.
        for (Iterator pkIt = subPKs.iterator(); pkIt.hasNext();) {
            FieldDescriptorDef fieldDef = (FieldDescriptorDef) pkIt.next();
            FieldDescriptorDef foundPKDef = (FieldDescriptorDef) pks.get(fieldDef.getName());

            if (foundPKDef != null) {
                if (!isEqual(fieldDef, foundPKDef)) {
                    throw new ConstraintException("Cannot pull up the declaration of the required primary key "
                            + fieldDef.getName() + " because its definitions in " + fieldDef.getOwner().getName()
                            + " and " + foundPKDef.getOwner().getName() + " differ");
                }
            } else {
                pks.put(fieldDef.getName(), fieldDef);
            }
        }
    }
    // copy the gathered pk fields into the root class
    ensureFields(classDef, pks.values());
}