List of usage examples for org.apache.commons.collections SequencedHashMap put
public Object put(Object key, Object value)
From source file:org.openlaszlo.cache.Cache.java
/**
 * Finds (and optionally locks) the cache item that matches the given key.
 * If no item exists one is created; if a clean item exists it is moved to
 * the top of the LRU list by removing it and re-adding it below.
 *
 * @param key the cache key
 * @param enc the encoding passed through to new {@link Item}s
 * @param doLockAndLeaveActive if true, the item is locked, marked active,
 *        and re-added to the active map instead of the mem/disk maps
 * @return the found or newly created item (locked when requested)
 * @throws IOException if creating, reckoning, or reading the item fails;
 *         the item is unlocked first when it was locked here
 */
protected synchronized Item findItem(Serializable key, String enc, boolean doLockAndLeaveActive)
        throws IOException {
    // Lookup starts in the memory map; new/re-added items also default to
    // the memory map unless the memory cache is disabled (mMaxMemSize == 0).
    SequencedHashMap curMap = mMemMap;
    SequencedHashMap newMap = mMemMap;
    boolean hasMemCache = (mMaxMemSize != 0);
    if (!hasMemCache) {
        newMap = mDiskMap;
    }
    // Probe order: memory map, then disk map, then (only when locking is
    // requested) the active map.
    Item item = (Item) curMap.get(key);
    if (item == null) {
        curMap = mDiskMap;
        item = (Item) curMap.get(key);
        if (item == null && doLockAndLeaveActive) {
            curMap = mActiveMap;
            item = (Item) curMap.get(key);
            // TODO: [2003-08-27 bloch] add assert in java 1.4 item.active()
        }
    }
    // New items default to the mem cache if it exists. Note that some items
    // that are too big may be cached on disk (and not in mem) but may
    // temporarily be in the "mem cache" map. This is confusing, but simple
    // and safe to implement without complicated locking: we don't want to
    // lock the maps while fetching items, and an item's size isn't known
    // until it has been fetched.
    //
    // TODO: [2003-09-04 bloch] this now only happens when
    // doLockAndLeaveActive is false; at some point we could rearchitect to
    // remove this wart.
    try {
        if (item == null) {
            // Cache miss: create a fresh item.
            item = new Item(key, enc, hasMemCache);
            if (doLockAndLeaveActive) {
                item.lock();
                item.setActive(true);
            }
        } else {
            // Cache hit: lock first so reckon/dirty handling below is safe.
            if (doLockAndLeaveActive) {
                item.lock();
                item.setActive(true);
            }
            if (item.needsReckoning()) {
                item.reckon();
            }
            if (item.dirty()) {
                // Dirty item: discard it and replace it with a new one.
                if (doLockAndLeaveActive) {
                    item.removeAndUnlock();
                } else {
                    item.remove();
                }
                curMap.remove(key);
                item = new Item(key, enc, hasMemCache);
                if (doLockAndLeaveActive) {
                    item.lock();
                    item.setActive(true);
                }
            } else {
                // Clean item: remove it here and re-add it below, which
                // moves it to the top of the LRU sequence.
                curMap.remove(key);
                if (curMap == mDiskMap) {
                    if (newMap == mMemMap) {
                        // Found on disk but headed for memory: promote it
                        // when it fits under the per-item memory limit.
                        long size = item.getSize();
                        if (size <= mMaxMemItemSize || mMaxMemItemSize <= 0) {
                            item.readIntoMemory();
                            // Update sizes after we read into memory in case
                            // the above read fails for some bizarro reason.
                            mDiskSize -= size;
                            mMemSize += size;
                        } else {
                            // Too big for the memory cache; keep it on disk.
                            newMap = mDiskMap;
                        }
                    }
                }
            }
        }
        // Re-add: active items go to the active map; everything else goes
        // to the chosen mem/disk map.
        if (!doLockAndLeaveActive) {
            newMap.put(key, item);
        } else {
            mActiveMap.put(key, item);
        }
    } catch (IOException e) {
        // If we get any kind of exception, we better unlock the item
        // since no one else will be able to unlock it.
        if (doLockAndLeaveActive && item != null) {
            item.unlock();
        }
        throw e;
    } catch (RuntimeException re) {
        // Same unlock guarantee for unchecked failures.
        if (doLockAndLeaveActive && item != null) {
            item.unlock();
        }
        throw re;
    }
    return item;
}
From source file:xdoclet.modules.ojb.constraints.ModelConstraints.java
/** * Ensures that the foreign keys required by the given collection are present in the element class. * //www .j a v a 2 s. c o m * @param modelDef The model * @param collDef The collection * @throws ConstraintException If there is a problem with the foreign keys */ private void ensureReferencedFKs(ModelDef modelDef, CollectionDescriptorDef collDef) throws ConstraintException { String elementClassName = collDef.getProperty(PropertyHelper.OJB_PROPERTY_ELEMENT_CLASS_REF); ClassDescriptorDef elementClassDef = modelDef.getClass(elementClassName); String fkFieldNames = collDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY); ArrayList missingFields = new ArrayList(); SequencedHashMap fkFields = new SequencedHashMap(); // first we gather all field names for (CommaListIterator it = new CommaListIterator(fkFieldNames); it.hasNext();) { String fieldName = (String) it.next(); FieldDescriptorDef fieldDef = elementClassDef.getField(fieldName); if (fieldDef == null) { missingFields.add(fieldName); } fkFields.put(fieldName, fieldDef); } // next we traverse all sub types and gather fields as we go for (Iterator it = elementClassDef.getAllExtentClasses(); it.hasNext() && !missingFields.isEmpty();) { ClassDescriptorDef subTypeDef = (ClassDescriptorDef) it.next(); for (int idx = 0; idx < missingFields.size();) { FieldDescriptorDef fieldDef = subTypeDef.getField((String) missingFields.get(idx)); if (fieldDef != null) { fkFields.put(fieldDef.getName(), fieldDef); missingFields.remove(idx); } else { idx++; } } } if (!missingFields.isEmpty()) { throw new ConstraintException( "Cannot find field " + missingFields.get(0).toString() + " in the hierarchy with root type " + elementClassDef.getName() + " which is used as foreignkey in collection " + collDef.getName() + " in " + collDef.getOwner().getName()); } // copy the found fields into the element class ensureFields(elementClassDef, fkFields.values()); }
From source file:xdoclet.modules.ojb.constraints.ModelConstraints.java
/** * Gathers the pk fields from the hierarchy of the given class, and copies them into the class. * /*www.jav a 2s. c o m*/ * @param classDef The root of the hierarchy * @throws ConstraintException If there is a conflict between the pk fields */ private void ensurePKsFromHierarchy(ClassDescriptorDef classDef) throws ConstraintException { SequencedHashMap pks = new SequencedHashMap(); for (Iterator it = classDef.getAllExtentClasses(); it.hasNext();) { ClassDescriptorDef subTypeDef = (ClassDescriptorDef) it.next(); ArrayList subPKs = subTypeDef.getPrimaryKeys(); // check against already present PKs for (Iterator pkIt = subPKs.iterator(); pkIt.hasNext();) { FieldDescriptorDef fieldDef = (FieldDescriptorDef) pkIt.next(); FieldDescriptorDef foundPKDef = (FieldDescriptorDef) pks.get(fieldDef.getName()); if (foundPKDef != null) { if (!isEqual(fieldDef, foundPKDef)) { throw new ConstraintException( "Cannot pull up the declaration of the required primary key " + fieldDef.getName() + " because its definitions in " + fieldDef.getOwner().getName() + " and " + foundPKDef.getOwner().getName() + " differ"); } } else { pks.put(fieldDef.getName(), fieldDef); } } } ensureFields(classDef, pks.values()); }