List of usage examples for org.apache.commons.collections SequencedHashMap get
public Object get(Object key)
From source file:org.openlaszlo.cache.Cache.java
/**
 * Finds the item in the cache that matches the given key, creating a new
 * one if it does not exist. A found item is moved to the top of the LRU
 * order by removing it from its map and re-adding it at the end.
 *
 * Lookup order is: memory map, then disk map, then — only when
 * {@code doLockAndLeaveActive} is true — the active map.
 *
 * NOTE(review, from original): new items default to the mem cache if it
 * exists; items that are too big may really be cached on disk but sit
 * temporarily in the "mem cache" map, because the size is not known until
 * the item is fetched and the maps are deliberately not locked during the
 * fetch. This only happens when {@code doLockAndLeaveActive} is false.
 * At some point we could rearchitect to remove this wart.
 *
 * @param key the cache key identifying the item
 * @param enc encoding passed through to newly created {@code Item}s
 * @param doLockAndLeaveActive when true, lock the item, mark it active and
 *        file it in the active map instead of the mem/disk map
 * @return the found or newly created item (locked iff
 *         {@code doLockAndLeaveActive} was true)
 * @throws IOException if item creation or the disk-to-memory move fails;
 *         the item is unlocked first so it cannot stay locked forever
 */
protected synchronized Item findItem(Serializable key, String enc, boolean doLockAndLeaveActive)
        throws IOException {
    // curMap: where the item was found; newMap: where it will be re-filed.
    // With no mem cache at all (mMaxMemSize == 0), everything files to disk.
    SequencedHashMap curMap = mMemMap;
    SequencedHashMap newMap = mMemMap;
    boolean hasMemCache = (mMaxMemSize != 0);
    if (!hasMemCache) {
        newMap = mDiskMap;
    }
    Item item = (Item) curMap.get(key);
    if (item == null) {
        curMap = mDiskMap;
        item = (Item) curMap.get(key);
        if (item == null && doLockAndLeaveActive) {
            curMap = mActiveMap;
            item = (Item) curMap.get(key);
            // TODO: [2003-08-27 bloch] add assert in java 1.4 item.active()
        }
    }
    try {
        if (item == null) {
            // Cache miss: create a fresh item.
            item = new Item(key, enc, hasMemCache);
            if (doLockAndLeaveActive) {
                item.lock();
                item.setActive(true);
            }
        } else {
            // Cache hit: lock first, then settle any pending bookkeeping.
            if (doLockAndLeaveActive) {
                item.lock();
                item.setActive(true);
            }
            if (item.needsReckoning()) {
                item.reckon();
            }
            if (item.dirty()) {
                // Dirty item: discard it and start over with a new one.
                if (doLockAndLeaveActive) {
                    item.removeAndUnlock();
                } else {
                    item.remove();
                }
                curMap.remove(key);
                item = new Item(key, enc, hasMemCache);
                if (doLockAndLeaveActive) {
                    item.lock();
                    item.setActive(true);
                }
            } else {
                // Clean item: remove here; the put below re-adds it at the
                // end of its map, which refreshes its LRU position.
                curMap.remove(key);
                if (curMap == mDiskMap) {
                    if (newMap == mMemMap) {
                        // Update sizes when we're moving from disk to mem
                        long size = item.getSize();
                        if (size <= mMaxMemItemSize || mMaxMemItemSize <= 0) {
                            item.readIntoMemory();
                            // Update sizes after we read into memory in case
                            // the above read fails for some bizarro reason.
                            mDiskSize -= size;
                            mMemSize += size;
                        } else {
                            // Too big for the mem cache: keep it on disk.
                            newMap = mDiskMap;
                        }
                    }
                }
            }
        }
        if (!doLockAndLeaveActive) {
            newMap.put(key, item);
        } else {
            mActiveMap.put(key, item);
        }
    } catch (IOException e) {
        // If we get any kind of exception, we better unlock the item
        // since no one will be able to unlock it.
        if (doLockAndLeaveActive && item != null) {
            item.unlock();
        }
        throw e;
    } catch (RuntimeException re) {
        // Same as above: never propagate with the item still locked.
        if (doLockAndLeaveActive && item != null) {
            item.unlock();
        }
        throw re;
    }
    return item;
}
From source file:org.openlaszlo.cache.Cache.java
/**
 * Appends an XML description of every item in the given map to the buffer,
 * one {@code <item .../>} element per entry in the map's sequence order.
 * Keys longer than 128 characters are truncated in the output.
 *
 * @param buf       buffer the XML is appended to
 * @param map       map whose items are dumped
 * @param lockItems when true, each item is locked while its attributes are
 *                  read and unlocked afterwards
 */
private synchronized void dumpMap(StringBuffer buf, SequencedHashMap map, boolean lockItems) {
    for (Iterator<?> keys = map.iterator(); keys.hasNext();) {
        Object mapKey = keys.next();
        Item entry = (Item) map.get(mapKey);
        if (lockItems) {
            entry.lock();
        }
        // Truncate over-long keys so the dump stays readable.
        String shownKey = mapKey.toString();
        if (shownKey.length() > 128) {
            shownKey = shownKey.substring(0, 127) + "...";
        }
        buf.append("<item ");
        buf.append("key=\"" + XMLUtils.escapeXml(shownKey) + "\" ");
        buf.append("in-memory=\"" + Boolean.toString(entry.isInMemory()) + "\" ");
        buf.append("dirty=\"" + Boolean.toString(entry.dirty()) + "\" ");
        buf.append("active=\"" + Boolean.toString(entry.active()) + "\" ");
        buf.append("needs-reckon=\"" + Boolean.toString(entry.needsReckoning()) + "\" ");
        buf.append("mem-to-reckon=\"" + entry.memToReckon() + "\" ");
        buf.append("disk-to-reckon=\"" + entry.diskToReckon() + "\" ");
        buf.append("size=\"" + entry.getSize() + "\" ");
        buf.append("key-size=\"" + entry.getKeySize() + "\" ");
        buf.append("path-name=\"" + entry.getPathName() + "\" ");
        buf.append("info-name=\"" + entry.getInfoName() + "\" ");
        long lastModified = entry.getInfo().getLastModified();
        buf.append("last-modified=\"" + lastModified + "\" ");
        buf.append("last-modified-gmt=\"" + LZHttpUtils.getDateString(lastModified) + "\" ");
        buf.append("/>\n");
        if (lockItems) {
            entry.unlock();
        }
    }
}
From source file:xdoclet.modules.ojb.constraints.ModelConstraints.java
/** * Gathers the pk fields from the hierarchy of the given class, and copies them into the class. * /*from w w w.java2s . com*/ * @param classDef The root of the hierarchy * @throws ConstraintException If there is a conflict between the pk fields */ private void ensurePKsFromHierarchy(ClassDescriptorDef classDef) throws ConstraintException { SequencedHashMap pks = new SequencedHashMap(); for (Iterator it = classDef.getAllExtentClasses(); it.hasNext();) { ClassDescriptorDef subTypeDef = (ClassDescriptorDef) it.next(); ArrayList subPKs = subTypeDef.getPrimaryKeys(); // check against already present PKs for (Iterator pkIt = subPKs.iterator(); pkIt.hasNext();) { FieldDescriptorDef fieldDef = (FieldDescriptorDef) pkIt.next(); FieldDescriptorDef foundPKDef = (FieldDescriptorDef) pks.get(fieldDef.getName()); if (foundPKDef != null) { if (!isEqual(fieldDef, foundPKDef)) { throw new ConstraintException( "Cannot pull up the declaration of the required primary key " + fieldDef.getName() + " because its definitions in " + fieldDef.getOwner().getName() + " and " + foundPKDef.getOwner().getName() + " differ"); } } else { pks.put(fieldDef.getName(), fieldDef); } } } ensureFields(classDef, pks.values()); }