asmuts 02/05/13 12:58:09
Modified: src/java/org/apache/jcs/engine/memory/lru
LRUMemoryCache.java
Log:
Cleaned up some old comments.
Added a commented-out method (getKeyArray).
Revision Changes Path
1.8 +49 -7
jakarta-turbine-jcs/src/java/org/apache/jcs/engine/memory/lru/LRUMemoryCache.java
Index: LRUMemoryCache.java
===================================================================
RCS file:
/home/cvs/jakarta-turbine-jcs/src/java/org/apache/jcs/engine/memory/lru/LRUMemoryCache.java,v
retrieving revision 1.7
retrieving revision 1.8
diff -u -r1.7 -r1.8
--- LRUMemoryCache.java 13 May 2002 18:36:33 -0000 1.7
+++ LRUMemoryCache.java 13 May 2002 19:58:09 -0000 1.8
@@ -22,18 +22,18 @@
/**
* A fast reference management system. The least recently used items move to
* the end of the list and get spooled to disk if the cache hub is configured
- * to use a disk cache. Most of the cache bottelnecks ar ein IO. There are no
+ * to use a disk cache. Most of the cache bottlenecks are in IO. There are no
* io bottlenecks here, it's all about processing power. Even though there are
* only a few adjustments necessary to maintain the double linked list, we
* might want to find a more efficient memory manager for large cache regions.
* The LRUMemoryCache is most efficient when the first element is selected. The
- * smaller teh region, the better the chance that this will be the case. < .04
+ * smaller the region, the better the chance that this will be the case. < .04
* ms per put, p3 866, 1/10 of that per get
*
*@author <a href="mailto:[EMAIL PROTECTED]">Aaron Smuts</a>
*@author <a href="mailto:[EMAIL PROTECTED]">James Taylor</a>
*@created May 13, 2002
- *@version $Id: LRUMemoryCache.java,v 1.7 2002/05/13 18:36:33 asmuts Exp $
+ *@version $Id: LRUMemoryCache.java,v 1.8 2002/05/13 19:58:09 asmuts Exp $
*/
public class LRUMemoryCache implements MemoryCache, Serializable
{
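The eviction behaviour described in the javadoc above -- least recently used entries drifting to the far end of the list and being dropped (or spooled to disk) when the region is full -- can be sketched with java.util.LinkedHashMap. This is an illustration only, under the assumption that a simple access-ordered map is acceptable; the real LRUMemoryCache maintains its own double linked list and spools evicted elements to the disk cache rather than discarding them. The class name LruSketch is hypothetical.

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Illustration only: the real LRUMemoryCache keeps its own double linked
    // list and spools evicted elements to disk instead of discarding them.
    public class LruSketch extends LinkedHashMap
    {
        private final int max;

        public LruSketch( int max )
        {
            // accessOrder = true: each get() moves the entry to the
            // most-recently-used end of the internal linked list
            super( 16, 0.75f, true );
            this.max = max;
        }

        protected boolean removeEldestEntry( Map.Entry eldest )
        {
            // once the limit is exceeded, the least recently used entry is evicted
            return size() > max;
        }
    }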
@@ -54,7 +54,7 @@
private int max;
/**
- * Region Elemental Attributes
+ * Region Elemental Attributes, used as a default.
*/
public IElementAttributes attr;
@@ -64,7 +64,7 @@
public ICompositeCacheAttributes cattr;
/**
- * The cache this store is associated with
+ * The cache region this store is associated with
*/
Cache cache;
@@ -229,7 +229,7 @@
* Get an item from the cache
*
*@param key Identifies item to find
- *@return Element mathinh key if found, or null
+ *@return ICacheElement if found, else null
*@exception IOException
*/
public ICacheElement get( Serializable key )
@@ -273,7 +273,10 @@
}
/**
- * Removes an item from the cache.
+ * Removes an item from the cache. This method handles hierarchical
+ * removal. If the key is a String and ends with the
+ * CacheConstants.NAME_COMPONENT_DELIMITER, then all items with keys
+ * starting with the argument String will be removed.
*
*@param key
*@return
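A rough sketch of the hierarchical removal rule described in that javadoc, assuming NAME_COMPONENT_DELIMITER is a String constant and a backing "map" field. This is not the actual implementation: the real method also unlinks nodes from the double linked list, and the locking and return handling here are assumptions.

    // Sketch of hierarchical removal; omits the double linked list
    // maintenance that the real class performs.
    public boolean remove( Serializable key )
        throws IOException
    {
        boolean removed = false;

        if ( key instanceof String
            && ( ( String ) key ).endsWith( CacheConstants.NAME_COMPONENT_DELIMITER ) )
        {
            // partial key: remove every entry whose key starts with the prefix
            synchronized ( map )
            {
                Iterator itr = map.keySet().iterator();
                while ( itr.hasNext() )
                {
                    Object k = itr.next();
                    if ( k instanceof String
                        && ( ( String ) k ).startsWith( ( String ) key ) )
                    {
                        itr.remove();
                        removed = true;
                    }
                }
            }
        }
        else
        {
            // exact key: a single lookup and removal
            removed = ( map.remove( key ) != null );
        }

        return removed;
    }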
@@ -404,6 +407,45 @@
return map.keySet().toArray();
}
}
+
+
+// /**
+// * Get an Array of the keys for elements in the specified range of
+// * the memory cache. If the end position is greater than the size of the
+// * Map, the method will return an array of the remaining elements after
+// * the start position. If the start position is greater than or equal to
+// * the size of the Map, an error will be thrown.
+// *
+// *@return An Object[]
+// */
+// public Object[] getKeyArray(int start, int end) throws java.lang.IllegalArgumentException
+// {
+//
+// int size = getSize();
+// if ( start >= size ) {
+// throw new java.lang.IllegalArgumentException( "Start value is greater
than the size of the cache" );
+// }
+// int stop = Math.min( size, end );
+// int count = 0;
+//
+// // need a better locking strategy here.
+// synchronized ( this )
+// {
+// Object[] result = new Object[stop-start];
+// Iterator e = this.map.keySet().iterator();
+// for (int i=0; e.hasNext(); i++)
+// {
+// if ( i >= start && i < stop ) {
+// result[count] = e.next();
+// count++;
+// }
+// if ( i >= stop ) {
+// continue;
+// }
+// }
+// return result;
+// }
+// }
/**
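If the commented-out getKeyArray( int, int ) method above were enabled, it could be used to walk the keys of a region a page at a time, roughly as follows. This is a hypothetical usage sketch: the memCache variable and the page size of 100 are assumptions, and getSize() is taken from the commented code itself.

    // Hypothetical paging loop, assuming the commented getKeyArray( int, int )
    // above were made available on the memory cache.
    int pageSize = 100;
    int size = memCache.getSize();
    for ( int start = 0; start < size; start += pageSize )
    {
        Object[] page = memCache.getKeyArray( start, start + pageSize );
        for ( int i = 0; i < page.length; i++ )
        {
            System.out.println( page[i] );
        }
    }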