-rw-r--r--  Doc/library/functools.rst  | 29
-rw-r--r--  Lib/functools.py           | 60
-rw-r--r--  Lib/test/test_functools.py | 14
3 files changed, 78 insertions, 25 deletions
diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst
index c593d70..0708162 100644
--- a/Doc/library/functools.rst
+++ b/Doc/library/functools.rst
@@ -32,7 +32,7 @@ The :mod:`functools` module defines the following functions:
    A compare function is any callable that accept two arguments, compares them,
    and returns a negative number for less-than, zero for equality, or a positive
    number for greater-than.  A key function is a callable that accepts one
-   argument and returns another value that indicates the position in the desired
+   argument and returns another value indicating the position in the desired
    collation sequence.
 
    Example::
@@ -51,10 +51,14 @@ The :mod:`functools` module defines the following functions:
    Since a dictionary is used to cache results, the positional and keyword
    arguments to the function must be hashable.
 
+   If *maxsize* is set to None, the LRU feature is disabled and the cache
+   can grow without bound.
+
    To help measure the effectiveness of the cache and tune the *maxsize*
    parameter, the wrapped function is instrumented with a :func:`cache_info`
    function that returns a :term:`named tuple` showing *hits*, *misses*,
-   *maxsize* and *currsize*.
+   *maxsize* and *currsize*.  In a multi-threaded environment, the hits
+   and misses are approximate.
 
    The decorator also provides a :func:`cache_clear` function for clearing or
    invalidating the cache.
@@ -89,12 +93,25 @@ The :mod:`functools` module defines the following functions:
       >>> print(get_pep.cache_info())
       CacheInfo(hits=3, misses=8, maxsize=20, currsize=8)
 
-   .. versionadded:: 3.2
+   Example of efficiently computing
+   `Fibonacci numbers <http://en.wikipedia.org/wiki/Fibonacci_number>`_
+   using a cache to implement a
+   `dynamic programming <http://en.wikipedia.org/wiki/Dynamic_programming>`_
+   technique::
+
+      @lru_cache(maxsize=None)
+      def fib(n):
+          if n < 2:
+              return n
+          return fib(n-1) + fib(n-2)
 
-   .. seealso::
+      >>> print([fib(n) for n in range(16)])
+      [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]
 
-      Recipe for a `plain cache without the LRU feature
-      <http://code.activestate.com/recipes/577479-simple-caching-decorator/>`_.
+      >>> print(fib.cache_info())
+      CacheInfo(hits=28, misses=16, maxsize=None, currsize=16)
+
+   .. versionadded:: 3.2
 
 .. decorator:: total_ordering
 
diff --git a/Lib/functools.py b/Lib/functools.py
index 0fbf3cd..d450634 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -119,6 +119,9 @@ _CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
 def lru_cache(maxsize=100):
     """Least-recently-used cache decorator.
 
+    If *maxsize* is set to None, the LRU features are disabled and the cache
+    can grow without bound.
+
     Arguments to the cached function must be hashable.
 
     View the cache statistics named tuple (hits, misses, maxsize, currsize) with
@@ -136,32 +139,51 @@ def lru_cache(maxsize=100):
     def decorating_function(user_function, tuple=tuple, sorted=sorted,
                             len=len, KeyError=KeyError):
 
-        cache = OrderedDict()           # ordered least recent to most recent
-        cache_popitem = cache.popitem
-        cache_renew = cache.move_to_end
         hits = misses = 0
         kwd_mark = object()             # separates positional and keyword args
         lock = Lock()
 
-        @wraps(user_function)
-        def wrapper(*args, **kwds):
-            nonlocal hits, misses
-            key = args
-            if kwds:
-                key += (kwd_mark,) + tuple(sorted(kwds.items()))
-            try:
-                with lock:
+        if maxsize is None:
+            cache = dict()              # simple cache without ordering or size limit
+
+            @wraps(user_function)
+            def wrapper(*args, **kwds):
+                nonlocal hits, misses
+                key = args
+                if kwds:
+                    key += (kwd_mark,) + tuple(sorted(kwds.items()))
+                try:
                     result = cache[key]
-                    cache_renew(key)    # record recent use of this key
                     hits += 1
-            except KeyError:
-                result = user_function(*args, **kwds)
-                with lock:
-                    cache[key] = result     # record recent use of this key
+                except KeyError:
+                    result = user_function(*args, **kwds)
+                    cache[key] = result
                     misses += 1
-                    if len(cache) > maxsize:
-                        cache_popitem(0)    # purge least recently used cache entry
-            return result
+                return result
+        else:
+            cache = OrderedDict()       # ordered least recent to most recent
+            cache_popitem = cache.popitem
+            cache_renew = cache.move_to_end
+
+            @wraps(user_function)
+            def wrapper(*args, **kwds):
+                nonlocal hits, misses
+                key = args
+                if kwds:
+                    key += (kwd_mark,) + tuple(sorted(kwds.items()))
+                try:
+                    with lock:
+                        result = cache[key]
+                        cache_renew(key)    # record recent use of this key
+                        hits += 1
+                except KeyError:
+                    result = user_function(*args, **kwds)
+                    with lock:
+                        cache[key] = result     # record recent use of this key
+                        misses += 1
+                        if len(cache) > maxsize:
+                            cache_popitem(0)    # purge least recently used cache entry
+                return result
 
         def cache_info():
             """Report cache statistics"""
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index 8f48e9e..e5921a7 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -586,6 +586,20 @@ class TestLRU(unittest.TestCase):
         self.assertEqual(misses, 4)
         self.assertEqual(currsize, 2)
 
+    def test_lru_with_maxsize_none(self):
+        @functools.lru_cache(maxsize=None)
+        def fib(n):
+            if n < 2:
+                return n
+            return fib(n-1) + fib(n-2)
+        self.assertEqual([fib(n) for n in range(16)],
+            [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
+        self.assertEqual(fib.cache_info(),
+            functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
+        fib.cache_clear()
+        self.assertEqual(fib.cache_info(),
+            functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+
 def test_main(verbose=None):
     test_classes = (
         TestPartial,
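
What the patch adds, in runnable form: passing maxsize=None makes the decorator memoize into a plain dict with no eviction and no LRU bookkeeping. A minimal usage sketch, mirroring the new doc example and test (it assumes a Python build that already contains this change, i.e. 3.2+):

    from functools import lru_cache

    @lru_cache(maxsize=None)      # unbounded cache: plain dict, no LRU eviction
    def fib(n):
        if n < 2:
            return n
        return fib(n-1) + fib(n-2)

    print([fib(n) for n in range(16)])
    # [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]
    print(fib.cache_info())       # CacheInfo(hits=28, misses=16, maxsize=None, currsize=16)

    fib.cache_clear()             # empty the cache and reset the hit/miss counters
    print(fib.cache_info())       # CacheInfo(hits=0, misses=0, maxsize=None, currsize=0)

Note that the unbounded wrapper updates hits and misses without taking the lock, which is why the documentation change describes those counters as approximate in multi-threaded use.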
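
Both wrapper variants build the cache key the same way: the tuple of positional arguments, plus, when keyword arguments are present, a sentinel object followed by the sorted (name, value) pairs. A standalone sketch of just that step; make_key is a hypothetical helper name, since in the patch this logic is inline in each wrapper:

    kwd_mark = object()      # sentinel separating positional from keyword args

    def make_key(args, kwds):
        # Hypothetical helper illustrating the patch's inline key construction.
        key = args
        if kwds:
            # sorting the items means keyword order does not change the key
            key += (kwd_mark,) + tuple(sorted(kwds.items()))
        return key

    assert make_key((1, 2), {'x': 3, 'y': 4}) == make_key((1, 2), {'y': 4, 'x': 3})
    # the same value passed positionally vs. by keyword yields a different key,
    # so the two call forms are cached (and counted) separately
    assert make_key((1, 2), {}) != make_key((1,), {'b': 2})

This is also why every argument to the cached function must be hashable: the whole key tuple ends up as a dictionary key.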