
Add LRU cache memoizing decorator for functions

tags/replace-pytables
Jim Paris, 11 years ago
commit 24d4752bc3

2 changed files with 118 additions and 0 deletions:
  1. nilmdb/lrucache.py       +63  -0
  2. tests/test_lrucache.py   +55  -0

nilmdb/lrucache.py  +63  -0

@@ -0,0 +1,63 @@
# Memoize a function's return value with a least-recently-used cache
# Based on:
# http://code.activestate.com/recipes/498245-lru-and-lfu-cache-decorators/
# with added 'destructor' functionality.

import collections
import functools

def lrucache(size = 10, onremove = None):
    """Least-recently-used cache decorator.

    @lrucache(size = 10, onremove = None)
    def f(...):
        pass

    Given a function and its arguments, memoize its return value.
    Up to 'size' entries are cached.

    When evicting a value from the cache, the function 'onremove'
    is called with the value that is being evicted.

    Call f.cache_remove(...) to evict the cache entry with the given
    arguments.  Call f.cache_remove_all() to evict all entries.
    f.cache_hits and f.cache_misses give statistics.
    """

    def decorator(func):
        cache = collections.OrderedDict()  # order: least- to most-recent

        def evict(value):
            if onremove:
                onremove(value)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = args + tuple(sorted(kwargs.items()))
            try:
                value = cache.pop(key)
                wrapper.cache_hits += 1
            except KeyError:
                value = func(*args, **kwargs)
                wrapper.cache_misses += 1
                if len(cache) >= size:
                    evict(cache.popitem(last = False)[1])  # evict LRU cache entry
            cache[key] = value  # (re-)insert this key at the end
            return value

        def cache_remove(*args, **kwargs):
            key = args + tuple(sorted(kwargs.items()))
            if key in cache:
                evict(cache.pop(key))

        def cache_remove_all():
            while cache:  # drain via popitem; don't mutate the dict while iterating it
                evict(cache.popitem(last = False)[1])

        wrapper.cache_hits = 0
        wrapper.cache_misses = 0
        wrapper.cache_remove = cache_remove
        wrapper.cache_remove_all = cache_remove_all

        return wrapper
    return decorator

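For reference, a minimal usage sketch of the decorator added above. The open_log function, the file path, and the onremove lambda are illustrative only and not part of this commit:

    from nilmdb.lrucache import lrucache

    # Cache up to 4 open file handles; close a handle when it is evicted.
    @lrucache(size = 4, onremove = lambda f: f.close())
    def open_log(path):
        return open(path)

    a = open_log("/tmp/example.log")            # miss: file is opened
    b = open_log("/tmp/example.log")            # hit: cached handle returned (a is b)
    open_log.cache_remove("/tmp/example.log")   # evict; onremove closes the file
    assert (open_log.cache_hits, open_log.cache_misses) == (1, 1)
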
tests/test_lrucache.py  +55  -0

@@ -0,0 +1,55 @@
import nilmdb
from nilmdb.printf import *

import nose
from nose.tools import *
from nose.tools import assert_raises
import threading
import time

from test_helpers import *

from nilmdb.lrucache import lrucache

@lrucache(size = 3)
def foo1(n):
    return n

@lrucache(size = 5)
def foo2(n):
    return n

def foo3d(n):
    foo3d.destructed.append(n)
foo3d.destructed = []
@lrucache(size = 3, onremove = foo3d)
def foo3(n):
    return n

class TestLRUCache(object):
    def test(self):
        [ foo1(n) for n in [ 1, 2, 3, 1, 2, 3, 1, 2, 3 ] ]
        eq_((foo1.cache_hits, foo1.cache_misses), (6, 3))
        [ foo1(n) for n in [ 1, 2, 3, 1, 2, 3, 1, 2, 3 ] ]
        eq_((foo1.cache_hits, foo1.cache_misses), (15, 3))
        [ foo1(n) for n in [ 4, 2, 1, 1, 4 ] ]
        eq_((foo1.cache_hits, foo1.cache_misses), (18, 5))

        [ foo2(n) for n in [ 1, 2, 3, 1, 2, 3, 1, 2, 3 ] ]
        eq_((foo2.cache_hits, foo2.cache_misses), (6, 3))
        [ foo2(n) for n in [ 1, 2, 3, 1, 2, 3, 1, 2, 3 ] ]
        eq_((foo2.cache_hits, foo2.cache_misses), (15, 3))
        [ foo2(n) for n in [ 4, 2, 1, 1, 4 ] ]
        eq_((foo2.cache_hits, foo2.cache_misses), (19, 4))

        [ foo3(n) for n in [ 1, 2, 3, 1, 2, 3, 1, 2, 3 ] ]
        eq_((foo3.cache_hits, foo3.cache_misses), (6, 3))
        [ foo3(n) for n in [ 1, 2, 3, 1, 2, 3, 1, 2, 3 ] ]
        eq_((foo3.cache_hits, foo3.cache_misses), (15, 3))
        [ foo3(n) for n in [ 4, 2, 1, 1, 4 ] ]
        eq_((foo3.cache_hits, foo3.cache_misses), (18, 5))
        eq_(foo3d.destructed, [1, 3])
        foo3.cache_remove(1)
        eq_(foo3d.destructed, [1, 3, 1])
        foo3.cache_remove_all()
        eq_(foo3d.destructed, [1, 3, 1, 2, 4])

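A note on how cache keys are built, based on the wrapper above: the key is args + tuple(sorted(kwargs.items())), so the order of keyword arguments does not matter, but positional and keyword calls produce different keys, and every argument must be hashable. A small sketch (the combine function is illustrative only):

    @lrucache(size = 2)
    def combine(a, b = 0):
        return a + b

    combine(a = 1, b = 2)   # miss: key is (('a', 1), ('b', 2))
    combine(b = 2, a = 1)   # hit: kwargs are sorted, so the key is identical
    combine(1, b = 2)       # miss: passing 'a' positionally builds the key (1, ('b', 2))
    # combine([1], 2)       # TypeError: lists are unhashable and cannot be cache keys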