Skip to content

Commit ce011a3

Browse files
committed
functools: add cache decorator with tests
Implement functools.cache, equivalent to CPython's unbounded memoisation decorator (lru_cache(maxsize=None)). Uses a callable class instead of closure attributes for MicroPython compatibility. Includes _make_key with int/str fast-path, _CacheInfo with namedtuple-like interface, and 14 test cases covering memoisation, cache_info/cache_clear, kwargs, unhashable args, and independence.
1 parent f4c41df commit ce011a3

2 files changed

Lines changed: 366 additions & 0 deletions

File tree

python-stdlib/functools/functools.py

Lines changed: 114 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,3 +26,117 @@ def reduce(function, iterable, initializer=None):
2626
for element in it:
2727
value = function(value, element)
2828
return value
29+
30+
31+
# ---------------------------------------------------------------------------
32+
# cache — unbounded memoisation decorator (equivalent to CPython's
33+
# functools.cache / lru_cache(maxsize=None))
34+
# ---------------------------------------------------------------------------
35+
36+
_kwd_mark = (object(),)
37+
_fasttypes = {int, str}
38+
39+
40+
def _make_key(args, kwds):
41+
"""Build a flat, hashable cache key from positional and keyword arguments.
42+
43+
When there is a single positional argument whose type is int or str and
44+
no keyword arguments, the argument itself is returned (avoiding the
45+
overhead of a one-element tuple lookup).
46+
47+
Keyword argument order matters: f(x=1, y=2) and f(y=2, x=1) produce
48+
different keys, matching CPython's behaviour.
49+
"""
50+
key = args
51+
if kwds:
52+
key += _kwd_mark
53+
for item in kwds.items():
54+
key += item
55+
elif len(key) == 1 and type(key[0]) in _fasttypes:
56+
return key[0]
57+
return key
58+
59+
60+
class _CacheInfo:
61+
"""Lightweight equivalent of collections.namedtuple('CacheInfo', ...)."""
62+
63+
__slots__ = ("hits", "misses", "maxsize", "currsize")
64+
65+
def __init__(self, hits, misses, maxsize, currsize):
66+
self.hits = hits
67+
self.misses = misses
68+
self.maxsize = maxsize
69+
self.currsize = currsize
70+
71+
def __repr__(self):
72+
return "CacheInfo(hits=%d, misses=%d, maxsize=%s, currsize=%d)" % (
73+
self.hits,
74+
self.misses,
75+
self.maxsize,
76+
self.currsize,
77+
)
78+
79+
def __eq__(self, other):
80+
if isinstance(other, _CacheInfo):
81+
return (
82+
self.hits == other.hits
83+
and self.misses == other.misses
84+
and self.maxsize == other.maxsize
85+
and self.currsize == other.currsize
86+
)
87+
if isinstance(other, tuple) and len(other) == 4:
88+
return (
89+
self.hits == other[0]
90+
and self.misses == other[1]
91+
and self.maxsize == other[2]
92+
and self.currsize == other[3]
93+
)
94+
return NotImplemented
95+
96+
def __iter__(self):
97+
return iter((self.hits, self.misses, self.maxsize, self.currsize))
98+
99+
def __getitem__(self, i):
100+
return (self.hits, self.misses, self.maxsize, self.currsize)[i]
101+
102+
def __len__(self):
103+
return 4
104+
105+
106+
def cache(user_function):
    """Simple lightweight unbounded cache. Sometimes called 'memoize'.

    Behaves like CPython's functools.cache (lru_cache(maxsize=None)):
    the returned callable memoises results keyed on the call arguments
    and exposes cache_info(), cache_clear(), and __wrapped__.

    Implemented as a callable class rather than a closure with function
    attributes, because MicroPython closures do not support attribute
    assignment.
    """
    memo = {}
    stats = [0, 0]  # [hits, misses]

    class _Cached:
        # Keep a reference to the undecorated function, as
        # functools.wraps would.
        __wrapped__ = user_function

        def __call__(self, *args, **kwds):
            # Unhashable arguments raise TypeError here, as in CPython.
            key = _make_key(args, kwds)
            if key in memo:
                stats[0] += 1
                return memo[key]
            stats[1] += 1
            value = user_function(*args, **kwds)
            memo[key] = value
            return value

        def cache_info(self):
            """Report cache statistics."""
            return _CacheInfo(stats[0], stats[1], None, len(memo))

        def cache_clear(self):
            """Clear the cache and cache statistics."""
            memo.clear()
            stats[0] = stats[1] = 0

    return _Cached()
Lines changed: 252 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,252 @@
1+
# Test script for functools.cache: exercises memoisation, cache_info /
# cache_clear, keyword arguments, the int/str fast-path key, unhashable
# arguments, and cache independence. Runs top-to-bottom; later sections
# reuse names (e.g. call_count) mutated by earlier ones, so order matters.
from functools import cache


# -- 1. Basic memoisation ---------------------------------------------------

call_count = 0


@cache
def fib(n):
    global call_count
    call_count += 1
    if n < 2:
        return n
    return fib(n - 1) + fib(n - 2)


assert fib(10) == 55
# Without the cache, naive fib(10) would make 177 calls; with it, one
# call per distinct n in 0..10.
assert call_count == 11, "each unique n should be computed exactly once"

# -- 2. cache_info ----------------------------------------------------------

info = fib.cache_info()
assert info.maxsize is None
assert info.currsize == 11
assert info.hits > 0
assert info.misses == 11

# tuple comparison (matches CPython namedtuple behaviour)
assert info == (info.hits, info.misses, None, 11)

# unpacking
hits, misses, maxsize, currsize = info
assert maxsize is None and currsize == 11

# indexing
assert info[0] == hits and info[3] == currsize

# len
assert len(info) == 4

# repr
r = repr(info)
assert "CacheInfo" in r and "hits=" in r

# -- 3. cache_clear ---------------------------------------------------------

fib.cache_clear()
info = fib.cache_info()
assert info == (0, 0, None, 0), "cache_clear should reset everything"

# recompute after clearing
call_count = 0
assert fib(5) == 5
# One call per distinct n in 0..5 proves the cache repopulates from empty.
assert call_count == 6

# -- 4. __wrapped__ ---------------------------------------------------------

assert fib.__wrapped__ is not None
assert callable(fib.__wrapped__)

# -- 5. Keyword arguments ---------------------------------------------------

kw_calls = []


@cache
def greet(name, greeting="hello"):
    kw_calls.append((name, greeting))
    return "%s, %s!" % (greeting, name)


assert greet("world") == "hello, world!"
assert greet("world") == "hello, world!"
assert len(kw_calls) == 1, "second call should be a cache hit"

assert greet("world", greeting="hi") == "hi, world!"
assert len(kw_calls) == 2, "different kwarg value is a separate entry"

assert greet(name="world") == "hello, world!"
assert len(kw_calls) == 3, "positional vs keyword is a separate entry"

# -- 6. Keyword argument order ----------------------------------------------
# CPython dicts are insertion-ordered, so f(a=1, b=2) and f(b=2, a=1)
# produce different cache keys. MicroPython may not preserve call-site
# kwarg order, so they may share a cache entry. Both are correct.

order_calls = []


@cache
def multi_kw(a=1, b=2):
    order_calls.append((a, b))
    return a + b


assert multi_kw(a=1, b=2) == 3
assert multi_kw(b=2, a=1) == 3
assert len(order_calls) in (1, 2)

# Verify the behaviour is self-consistent: repeating the same call is always
# a cache hit regardless of the runtime's kwarg ordering.
prev = len(order_calls)
assert multi_kw(a=1, b=2) == 3
assert len(order_calls) == prev, "repeated identical call must be a cache hit"

# -- 7. No-argument function ------------------------------------------------

no_arg_calls = [0]


@cache
def constant():
    no_arg_calls[0] += 1
    return 42


assert constant() == 42
assert constant() == 42
assert constant() == 42
assert no_arg_calls[0] == 1
assert constant.cache_info().hits == 2
assert constant.cache_info().misses == 1
assert constant.cache_info().currsize == 1

# -- 8. None as a valid cached result ---------------------------------------

none_calls = [0]


@cache
def returns_none(x):
    none_calls[0] += 1
    return None


assert returns_none(1) is None
assert returns_none(1) is None
# Guards against implementations that use None (rather than a private
# sentinel) to signal "not in cache".
assert none_calls[0] == 1, "None must be cached, not treated as a miss"

# -- 9. Single int/str fast-path key ---------------------------------------

fast_calls = [0]


@cache
def square(x):
    fast_calls[0] += 1
    return x * x


assert square(5) == 25
assert square(5) == 25
assert fast_calls[0] == 1

assert square(7) == 49
assert square(7) == 49
assert fast_calls[0] == 2, "7 is a distinct key from 5"

str_calls = [0]


@cache
def upper(s):
    str_calls[0] += 1
    return s.upper()


assert upper("hello") == "HELLO"
assert upper("hello") == "HELLO"
assert str_calls[0] == 1, "str arg should hit the fast-path cache"

# -- 10. Multiple positional arguments -------------------------------------

multi_calls = [0]


@cache
def add(a, b):
    multi_calls[0] += 1
    return a + b


assert add(1, 2) == 3
assert add(1, 2) == 3
assert add(2, 1) == 3
assert multi_calls[0] == 2, "(1,2) and (2,1) are distinct keys"

# -- 11. Unhashable arguments raise TypeError -------------------------------


@cache
def bad_args(x):
    return x


try:
    bad_args([1, 2, 3])
    assert False, "should have raised TypeError for unhashable arg"
except TypeError:
    pass

# -- 12. Decorated function is still callable as expected -------------------


@cache
def variadic(*args, **kwargs):
    return (args, tuple(sorted(kwargs.items())))


r = variadic(1, 2, x=3)
assert r == ((1, 2), (("x", 3),))
assert variadic(1, 2, x=3) == r
assert variadic.cache_info().hits == 1

# -- 13. Independent caches per decorated function -------------------------


@cache
def fn_a(x):
    return x + 1


@cache
def fn_b(x):
    return x + 2


assert fn_a(1) == 2
assert fn_b(1) == 3
assert fn_a.cache_info().currsize == 1
assert fn_b.cache_info().currsize == 1
fn_a.cache_clear()
assert fn_a.cache_info().currsize == 0
assert fn_b.cache_info().currsize == 1, "clearing fn_a must not affect fn_b"

# -- 14. Large number of entries (unbounded) --------------------------------


@cache
def identity(x):
    return x


for i in range(500):
    assert identity(i) == i

assert identity.cache_info().currsize == 500
assert identity.cache_info().misses == 500

for i in range(500):
    assert identity(i) == i

assert identity.cache_info().hits == 500

print("all cache tests passed")

0 commit comments

Comments
 (0)