Commit 1c858c35 authored by Serhiy Storchaka

Issue #14373: Added C implementation of functools.lru_cache(). Based on patches by Matt Joiner and Alexey Kachayev.
parent c7090855
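The public decorator API is unchanged by this commit; for reference, a minimal usage sketch of lru_cache and its bookkeeping helpers (illustrative only, not part of the patch):

    import functools

    @functools.lru_cache(maxsize=2)
    def square(x):
        return x * x

    square(2); square(2); square(3)
    print(square.cache_info())   # CacheInfo(hits=1, misses=2, maxsize=2, currsize=2)
    square.cache_clear()         # resets the counters and empties the cache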
Lib/functools.py:

@@ -419,12 +419,18 @@ def lru_cache(maxsize=128, typed=False):
if maxsize is not None and not isinstance(maxsize, int):
raise TypeError('Expected maxsize to be an integer or None')
def decorating_function(user_function):
wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
return update_wrapper(wrapper, user_function)
return decorating_function
def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
# Constants shared by all lru cache instances:
sentinel = object() # unique object used to signal cache misses
make_key = _make_key # build a key from the function arguments
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
-def decorating_function(user_function):
cache = {}
hits = misses = 0
full = False
@@ -532,7 +538,10 @@ def lru_cache(maxsize=128, typed=False):
wrapper.cache_clear = cache_clear
return update_wrapper(wrapper, user_function)
-return decorating_function
+try:
from _functools import _lru_cache_wrapper
except ImportError:
pass
################################################################################
...
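The try/except added above keeps the pure-Python _lru_cache_wrapper as a fallback and silently shadows it with the C version when the _functools extension provides one. A generic sketch of this override pattern (the module and helper names below are illustrative, not real stdlib names):

    def _helper(*args, **kwds):
        ...   # pure-Python fallback implementation

    try:
        from _accelerated import _helper   # hypothetical C module; replaces the fallback
    except ImportError:
        pass                               # extension not built: keep the Python version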
Lib/test/test_functools.py:

@@ -7,6 +7,10 @@ import sys
from test import support
import unittest
from weakref import proxy
try:
import threading
except ImportError:
threading = None
import functools
@@ -912,12 +916,12 @@ class Orderable_LT:
return self.value == other.value
-class TestLRU(unittest.TestCase):
+class TestLRU:
def test_lru(self):
def orig(x, y):
return 3 * x + y
-f = functools.lru_cache(maxsize=20)(orig)
+f = self.module.lru_cache(maxsize=20)(orig)
hits, misses, maxsize, currsize = f.cache_info()
self.assertEqual(maxsize, 20)
self.assertEqual(currsize, 0)
@@ -955,7 +959,7 @@ class TestLRU(unittest.TestCase):
self.assertEqual(currsize, 1)
# test size zero (which means "never-cache")
-@functools.lru_cache(0)
+@self.module.lru_cache(0)
def f():
nonlocal f_cnt
f_cnt += 1
@@ -971,7 +975,7 @@ class TestLRU(unittest.TestCase):
self.assertEqual(currsize, 0)
# test size one
-@functools.lru_cache(1)
+@self.module.lru_cache(1)
def f():
nonlocal f_cnt
f_cnt += 1
@@ -987,7 +991,7 @@ class TestLRU(unittest.TestCase):
self.assertEqual(currsize, 1)
# test size two
-@functools.lru_cache(2)
+@self.module.lru_cache(2)
def f(x):
nonlocal f_cnt
f_cnt += 1
@@ -1004,7 +1008,7 @@ class TestLRU(unittest.TestCase):
self.assertEqual(currsize, 2)
def test_lru_with_maxsize_none(self):
-@functools.lru_cache(maxsize=None)
+@self.module.lru_cache(maxsize=None)
def fib(n):
if n < 2:
return n
@@ -1012,17 +1016,26 @@ class TestLRU(unittest.TestCase):
self.assertEqual([fib(n) for n in range(16)],
[0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
self.assertEqual(fib.cache_info(),
-functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
+self.module._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
fib.cache_clear()
self.assertEqual(fib.cache_info(),
-functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+self.module._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
def test_lru_with_maxsize_negative(self):
@self.module.lru_cache(maxsize=-10)
def eq(n):
return n
for i in (0, 1):
self.assertEqual([eq(n) for n in range(150)], list(range(150)))
self.assertEqual(eq.cache_info(),
self.module._CacheInfo(hits=0, misses=300, maxsize=-10, currsize=1))
def test_lru_with_exceptions(self):
# Verify that user_function exceptions get passed through without
# creating a hard-to-read chained exception.
# http://bugs.python.org/issue13177
for maxsize in (None, 128):
-@functools.lru_cache(maxsize)
+@self.module.lru_cache(maxsize)
def func(i):
return 'abc'[i]
self.assertEqual(func(0), 'a')
@@ -1035,7 +1048,7 @@ class TestLRU(unittest.TestCase):
def test_lru_with_types(self):
for maxsize in (None, 128):
-@functools.lru_cache(maxsize=maxsize, typed=True)
+@self.module.lru_cache(maxsize=maxsize, typed=True)
def square(x):
return x * x
self.assertEqual(square(3), 9)
@@ -1050,7 +1063,7 @@ class TestLRU(unittest.TestCase):
self.assertEqual(square.cache_info().misses, 4)
def test_lru_with_keyword_args(self):
-@functools.lru_cache()
+@self.module.lru_cache()
def fib(n):
if n < 2:
return n
@@ -1060,13 +1073,13 @@ class TestLRU(unittest.TestCase):
[0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]
)
self.assertEqual(fib.cache_info(),
-functools._CacheInfo(hits=28, misses=16, maxsize=128, currsize=16))
+self.module._CacheInfo(hits=28, misses=16, maxsize=128, currsize=16))
fib.cache_clear()
self.assertEqual(fib.cache_info(),
-functools._CacheInfo(hits=0, misses=0, maxsize=128, currsize=0))
+self.module._CacheInfo(hits=0, misses=0, maxsize=128, currsize=0))
def test_lru_with_keyword_args_maxsize_none(self):
-@functools.lru_cache(maxsize=None)
+@self.module.lru_cache(maxsize=None)
def fib(n):
if n < 2:
return n
@@ -1074,15 +1087,71 @@ class TestLRU(unittest.TestCase):
self.assertEqual([fib(n=number) for number in range(16)],
[0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
self.assertEqual(fib.cache_info(),
-functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
+self.module._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
fib.cache_clear()
self.assertEqual(fib.cache_info(),
-functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+self.module._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
def test_lru_cache_decoration(self):
def f(zomg: 'zomg_annotation'):
"""f doc string"""
return 42
g = self.module.lru_cache()(f)
for attr in self.module.WRAPPER_ASSIGNMENTS:
self.assertEqual(getattr(g, attr), getattr(f, attr))
@unittest.skipUnless(threading, 'This test requires threading.')
def test_lru_cache_threaded(self):
def orig(x, y):
return 3 * x + y
f = self.module.lru_cache(maxsize=20)(orig)
hits, misses, maxsize, currsize = f.cache_info()
self.assertEqual(currsize, 0)
def full(f, *args):
for _ in range(10):
f(*args)
def clear(f):
for _ in range(10):
f.cache_clear()
orig_si = sys.getswitchinterval()
sys.setswitchinterval(1e-6)
try:
# create 5 threads in order to fill cache
threads = []
for k in range(5):
t = threading.Thread(target=full, args=[f, k, k])
t.start()
threads.append(t)
for t in threads:
t.join()
hits, misses, maxsize, currsize = f.cache_info()
self.assertEqual(hits, 45)
self.assertEqual(misses, 5)
self.assertEqual(currsize, 5)
# create 5 threads in order to fill cache and 1 to clear it
cleaner = threading.Thread(target=clear, args=[f])
cleaner.start()
threads = [cleaner]
for k in range(5):
t = threading.Thread(target=full, args=[f, k, k])
t.start()
threads.append(t)
for t in threads:
t.join()
finally:
sys.setswitchinterval(orig_si)
def test_need_for_rlock(self):
# This will deadlock on an LRU cache that uses a regular lock
-@functools.lru_cache(maxsize=10)
+@self.module.lru_cache(maxsize=10)
def test_func(x):
'Used to demonstrate a reentrant lru_cache call within a single thread'
return x
@@ -1110,6 +1179,12 @@ class TestLRU(unittest.TestCase):
def f():
pass
class TestLRUC(TestLRU, unittest.TestCase):
module = c_functools
class TestLRUPy(TestLRU, unittest.TestCase):
module = py_functools
class TestSingleDispatch(unittest.TestCase):
def test_simple_overloads(self):
...
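TestLRU no longer derives from unittest.TestCase, so it acts as a mixin and each concrete subclass selects the implementation under test through its module attribute. A sketch of how the two module objects are plausibly obtained with test.support.import_fresh_module (an assumption; those lines are not part of the hunks shown):

    import unittest
    from test import support

    # Assumed setup: import functools once without and once with the C extension.
    py_functools = support.import_fresh_module('functools', blocked=['_functools'])
    c_functools = support.import_fresh_module('functools', fresh=['_functools'])

    class TestLRU:
        def test_basic(self):
            f = self.module.lru_cache(maxsize=2)(lambda x: x)
            self.assertEqual(f(1), 1)

    class TestLRUPy(TestLRU, unittest.TestCase):
        module = py_functools      # pure-Python implementation

    class TestLRUC(TestLRU, unittest.TestCase):
        module = c_functools       # implementation backed by _functools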
Misc/NEWS:

@@ -63,6 +63,9 @@ Core and Builtins
Library
-------
- Issue #14373: Added C implementation of functools.lru_cache(). Based on
patches by Matt Joiner and Alexey Kachayev.
- Issue 24230: The tempfile module now accepts bytes for prefix, suffix and dir
parameters and returns bytes in such situations (matching the os module APIs).
...
Modules/_functoolsmodule.c:

@@ -590,6 +590,539 @@ For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates\n\
of the sequence in the calculation, and serves as a default when the\n\
sequence is empty.");
/* lru_cache object **********************************************************/
/* this object is used delimit args and keywords in the cache keys */
static PyObject *kwd_mark = NULL;
struct lru_list_elem;
struct lru_cache_object;
typedef struct lru_list_elem {
PyObject_HEAD
struct lru_list_elem *prev, *next; /* borrowed links */
PyObject *key, *result;
} lru_list_elem;
static void
lru_list_elem_dealloc(lru_list_elem *link)
{
_PyObject_GC_UNTRACK(link);
Py_XDECREF(link->key);
Py_XDECREF(link->result);
PyObject_GC_Del(link);
}
static int
lru_list_elem_traverse(lru_list_elem *link, visitproc visit, void *arg)
{
Py_VISIT(link->key);
Py_VISIT(link->result);
return 0;
}
static int
lru_list_elem_clear(lru_list_elem *link)
{
Py_CLEAR(link->key);
Py_CLEAR(link->result);
return 0;
}
static PyTypeObject lru_list_elem_type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
"functools._lru_list_elem", /* tp_name */
sizeof(lru_list_elem), /* tp_basicsize */
0, /* tp_itemsize */
/* methods */
(destructor)lru_list_elem_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_reserved */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
0, /* tp_doc */
(traverseproc)lru_list_elem_traverse, /* tp_traverse */
(inquiry)lru_list_elem_clear, /* tp_clear */
};
typedef PyObject *(*lru_cache_ternaryfunc)(struct lru_cache_object *, PyObject *, PyObject *);
typedef struct lru_cache_object {
lru_list_elem root; /* includes PyObject_HEAD */
Py_ssize_t maxsize;
PyObject *maxsize_O;
PyObject *func;
lru_cache_ternaryfunc wrapper;
PyObject *cache;
PyObject *cache_info_type;
Py_ssize_t misses, hits;
int typed;
PyObject *dict;
int full;
} lru_cache_object;
static PyTypeObject lru_cache_type;
static PyObject *
lru_cache_make_key(PyObject *args, PyObject *kwds, int typed)
{
PyObject *key, *sorted_items;
Py_ssize_t key_size, pos, key_pos;
/* short path, key will match args anyway, which is a tuple */
if (!typed && !kwds) {
Py_INCREF(args);
return args;
}
if (kwds && PyDict_Size(kwds) > 0) {
sorted_items = PyDict_Items(kwds);
if (!sorted_items)
return NULL;
if (PyList_Sort(sorted_items) < 0) {
Py_DECREF(sorted_items);
return NULL;
}
} else
sorted_items = NULL;
key_size = PyTuple_GET_SIZE(args);
if (sorted_items)
key_size += PyList_GET_SIZE(sorted_items);
if (typed)
key_size *= 2;
if (sorted_items)
key_size++;
key = PyTuple_New(key_size);
if (key == NULL)
goto done;
key_pos = 0;
for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) {
PyObject *item = PyTuple_GET_ITEM(args, pos);
Py_INCREF(item);
PyTuple_SET_ITEM(key, key_pos++, item);
}
if (sorted_items) {
Py_INCREF(kwd_mark);
PyTuple_SET_ITEM(key, key_pos++, kwd_mark);
for (pos = 0; pos < PyList_GET_SIZE(sorted_items); ++pos) {
PyObject *item = PyList_GET_ITEM(sorted_items, pos);
Py_INCREF(item);
PyTuple_SET_ITEM(key, key_pos++, item);
}
}
if (typed) {
for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) {
PyObject *item = (PyObject *)Py_TYPE(PyTuple_GET_ITEM(args, pos));
Py_INCREF(item);
PyTuple_SET_ITEM(key, key_pos++, item);
}
if (sorted_items) {
for (pos = 0; pos < PyList_GET_SIZE(sorted_items); ++pos) {
PyObject *tp_items = PyList_GET_ITEM(sorted_items, pos);
PyObject *item = (PyObject *)Py_TYPE(PyTuple_GET_ITEM(tp_items, 1));
Py_INCREF(item);
PyTuple_SET_ITEM(key, key_pos++, item);
}
}
}
assert(key_pos == key_size);
done:
if (sorted_items)
Py_DECREF(sorted_items);
return key;
}
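lru_cache_make_key flattens a call's arguments into one hashable tuple: positional arguments, then kwd_mark followed by the sorted keyword items, then (for typed caches) the argument types. A rough Python rendering of the same layout (a sketch, not the stdlib _make_key):

    kwd_mark = object()   # stand-in for the module-level sentinel

    def make_key(args, kwds, typed):
        if not typed and not kwds:
            return args                        # fast path: the args tuple itself is the key
        sorted_items = sorted(kwds.items()) if kwds else None
        key = list(args)
        if sorted_items is not None:
            key.append(kwd_mark)
            key.extend(sorted_items)           # each (name, value) pair stays one element
        if typed:
            key.extend(type(v) for v in args)
            if sorted_items is not None:
                key.extend(type(v) for _, v in sorted_items)
        return tuple(key)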
static PyObject *
uncached_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
PyObject *result = PyObject_Call(self->func, args, kwds);
if (!result)
return NULL;
self->misses++;
return result;
}
static PyObject *
infinite_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
PyObject *result;
PyObject *key = lru_cache_make_key(args, kwds, self->typed);
if (!key)
return NULL;
result = PyDict_GetItemWithError(self->cache, key);
if (result) {
Py_INCREF(result);
self->hits++;
Py_DECREF(key);
return result;
}
if (PyErr_Occurred()) {
Py_DECREF(key);
return NULL;
}
result = PyObject_Call(self->func, args, kwds);
if (!result) {
Py_DECREF(key);
return NULL;
}
if (PyDict_SetItem(self->cache, key, result) < 0) {
Py_DECREF(result);
Py_DECREF(key);
return NULL;
}
Py_DECREF(key);
self->misses++;
return result;
}
static void
lru_cache_extricate_link(lru_list_elem *link)
{
link->prev->next = link->next;
link->next->prev = link->prev;
}
static void
lru_cache_append_link(lru_cache_object *self, lru_list_elem *link)
{
lru_list_elem *root = &self->root;
lru_list_elem *last = root->prev;
last->next = root->prev = link;
link->prev = last;
link->next = root;
}
static PyObject *
bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
lru_list_elem *link;
PyObject *key, *result;
key = lru_cache_make_key(args, kwds, self->typed);
if (!key)
return NULL;
link = (lru_list_elem *)PyDict_GetItemWithError(self->cache, key);
if (link) {
lru_cache_extricate_link(link);
lru_cache_append_link(self, link);
self->hits++;
result = link->result;
Py_INCREF(result);
Py_DECREF(key);
return result;
}
if (PyErr_Occurred()) {
Py_DECREF(key);
return NULL;
}
result = PyObject_Call(self->func, args, kwds);
if (!result) {
Py_DECREF(key);
return NULL;
}
if (self->full && self->root.next != &self->root) {
/* Use the oldest item to store the new key and result. */
PyObject *oldkey, *oldresult;
/* Extricate the oldest item. */
link = self->root.next;
lru_cache_extricate_link(link);
/* Remove it from the cache.
The cache dict holds one reference to the link,
and the linked list holds yet one reference to it. */
if (PyDict_DelItem(self->cache, link->key) < 0) {
lru_cache_append_link(self, link);
Py_DECREF(key);
Py_DECREF(result);
return NULL;
}
/* Keep a reference to the old key and old result to
prevent their ref counts from going to zero during the
update. That will prevent potentially arbitrary object
clean-up code (i.e. __del__) from running while we're
still adjusting the links. */
oldkey = link->key;
oldresult = link->result;
link->key = key;
link->result = result;
if (PyDict_SetItem(self->cache, key, (PyObject *)link) < 0) {
Py_DECREF(link);
Py_DECREF(oldkey);
Py_DECREF(oldresult);
return NULL;
}
lru_cache_append_link(self, link);
Py_INCREF(result); /* for return */
Py_DECREF(oldkey);
Py_DECREF(oldresult);
} else {
/* Put result in a new link at the front of the queue. */
link = (lru_list_elem *)PyObject_GC_New(lru_list_elem,
&lru_list_elem_type);
if (link == NULL) {
Py_DECREF(key);
Py_DECREF(result);
return NULL;
}
link->key = key;
link->result = result;
_PyObject_GC_TRACK(link);
if (PyDict_SetItem(self->cache, key, (PyObject *)link) < 0) {
Py_DECREF(link);
return NULL;
}
lru_cache_append_link(self, link);
Py_INCREF(result); /* for return */
self->full = (PyDict_Size(self->cache) >= self->maxsize);
}
self->misses++;
return result;
}
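bounded_lru_cache_wrapper threads a circular doubly linked list through the cache dict: a hit moves the link to the most-recently-used end, and once the cache is full a miss recycles the oldest link rather than allocating a new one, which keeps the dict size constant. A condensed, self-contained Python sketch of that flow (error handling and the typed/keyword key logic are omitted):

    class _Link:
        __slots__ = ('prev', 'next', 'key', 'result')

    class BoundedCache:
        def __init__(self, func, maxsize):
            self.func, self.maxsize = func, maxsize
            self.cache, self.hits, self.misses, self.full = {}, 0, 0, False
            self.root = root = _Link()            # root of the circular doubly linked list
            root.prev = root.next = root

        def _unlink(self, link):
            link.prev.next, link.next.prev = link.next, link.prev

        def _append(self, link):                  # insert just before root (MRU position)
            last = self.root.prev
            last.next = self.root.prev = link
            link.prev, link.next = last, self.root

        def __call__(self, *args):
            key = args                            # stands in for lru_cache_make_key
            link = self.cache.get(key)
            if link is not None:                  # hit: bump link to most-recently-used
                self._unlink(link)
                self._append(link)
                self.hits += 1
                return link.result
            result = self.func(*args)
            if self.full:                         # recycle the least-recently-used link
                link = self.root.next
                self._unlink(link)
                del self.cache[link.key]
            else:
                link = _Link()
            link.key, link.result = key, result
            self.cache[key] = link
            self._append(link)
            self.full = len(self.cache) >= self.maxsize
            self.misses += 1
            return result

Reusing the oldest link on a full cache avoids one allocation per miss and mirrors the pure-Python wrapper's behaviour.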
static PyObject *
lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw)
{
PyObject *func, *maxsize_O, *cache_info_type;
int typed;
lru_cache_object *obj;
Py_ssize_t maxsize;
PyObject *(*wrapper)(lru_cache_object *, PyObject *, PyObject *);
static char *keywords[] = {"user_function", "maxsize", "typed",
"cache_info_type", NULL};
if (!PyArg_ParseTupleAndKeywords(args, kw, "OOpO:lru_cache", keywords,
&func, &maxsize_O, &typed,
&cache_info_type)) {
return NULL;
}
if (!PyCallable_Check(func)) {
PyErr_SetString(PyExc_TypeError,
"the first argument must be callable");
return NULL;
}
/* select the caching function, and make/inc maxsize_O */
if (maxsize_O == Py_None) {
wrapper = infinite_lru_cache_wrapper;
/* use this only to initialize lru_cache_object attribute maxsize */
maxsize = -1;
} else if (PyIndex_Check(maxsize_O)) {
maxsize = PyNumber_AsSsize_t(maxsize_O, PyExc_OverflowError);
if (maxsize == -1 && PyErr_Occurred())
return NULL;
if (maxsize == 0)
wrapper = uncached_lru_cache_wrapper;
else
wrapper = bounded_lru_cache_wrapper;
} else {
PyErr_SetString(PyExc_TypeError, "maxsize should be integer or None");
return NULL;
}
obj = (lru_cache_object *)type->tp_alloc(type, 0);
if (obj == NULL)
return NULL;
if (!(obj->cache = PyDict_New())) {
Py_DECREF(obj);
return NULL;
}
obj->root.prev = &obj->root;
obj->root.next = &obj->root;
obj->maxsize = maxsize;
Py_INCREF(maxsize_O);
obj->maxsize_O = maxsize_O;
Py_INCREF(func);
obj->func = func;
obj->wrapper = wrapper;
obj->misses = obj->hits = 0;
obj->typed = typed;
Py_INCREF(cache_info_type);
obj->cache_info_type = cache_info_type;
return (PyObject *)obj;
}
static lru_list_elem *
lru_cache_unlink_list(lru_cache_object *self)
{
lru_list_elem *root = &self->root;
lru_list_elem *link = root->next;
if (link == root)
return NULL;
root->prev->next = NULL;
root->next = root->prev = root;
return link;
}
static void
lru_cache_clear_list(lru_list_elem *link)
{
while (link != NULL) {
lru_list_elem *next = link->next;
Py_DECREF(link);
link = next;
}
}
static void
lru_cache_dealloc(lru_cache_object *obj)
{
lru_list_elem *list = lru_cache_unlink_list(obj);
Py_XDECREF(obj->maxsize_O);
Py_XDECREF(obj->func);
Py_XDECREF(obj->cache);
Py_XDECREF(obj->dict);
Py_XDECREF(obj->cache_info_type);
lru_cache_clear_list(list);
Py_TYPE(obj)->tp_free(obj);
}
static PyObject *
lru_cache_call(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
return self->wrapper(self, args, kwds);
}
static PyObject *
lru_cache_cache_info(lru_cache_object *self, PyObject *unused)
{
return PyObject_CallFunction(self->cache_info_type, "nnOn",
self->hits, self->misses, self->maxsize_O,
PyDict_Size(self->cache));
}
static PyObject *
lru_cache_cache_clear(lru_cache_object *self, PyObject *unused)
{
lru_list_elem *list = lru_cache_unlink_list(self);
self->hits = self->misses = 0;
self->full = 0;
PyDict_Clear(self->cache);
lru_cache_clear_list(list);
Py_RETURN_NONE;
}
static int
lru_cache_tp_traverse(lru_cache_object *self, visitproc visit, void *arg)
{
lru_list_elem *link = self->root.next;
while (link != &self->root) {
lru_list_elem *next = link->next;
Py_VISIT(link);
link = next;
}
Py_VISIT(self->maxsize_O);
Py_VISIT(self->func);
Py_VISIT(self->cache);
Py_VISIT(self->cache_info_type);
Py_VISIT(self->dict);
return 0;
}
static int
lru_cache_tp_clear(lru_cache_object *self)
{
lru_list_elem *list = lru_cache_unlink_list(self);
Py_CLEAR(self->maxsize_O);
Py_CLEAR(self->func);
Py_CLEAR(self->cache);
Py_CLEAR(self->cache_info_type);
Py_CLEAR(self->dict);
lru_cache_clear_list(list);
return 0;
}
PyDoc_STRVAR(lru_cache_doc,
"Create a cached callable that wraps another function.\n\
\n\
user_function: the function being cached\n\
\n\
maxsize: 0 for no caching\n\
None for unlimited cache size\n\
n for a bounded cache\n\
\n\
typed: False cache f(3) and f(3.0) as identical calls\n\
True cache f(3) and f(3.0) as distinct calls\n\
\n\
cache_info_type: namedtuple class with the fields:\n\
hits misses currsize maxsize\n"
);
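As the docstring notes, typed=True caches f(3) and f(3.0) as distinct calls; a quick illustration through the Python-level decorator:

    import functools

    @functools.lru_cache(maxsize=None, typed=True)
    def f(x):
        return x

    f(3); f(3.0); f(3)
    print(f.cache_info())   # CacheInfo(hits=1, misses=2, maxsize=None, currsize=2)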
static PyMethodDef lru_cache_methods[] = {
{"cache_info", (PyCFunction)lru_cache_cache_info, METH_NOARGS},
{"cache_clear", (PyCFunction)lru_cache_cache_clear, METH_NOARGS},
{NULL}
};
static PyGetSetDef lru_cache_getsetlist[] = {
{"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
{NULL}
};
static PyTypeObject lru_cache_type = {
PyVarObject_HEAD_INIT(NULL, 0)
"functools._lru_cache_wrapper", /* tp_name */
sizeof(lru_cache_object), /* tp_basicsize */
0, /* tp_itemsize */
/* methods */
(destructor)lru_cache_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_reserved */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
(ternaryfunc)lru_cache_call, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC,
/* tp_flags */
lru_cache_doc, /* tp_doc */
(traverseproc)lru_cache_tp_traverse,/* tp_traverse */
(inquiry)lru_cache_tp_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
lru_cache_methods, /* tp_methods */
0, /* tp_members */
lru_cache_getsetlist, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
offsetof(lru_cache_object, dict), /* tp_dictoffset */
0, /* tp_init */
0, /* tp_alloc */
lru_cache_new, /* tp_new */
};
/* module level code ********************************************************/
PyDoc_STRVAR(module_doc,
@@ -602,6 +1135,11 @@ static PyMethodDef module_methods[] = {
{NULL, NULL} /* sentinel */
};
static void
module_free(void *m)
{
Py_CLEAR(kwd_mark);
}
static struct PyModuleDef _functoolsmodule = {
PyModuleDef_HEAD_INIT,
@@ -612,7 +1150,7 @@ static struct PyModuleDef _functoolsmodule = {
NULL,
NULL,
NULL,
-NULL
+module_free,
};
PyMODINIT_FUNC
@@ -623,6 +1161,7 @@ PyInit__functools(void)
char *name;
PyTypeObject *typelist[] = {
&partial_type,
&lru_cache_type,
NULL
};
@@ -630,6 +1169,12 @@ PyInit__functools(void)
if (m == NULL)
return NULL;
kwd_mark = PyObject_CallObject((PyObject *)&PyBaseObject_Type, NULL);
if (!kwd_mark) {
Py_DECREF(m);
return NULL;
}
for (i=0 ; typelist[i] != NULL ; i++) {
if (PyType_Ready(typelist[i]) < 0) {
Py_DECREF(m);
...