Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit a28c55a

Browse files
committed
merge
2 parents 11c08fd + f96b2b0 commit a28c55a

1 file changed

Lines changed: 35 additions & 12 deletions

File tree

Lib/functools.py

Lines changed: 35 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
     from _thread import RLock
 except:
     class RLock:
-        'Dummy reentrant lock'
+        'Dummy reentrant lock for builds without threads'
         def __enter__(self): pass
         def __exit__(self, exctype, excinst, exctb): pass

@@ -172,6 +172,12 @@ def newfunc(*fargs, **fkeywords):
 _CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])

 class _HashedSeq(list):
+    """ This class guarantees that hash() will be called no more than once
+        per element.  This is important because the lru_cache() will hash
+        the key multiple times on a cache miss.
+
+    """
+
     __slots__ = 'hashvalue'

     def __init__(self, tup, hash=hash):
@@ -185,7 +191,16 @@ def _make_key(args, kwds, typed,
              kwd_mark = (object(),),
              fasttypes = {int, str, frozenset, type(None)},
              sorted=sorted, tuple=tuple, type=type, len=len):
-    'Make a cache key from optionally typed positional and keyword arguments'
+    """Make a cache key from optionally typed positional and keyword arguments
+
+    The key is constructed in a way that is flat as possible rather than
+    as a nested structure that would take more memory.
+
+    If there is only a single argument and its data type is known to cache
+    its hash value, then that argument is returned without a wrapper.  This
+    saves space and improves lookup speed.
+
+    """
     key = args
     if kwds:
         sorted_items = sorted(kwds.items())
@@ -242,7 +257,7 @@ def decorating_function(user_function):
     if maxsize == 0:

         def wrapper(*args, **kwds):
-            # no caching, just a statistics update after a successful call
+            # No caching -- just a statistics update after a successful call
             nonlocal misses
             result = user_function(*args, **kwds)
             misses += 1
@@ -272,8 +287,8 @@ def wrapper(*args, **kwds):
             with lock:
                 link = cache_get(key)
                 if link is not None:
-                    # move the link to the front of the circular queue
-                    link_prev, link_next, key, result = link
+                    # Move the link to the front of the circular queue
+                    link_prev, link_next, _key, result = link
                     link_prev[NEXT] = link_next
                     link_next[PREV] = link_prev
                     last = root[PREV]
@@ -285,26 +300,34 @@ def wrapper(*args, **kwds):
             result = user_function(*args, **kwds)
             with lock:
                 if key in cache:
-                    # getting here means that this same key was added to the
-                    # cache while the lock was released.  since the link
+                    # Getting here means that this same key was added to the
+                    # cache while the lock was released.  Since the link
                     # update is already done, we need only return the
                     # computed result and update the count of misses.
                     pass
                 elif full:
-                    # use the old root to store the new key and result
+                    # Use the old root to store the new key and result.
                     oldroot = root
                     oldroot[KEY] = key
                     oldroot[RESULT] = result
-                    # empty the oldest link and make it the new root
+                    # Empty the oldest link and make it the new root.
+                    # Keep a reference to the old key and old result to
+                    # prevent their ref counts from going to zero during the
+                    # update. That will prevent potentially arbitrary object
+                    # clean-up code (i.e. __del__) from running while we're
+                    # still adjusting the links.
                     root = oldroot[NEXT]
                     oldkey = root[KEY]
-                    oldvalue = root[RESULT]
+                    oldresult = root[RESULT]
                     root[KEY] = root[RESULT] = None
-                    # now update the cache dictionary for the new links
+                    # Now update the cache dictionary.
                     del cache[oldkey]
+                    # Save the potentially reentrant cache[key] assignment
+                    # for last, after the root and links have been put in
+                    # a consistent state.
                     cache[key] = oldroot
                 else:
-                    # put result in a new link at the front of the queue
+                    # Put result in a new link at the front of the queue.
                     last = root[PREV]
                     link = [last, root, key, result]
                     last[NEXT] = root[PREV] = cache[key] = link

0 commit comments

Comments
 (0)