bzr branch
http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
|
4516.2.1
by John Arbash Meinel
Fix bug #396838, Update LRUCache to maintain invariant even |
1 |
# Copyright (C) 2006, 2008, 2009 Canonical Ltd
|
|
2993.1.1
by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need |
2 |
#
|
3 |
# This program is free software; you can redistribute it and/or modify
|
|
4 |
# it under the terms of the GNU General Public License as published by
|
|
5 |
# the Free Software Foundation; either version 2 of the License, or
|
|
6 |
# (at your option) any later version.
|
|
7 |
#
|
|
8 |
# This program is distributed in the hope that it will be useful,
|
|
9 |
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
10 |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
11 |
# GNU General Public License for more details.
|
|
12 |
#
|
|
13 |
# You should have received a copy of the GNU General Public License
|
|
14 |
# along with this program; if not, write to the Free Software
|
|
|
4183.7.1
by Sabin Iacob
update FSF mailing address |
15 |
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
|
2993.1.1
by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need |
16 |
|
|
6379.6.7
by Jelmer Vernooij
Move importing from future until after doc string, otherwise the doc string will disappear. |
17 |
"""A simple least-recently-used (LRU) cache."""
|
18 |
||
|
6624
by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes') |
19 |
from . import ( |
|
4178.3.7
by John Arbash Meinel
Review tweaks from Ian. |
20 |
trace, |
21 |
)
|
|
|
6656.1.1
by Martin
Apply 2to3 dict fixer and clean up resulting mess using view helpers |
22 |
|
|
2993.1.1
by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need |
23 |
|
|
4287.1.10
by John Arbash Meinel
Restore the ability to handle None as a key. |
# Private sentinel marking the tail of the linked list via ``next_key``.
# A fresh ``object()`` is used (rather than ``None``) so that ``None``
# remains usable as an ordinary cache key.
_null_key = object()
|
2993.1.1
by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need |
25 |
|
|
7143.15.2
by Jelmer Vernooij
Run autopep8. |
26 |
|
|
4178.3.3
by John Arbash Meinel
LRUCache is now implemented with a dict to a linked list, |
class _LRUNode:
    """This maintains the linked-list which is the lru internals.

    Each node knows its predecessor directly (``prev`` is the node object),
    but its successor only indirectly: ``next_key`` holds the *key* of the
    next node, which is resolved through the cache dict.  ``next_key`` is
    ``_null_key`` when this node is the tail (the least recently used).
    """

    # __slots__ avoids a per-node __dict__; caches may hold many nodes.
    __slots__ = ('prev', 'next_key', 'key', 'value')

    def __init__(self, key, value):
        """Create a detached node holding key/value.

        The node starts unlinked: no predecessor and a _null_key successor.
        """
        self.prev = None
        self.next_key = _null_key
        self.key = key
        self.value = value

    def __repr__(self):
        # Show the neighbouring keys (n: next, p: prev) for debugging the
        # list structure; prev may legitimately be None for the head node.
        if self.prev is None:
            prev_key = None
        else:
            prev_key = self.prev.key
        return '%s(%r n:%r p:%r)' % (self.__class__.__name__, self.key,
                                     self.next_key, prev_key)
|
4178.3.3
by John Arbash Meinel
LRUCache is now implemented with a dict to a linked list, |
45 |
|
46 |
||
|
2993.1.1
by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need |
class LRUCache:
    """A class which manages a cache of entries, removing unused ones."""

    def __init__(self, max_cache=100, after_cleanup_count=None):
        """Create a new LRUCache.

        :param max_cache: The maximum number of entries to hold before a
            cleanup is triggered.
        :param after_cleanup_count: After a cleanup, shrink to this many
            entries.  Defaults to 80% of max_cache (see _update_max_cache).
        """
        self._cache = {}
        # The "HEAD" of the lru linked list
        self._most_recently_used = None
        # The "TAIL" of the lru linked list
        self._least_recently_used = None
        self._update_max_cache(max_cache, after_cleanup_count)

    def __contains__(self, key):
        return key in self._cache

    def __getitem__(self, key):
        cache = self._cache
        node = cache[key]
        # Inlined from _record_access to decrease the overhead of __getitem__
        # We also have more knowledge about structure if __getitem__ is
        # succeeding, then we know that self._most_recently_used must not be
        # None, etc.
        mru = self._most_recently_used
        if node is mru:
            # Nothing to do, this node is already at the head of the queue
            return node.value
        # Remove this node from the old location
        node_prev = node.prev
        next_key = node.next_key
        # benchmarking shows that the lookup of _null_key in globals is faster
        # than the attribute lookup for (node is self._least_recently_used)
        if next_key is _null_key:
            # 'node' is the _least_recently_used, because it doesn't have a
            # 'next' item. So move the current lru to the previous node.
            self._least_recently_used = node_prev
        else:
            node_next = cache[next_key]
            node_next.prev = node_prev
            node_prev.next_key = next_key
        # Insert this node at the front of the list
        node.next_key = mru.key
        mru.prev = node
        self._most_recently_used = node
        node.prev = None
        return node.value

    def __len__(self):
        return len(self._cache)

    def __setitem__(self, key, value):
        """Add a new value to the cache"""
        if key is _null_key:
            raise ValueError('cannot use _null_key as a key')
        if key in self._cache:
            # Existing entry: overwrite the value in place and refresh its
            # position in the recency list.
            node = self._cache[key]
            node.value = value
            self._record_access(node)
        else:
            node = _LRUNode(key, value)
            self._cache[key] = node
            self._record_access(node)

        if len(self._cache) > self._max_cache:
            # Trigger the cleanup
            self.cleanup()

    def cache_size(self):
        """Get the number of entries we will cache."""
        return self._max_cache

    def get(self, key, default=None):
        """Return the cached value for key, or default if not present.

        Unlike dict.get, a hit also counts as an access and refreshes the
        entry's position in the recency list.
        """
        node = self._cache.get(key, None)
        if node is None:
            return default
        self._record_access(node)
        return node.value

    def keys(self):
        """Get the list of keys currently cached.

        Note that values returned here may not be available by the time you
        request them later. This is simply meant as a peek into the current
        state.

        :return: An unordered list of keys that are currently cached.
        """
        # GZ 2016-06-04: Maybe just make this return the view?
        return list(self._cache.keys())

    def as_dict(self):
        """Get a new dict with the same key:value pairs as the cache"""
        return {k: n.value for k, n in self._cache.items()}

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_count.
        """
        # Make sure the cache is shrunk to the correct size
        while len(self._cache) > self._after_cleanup_count:
            self._remove_lru()

    def _record_access(self, node):
        """Record that key was accessed."""
        # Move 'node' to the front of the queue
        if self._most_recently_used is None:
            # First entry: it is both head and tail of the list.
            self._most_recently_used = node
            self._least_recently_used = node
            return
        elif node is self._most_recently_used:
            # Nothing to do, this node is already at the head of the queue
            return
        # We've taken care of the tail pointer, remove the node, and insert it
        # at the front
        # REMOVE
        if node is self._least_recently_used:
            self._least_recently_used = node.prev
        if node.prev is not None:
            node.prev.next_key = node.next_key
        if node.next_key is not _null_key:
            node_next = self._cache[node.next_key]
            node_next.prev = node.prev
        # INSERT
        node.next_key = self._most_recently_used.key
        self._most_recently_used.prev = node
        self._most_recently_used = node
        node.prev = None

    def _remove_node(self, node):
        """Unlink node from the list and drop it from the cache dict."""
        if node is self._least_recently_used:
            self._least_recently_used = node.prev
        self._cache.pop(node.key)
        # If we have removed all entries, remove the head pointer as well
        if self._least_recently_used is None:
            self._most_recently_used = None
        if node.prev is not None:
            node.prev.next_key = node.next_key
        if node.next_key is not _null_key:
            node_next = self._cache[node.next_key]
            node_next.prev = node.prev
        # And remove this node's pointers
        node.prev = None
        node.next_key = _null_key

    def _remove_lru(self):
        """Remove one entry from the lru, and handle consequences.

        If there are no more references to the lru, then this entry should be
        removed from the cache.
        """
        self._remove_node(self._least_recently_used)

    def clear(self):
        """Clear out all of the cache."""
        # Clean up in LRU order
        while self._cache:
            self._remove_lru()

    def resize(self, max_cache, after_cleanup_count=None):
        """Change the number of entries that will be cached."""
        self._update_max_cache(max_cache,
                               after_cleanup_count=after_cleanup_count)

    def _update_max_cache(self, max_cache, after_cleanup_count=None):
        """Set the size limits and immediately enforce them."""
        self._max_cache = max_cache
        if after_cleanup_count is None:
            # Default: after a cleanup keep 80% of max_cache.
            self._after_cleanup_count = self._max_cache * 8 // 10
        else:
            self._after_cleanup_count = min(after_cleanup_count,
                                            self._max_cache)
        self.cleanup()
218 |
||
|
2993.1.1
by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need |
219 |
|
class LRUSizeCache(LRUCache):
    """An LRUCache that removes things based on the size of the values.

    This differs in that it doesn't care how many actual items there are,
    it just restricts the cache to be cleaned up after so much data is stored.

    The size of items added will be computed using compute_size(value), which
    defaults to len() if not supplied.
    """

    def __init__(self, max_size=1024 * 1024, after_cleanup_size=None,
                 compute_size=None):
        """Create a new LRUSizeCache.

        :param max_size: The max number of bytes to store before we start
            clearing out entries.
        :param after_cleanup_size: After cleaning up, shrink everything to this
            size.
        :param compute_size: A function to compute the size of the values. We
            use a function here, so that you can pass 'len' if you are just
            using simple strings, or a more complex function if you are using
            something like a list of strings, or even a custom object.
            The function should take the form "compute_size(value) => integer".
            If not supplied, it defaults to 'len()'
        """
        self._value_size = 0
        self._compute_size = compute_size
        if compute_size is None:
            self._compute_size = len
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        # Entry-count limit is a backstop: assume values average >= 512 bytes.
        LRUCache.__init__(self, max_cache=max(int(max_size // 512), 1))

    def __setitem__(self, key, value):
        """Add a new value to the cache"""
        if key is _null_key:
            raise ValueError('cannot use _null_key as a key')
        node = self._cache.get(key, None)
        value_len = self._compute_size(value)
        if value_len >= self._after_cleanup_size:
            # The new value is 'too big to fit', as it would fill up/overflow
            # the cache all by itself
            trace.mutter('Adding the key %r to an LRUSizeCache failed.'
                         ' value %d is too big to fit in the cache'
                         ' with size %d %d', key, value_len,
                         self._after_cleanup_size, self._max_size)
            if node is not None:
                # We won't be replacing the old node, so just remove it
                self._remove_node(node)
            return
        if node is None:
            node = _LRUNode(key, value)
            self._cache[key] = node
        else:
            # Replacing an existing entry: credit back the old value's size
            # and store the new value.  (Previously the old value was left in
            # place, so lookups after an update returned stale data and the
            # byte accounting described a value that was not actually cached.)
            self._value_size -= self._compute_size(node.value)
            node.value = value
        self._value_size += value_len
        self._record_access(node)

        if self._value_size > self._max_size:
            # Time to cleanup
            self.cleanup()

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_size.
        """
        # Make sure the cache is shrunk to the correct size
        while self._value_size > self._after_cleanup_size:
            self._remove_lru()

    def _remove_node(self, node):
        """Unlink node, keeping the running byte total in sync."""
        self._value_size -= self._compute_size(node.value)
        LRUCache._remove_node(self, node)

    def resize(self, max_size, after_cleanup_size=None):
        """Change the number of bytes that will be cached."""
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        max_cache = max(int(max_size // 512), 1)
        self._update_max_cache(max_cache)

    def _update_max_size(self, max_size, after_cleanup_size=None):
        """Set the byte limits; after_cleanup_size defaults to 80% of max."""
        self._max_size = max_size
        if after_cleanup_size is None:
            self._after_cleanup_size = self._max_size * 8 // 10
        else:
            self._after_cleanup_size = min(after_cleanup_size, self._max_size)