# Copyright (C) 2006, 2008, 2009 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A simple least-recently-used (LRU) cache."""

from bzrlib import (
    symbol_versioning,
    trace,
    )

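# _null_key is a sentinel object: a node's next_key points at it when the node
# has no successor, so code can test "next_key is _null_key" rather than
# comparing against self._least_recently_used (see __getitem__ below).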
_null_key = object()

class _LRUNode(object):
    """This maintains the linked-list which is the lru internals."""

    __slots__ = ('prev', 'next_key', 'key', 'value', 'cleanup', 'size')

    def __init__(self, key, value, cleanup=None):
        self.prev = None
        self.next_key = _null_key
        self.key = key
        self.value = value
        self.cleanup = cleanup
        # TODO: We could compute this 'on-the-fly' like we used to, and remove
        #       one pointer from this object, we just need to decide if it
        #       actually costs us much of anything in normal usage
        self.size = None

    def __repr__(self):
        if self.prev is None:
            prev_key = None
        else:
            prev_key = self.prev.key
        return '%s(%r n:%r p:%r)' % (self.__class__.__name__, self.key,
                                     self.next_key, prev_key)

    def run_cleanup(self):
        try:
            if self.cleanup is not None:
                self.cleanup(self.key, self.value)
        finally:
            self.cleanup = None
            # Just make sure to break any refcycles, etc
            self.value = None


class LRUCache(object):
    """A class which manages a cache of entries, removing unused ones."""

    def __init__(self, max_cache=100, after_cleanup_count=None,
                 after_cleanup_size=symbol_versioning.DEPRECATED_PARAMETER):
        if symbol_versioning.deprecated_passed(after_cleanup_size):
            symbol_versioning.warn('LRUCache.__init__(after_cleanup_size) was'
                                   ' deprecated in 1.11. Use'
                                   ' after_cleanup_count instead.',
                                   DeprecationWarning)
            after_cleanup_count = after_cleanup_size
        self._cache = {}
        # The "HEAD" of the lru linked list
        self._most_recently_used = None
        # The "TAIL" of the lru linked list
        self._least_recently_used = None
        self._update_max_cache(max_cache, after_cleanup_count)

    def __contains__(self, key):
        return key in self._cache

    def __getitem__(self, key):
        cache = self._cache
        node = cache[key]
        # Inlined from _record_access to decrease the overhead of __getitem__.
        # We also have more knowledge about structure here: if __getitem__ is
        # succeeding, we know that self._most_recently_used must not be
        # None, etc.
        mru = self._most_recently_used
        if node is mru:
            # Nothing to do, this node is already at the head of the queue
            return node.value
        # Remove this node from the old location
        node_prev = node.prev
        next_key = node.next_key
        # benchmarking shows that the lookup of _null_key in globals is faster
        # than the attribute lookup for (node is self._least_recently_used)
        if next_key is _null_key:
            # 'node' is the _least_recently_used, because it doesn't have a
            # 'next' item. So move the current lru to the previous node.
            self._least_recently_used = node_prev
        else:
            node_next = cache[next_key]
            node_next.prev = node_prev
        node_prev.next_key = next_key
        # Insert this node at the front of the list
        node.next_key = mru.key
        mru.prev = node
        self._most_recently_used = node
        node.prev = None
        return node.value

    def __len__(self):
        return len(self._cache)

    def _walk_lru(self):
        """Walk the LRU list, only meant to be used in tests."""
        node = self._most_recently_used
        if node is not None:
            if node.prev is not None:
                raise AssertionError('the _most_recently_used entry is not'
                                     ' supposed to have a previous entry'
                                     ' %s' % (node,))
        while node is not None:
            if node.next_key is _null_key:
                if node is not self._least_recently_used:
                    raise AssertionError('only the last node should have'
                                         ' no next value: %s' % (node,))
                node_next = None
            else:
                node_next = self._cache[node.next_key]
                if node_next.prev is not node:
                    raise AssertionError('inconsistency found, node.next.prev'
                                         ' != node: %s' % (node,))
            if node.prev is None:
                if node is not self._most_recently_used:
                    raise AssertionError('only the _most_recently_used should'
                                         ' not have a previous node: %s'
                                         % (node,))
            else:
                if node.prev.next_key != node.key:
                    raise AssertionError('inconsistency found, node.prev.next'
                                         ' != node: %s' % (node,))
            yield node
            node = node_next

    def add(self, key, value, cleanup=None):
        """Add a new value to the cache.

        Also, if the entry is ever removed from the cache, call
        cleanup(key, value).

        :param key: The key to store it under
        :param value: The object to store
        :param cleanup: None or a function taking (key, value) to indicate
                        'value' should be cleaned up.
        """
        if key is _null_key:
            raise ValueError('cannot use _null_key as a key')
        if key in self._cache:
            node = self._cache[key]
            try:
                node.run_cleanup()
            finally:
                node.value = value
                node.cleanup = cleanup
                self._record_access(node)
        else:
            node = _LRUNode(key, value, cleanup=cleanup)
            self._cache[key] = node
            self._record_access(node)

        if len(self._cache) > self._max_cache:
            # Trigger the cleanup
            self.cleanup()

    def cache_size(self):
        """Get the number of entries we will cache."""
        return self._max_cache

    def get(self, key, default=None):
        node = self._cache.get(key, None)
        if node is None:
            return default
        self._record_access(node)
        return node.value

    def keys(self):
        """Get the list of keys currently cached.

        Note that values returned here may not be available by the time you
        request them later. This is simply meant as a peek into the current
        state.

        :return: An unordered list of keys that are currently cached.
        """
        return self._cache.keys()

    def items(self):
        """Get the key:value pairs as a dict."""
        return dict((k, n.value) for k, n in self._cache.iteritems())

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_count.
        """
        # Make sure the cache is shrunk to the correct size
        while len(self._cache) > self._after_cleanup_count:
            self._remove_lru()

    def __setitem__(self, key, value):
        """Add a value to the cache; there will be no cleanup function."""
        self.add(key, value, cleanup=None)

    def _record_access(self, node):
        """Record that key was accessed."""
        # Move 'node' to the front of the queue
        if self._most_recently_used is None:
            self._most_recently_used = node
            self._least_recently_used = node
            return
        elif node is self._most_recently_used:
            # Nothing to do, this node is already at the head of the queue
            return
        # We've taken care of the tail pointer, remove the node, and insert it
        # at the front
        # REMOVE
        if node is self._least_recently_used:
            self._least_recently_used = node.prev
        if node.prev is not None:
            node.prev.next_key = node.next_key
        if node.next_key is not _null_key:
            node_next = self._cache[node.next_key]
            node_next.prev = node.prev
        # INSERT
        node.next_key = self._most_recently_used.key
        self._most_recently_used.prev = node
        self._most_recently_used = node
        node.prev = None

    def _remove_node(self, node):
        if node is self._least_recently_used:
            self._least_recently_used = node.prev
        self._cache.pop(node.key)
        # If we have removed all entries, remove the head pointer as well
        if self._least_recently_used is None:
            self._most_recently_used = None
        try:
            node.run_cleanup()
        finally:
            # Now remove this node from the linked list
            if node.prev is not None:
                node.prev.next_key = node.next_key
            if node.next_key is not _null_key:
                node_next = self._cache[node.next_key]
                node_next.prev = node.prev
            # And remove this node's pointers
            node.prev = None
            node.next_key = _null_key

    def _remove_lru(self):
        """Remove one entry from the lru, and handle consequences.

        If there are no more references to the lru, then this entry should be
        removed from the cache.
        """
        self._remove_node(self._least_recently_used)

    def clear(self):
        """Clear out all of the cache."""
        # Clean up in LRU order
        while self._cache:
            self._remove_lru()

    def resize(self, max_cache, after_cleanup_count=None):
        """Change the number of entries that will be cached."""
        self._update_max_cache(max_cache,
                               after_cleanup_count=after_cleanup_count)

    def _update_max_cache(self, max_cache, after_cleanup_count=None):
        self._max_cache = max_cache
        if after_cleanup_count is None:
            self._after_cleanup_count = self._max_cache * 8 / 10
        else:
            self._after_cleanup_count = min(after_cleanup_count,
                                            self._max_cache)
        self.cleanup()


class LRUSizeCache(LRUCache):
    """An LRUCache that removes things based on the size of the values.

    This differs in that it doesn't care how many actual items there are,
    it just restricts the cache to be cleaned up after so much data is stored.

    The size of items added will be computed using compute_size(value), which
    defaults to len() if not supplied.
    """

    def __init__(self, max_size=1024*1024, after_cleanup_size=None,
                 compute_size=None):
        """Create a new LRUSizeCache.

        :param max_size: The max number of bytes to store before we start
            clearing out entries.
        :param after_cleanup_size: After cleaning up, shrink everything to this
            size.
        :param compute_size: A function to compute the size of the values. We
            use a function here, so that you can pass 'len' if you are just
            using simple strings, or a more complex function if you are using
            something like a list of strings, or even a custom object.
            The function should take the form "compute_size(value) => integer".
            If not supplied, it defaults to 'len()'
        """
        self._value_size = 0
        self._compute_size = compute_size
        if compute_size is None:
            self._compute_size = len
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        LRUCache.__init__(self, max_cache=max(int(max_size/512), 1))

    def add(self, key, value, cleanup=None):
        """Add a new value to the cache.

        Also, if the entry is ever removed from the cache, call
        cleanup(key, value).

        :param key: The key to store it under
        :param value: The object to store
        :param cleanup: None or a function taking (key, value) to indicate
                        'value' should be cleaned up.
        """
        if key is _null_key:
            raise ValueError('cannot use _null_key as a key')
        node = self._cache.get(key, None)
        value_len = self._compute_size(value)
        if value_len >= self._after_cleanup_size:
            # The new value is 'too big to fit', as it would fill up/overflow
            # the cache all by itself
            trace.mutter('Adding the key %r to an LRUSizeCache failed.'
                         ' value %d is too big to fit in the cache'
                         ' with size %d %d', key, value_len,
                         self._after_cleanup_size, self._max_size)
            if node is not None:
                # We won't be replacing the old node, so just remove it
                self._remove_node(node)
            if cleanup is not None:
                cleanup(key, value)
            return
        if node is None:
            node = _LRUNode(key, value, cleanup=cleanup)
            self._cache[key] = node
        else:
            self._value_size -= node.size
        node.size = value_len
        self._value_size += value_len
        self._record_access(node)

        if self._value_size > self._max_size:
            # Time to cleanup
            self.cleanup()

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_size.
        """
        # Make sure the cache is shrunk to the correct size
        while self._value_size > self._after_cleanup_size:
            self._remove_lru()

    def _remove_node(self, node):
        self._value_size -= node.size
        LRUCache._remove_node(self, node)

    def resize(self, max_size, after_cleanup_size=None):
        """Change the number of bytes that will be cached."""
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        max_cache = max(int(max_size/512), 1)
        self._update_max_cache(max_cache)

    def _update_max_size(self, max_size, after_cleanup_size=None):
        self._max_size = max_size
        if after_cleanup_size is None:
            self._after_cleanup_size = self._max_size * 8 / 10
        else:
            self._after_cleanup_size = min(after_cleanup_size, self._max_size)
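

if __name__ == '__main__':
    # Minimal illustrative self-check of the two caches above; the helper
    # 'note_removal' and the sizes chosen here are arbitrary examples, not
    # part of the bzrlib API.
    freed = []

    def note_removal(key, value):
        # cleanup callbacks receive (key, value) when an entry is dropped
        freed.append(key)

    c = LRUCache(max_cache=3, after_cleanup_count=2)
    for i in range(4):
        c.add(i, str(i), cleanup=note_removal)
    # Adding the 4th entry exceeds max_cache, so cleanup() trims back to
    # after_cleanup_count entries, evicting the least recently used first.
    assert len(c) == 2
    assert freed == [0, 1]

    s = LRUSizeCache(max_size=20, after_cleanup_size=10)
    s.add('a', 'x' * 8)
    s.add('b', 'y' * 8)
    # 16 bytes stored, under max_size, so both entries are retained
    assert sorted(s.keys()) == ['a', 'b']
    s.add('c', 'z' * 8)
    # 24 bytes would exceed max_size, so cleanup() evicts LRU entries until
    # the total is at or below after_cleanup_size
    assert sorted(s.keys()) == ['c']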