# Copyright (C) 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
"""A simple least-recently-used (LRU) cache."""

from collections import deque

from bzrlib import symbol_versioning


class LRUCache(object):
    """A class which manages a cache of entries, removing unused ones."""

    def __init__(self, max_cache=100, after_cleanup_count=None,
                 after_cleanup_size=symbol_versioning.DEPRECATED_PARAMETER):
        if symbol_versioning.deprecated_passed(after_cleanup_size):
            symbol_versioning.warn('LRUCache.__init__(after_cleanup_size) was'
                                   ' deprecated in 1.11. Use'
                                   ' after_cleanup_count instead.',
                                   DeprecationWarning)
            after_cleanup_count = after_cleanup_size
        self._cache = {}
        self._cleanup = {}
        self._queue = deque() # Track when things are accessed
        self._refcount = {} # number of entries in self._queue for each key
        self._update_max_cache(max_cache, after_cleanup_count)
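
    # Illustrative usage only (not from the original source); it assumes the
    # add()/__getitem__ methods of the full bzrlib LRUCache class, which are
    # elided from this excerpt:
    #
    #   cache = LRUCache(max_cache=10, after_cleanup_count=8)
    #   cache.add('rev-id', 'inventory text')
    #   cache['rev-id']                     # fetches and records the access
    #   LRUCache(after_cleanup_size=8)      # still works, but warns that the
    #                                       # keyword was deprecated in 1.11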

    def __contains__(self, key):
        return key in self._cache

    def keys(self):
        """Get the list of keys currently cached.

        Note that values returned here may not be available by the time you
        request them later. This is simply meant as a peek into the current
        state of the cache.

        :return: An unordered list of keys that are currently cached.
        """
        return self._cache.keys()
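
    # Illustrative example (not from the original source); keys() is only a
    # snapshot, so an entry listed here may already be gone by the time you
    # ask for it again:
    #
    #   cache = LRUCache(max_cache=2)
    #   cache['a'] = 1
    #   cache['b'] = 2
    #   sorted(cache.keys())    # ['a', 'b'] right now, but adding more
    #                           # entries may evict either of them later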

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_count.
        """
        # Make sure the cache is shrunk to the correct size
        while len(self._cache) > self._after_cleanup_count:
            self._remove_lru()
        # No need to compact the queue at this point, because the code that
        # calls this would have already triggered it based on queue length
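
    # Worked example (illustrative, not from the original source): with the
    # default after_cleanup_count of 80% of max_cache, a full cache is trimmed
    # back to that level, least recently used entries first.
    #
    #   cache = LRUCache(max_cache=100)     # after_cleanup_count defaults to 80
    #   for i in range(100):
    #       cache[i] = str(i)
    #   cache.cleanup()                     # evicts the 20 least recently used
    #   len(cache._cache)                   # -> 80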

    def __setitem__(self, key, value):
        """Add a value to the cache; there will be no cleanup function."""
        self.add(key, value, cleanup=None)

    def _remove(self, key):
        """Remove an entry, making sure to maintain the invariants."""
        cleanup = self._cleanup.pop(key, None)
        val = self._cache.pop(key)
        if cleanup is not None:
            cleanup(key, val)
        return val
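
    # Illustrative example (not from the original source) of the cleanup
    # callbacks that _remove() honours; add() belongs to the full class and is
    # elided from this excerpt:
    #
    #   evicted = []
    #   cache = LRUCache(max_cache=2, after_cleanup_count=1)
    #   cache.add('a', 1, cleanup=lambda key, value: evicted.append(key))
    #   cache.add('b', 2)
    #   cache.add('c', 3)   # overflows max_cache, so the cache is trimmed back
    #                       # to one entry and 'a' is cleaned up first
    #   evicted             # -> ['a']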

    def clear(self):
        """Clear out all of the cache."""
        while self._cache:
            self._remove_lru()
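
    # Illustrative example (not from the original source): clear() empties the
    # cache in LRU order, so every entry's cleanup callback is invoked:
    #
    #   removed = []
    #   cache = LRUCache()
    #   cache.add('a', 1, cleanup=lambda key, value: removed.append(key))
    #   cache.add('b', 2, cleanup=lambda key, value: removed.append(key))
    #   cache.clear()
    #   removed             # -> ['a', 'b']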

    def resize(self, max_cache, after_cleanup_count=None):
        """Change the number of entries that will be cached."""
        self._update_max_cache(max_cache,
                               after_cleanup_count=after_cleanup_count)

    def _update_max_cache(self, max_cache, after_cleanup_count=None):
        self._max_cache = max_cache
        if after_cleanup_count is None:
            self._after_cleanup_count = self._max_cache * 8 / 10
        else:
            self._after_cleanup_count = min(after_cleanup_count, self._max_cache)

        self._compact_queue_length = 4*self._max_cache
        if len(self._queue) > self._compact_queue_length:
            self._compact_queue()
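
    # For example (illustrative, not from the original source):
    #
    #   cache = LRUCache(max_cache=100)
    #   cache.resize(1000)      # _after_cleanup_count becomes 800 (the 80%
    #                           # default) and the queue is compacted once it
    #                           # grows past 4 * 1000 recorded accesses
    #   cache.resize(1000, after_cleanup_count=900)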


class LRUSizeCache(LRUCache):
    """An LRUCache that removes things based on the size of the values."""

    def __init__(self, max_size=1024*1024, after_cleanup_size=None,
                 compute_size=None):
        """Create a new LRUSizeCache.

        :param compute_size: A function to compute the size of the values. The
            function should take the form "compute_size(value) => integer".
            If not supplied, it defaults to 'len()'.
        """
        self._value_size = 0
        self._compute_size = compute_size
        if compute_size is None:
            self._compute_size = len
        # This approximates that texts are > 0.5k in size. It only really
        # matters when we clean up the queue, so the exact value is not
        # critical.
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        LRUCache.__init__(self, max_cache=max(int(max_size/512), 1))
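
    # Illustrative usage (not from the original source); add() is part of the
    # full class and is elided from this excerpt:
    #
    #   texts = LRUSizeCache(max_size=1024*1024)       # ~1MB of value data
    #   texts.add('rev-1', 'a few kilobytes of text')  # size measured by len()
    #
    #   # A compute_size function lets values be something other than strings:
    #   chunked = LRUSizeCache(max_size=1024*1024,
    #                          compute_size=lambda value: sum(map(len, value)))
    #   chunked.add('rev-2', ['line one\n', 'line two\n'])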

    def add(self, key, value, cleanup=None):
        """Add a new value to the cache."""

    def _remove(self, key):
        """Remove an entry, making sure to maintain the invariants."""
        val = LRUCache._remove(self, key)
        self._value_size -= self._compute_size(val)

    def resize(self, max_size, after_cleanup_size=None):
        """Change the number of bytes that will be cached."""
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        max_cache = max(int(max_size/512), 1)
        self._update_max_cache(max_cache)
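
    # For example (illustrative, not from the original source): shrinking the
    # byte budget also shrinks the entry-count cap kept by the base class.
    #
    #   sized = LRUSizeCache(max_size=1024*1024)
    #   sized.resize(64*1024)   # _after_cleanup_size defaults to 80% of the new
    #                           # budget, and max_cache becomes 64*1024/512 = 128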

    def _update_max_size(self, max_size, after_cleanup_size=None):
        self._max_size = max_size
        if after_cleanup_size is None:
            self._after_cleanup_size = self._max_size * 8 / 10
        else:
            self._after_cleanup_size = min(after_cleanup_size, self._max_size)
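

# A minimal usage sketch, not part of the original bzrlib module. It assumes
# the complete LRUCache/LRUSizeCache implementations, including the add() and
# get() methods that are elided from this excerpt.
def _demo_lru_caches():
    # Count-based cache: holds at most 3 entries, trimmed to 2 by cleanup().
    cache = LRUCache(max_cache=3, after_cleanup_count=2)
    for name in ('a', 'b', 'c'):
        cache[name] = name.upper()
    cache.cleanup()             # now at most 2 entries remain

    # Size-based cache: the budget is bytes of value data, as measured by
    # compute_size (len() when not supplied).
    sized = LRUSizeCache(max_size=1024, compute_size=len)
    sized.add('small', 'x' * 100)
    sized.add('big', 'y' * 500)
    return cache, sized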