# Copyright (C) 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16  | 
||
17  | 
"""Tests for the lru_cache module."""
 | 
|
18  | 
||
19  | 
from bzrlib import (  | 
|
20  | 
lru_cache,  | 
|
21  | 
tests,  | 
|
22  | 
    )
 | 
|
23  | 
||
24  | 
||
25  | 
class TestLRUCache(tests.TestCase):
    """Test that LRU cache properly keeps track of entries."""

    def test_missing(self):
        """A key that was never added is absent and raises KeyError on lookup."""
        cache = lru_cache.LRUCache(max_cache=10)

        self.assertFalse('foo' in cache)
        self.assertRaises(KeyError, cache.__getitem__, 'foo')

        cache['foo'] = 'bar'
        self.assertEqual('bar', cache['foo'])
        self.assertTrue('foo' in cache)
        self.assertFalse('bar' in cache)

    def test_overflow(self):
        """Adding extra entries will pop out old ones."""
        cache = lru_cache.LRUCache(max_cache=1)

        cache['foo'] = 'bar'
        # With a max cache of 1, adding 'baz' should pop out 'foo'
        cache['baz'] = 'biz'

        self.assertFalse('foo' in cache)
        self.assertTrue('baz' in cache)

        self.assertEqual('biz', cache['baz'])

    def test_by_usage(self):
        """Accessing entries bumps them up in priority."""
        cache = lru_cache.LRUCache(max_cache=2)

        cache['baz'] = 'biz'
        cache['foo'] = 'bar'

        self.assertEqual('biz', cache['baz'])

        # This must kick out 'foo' because it was the last accessed
        cache['nub'] = 'in'

        self.assertFalse('foo' in cache)

    def test_queue_stays_bounded(self):
        """Lots of accesses does not cause the queue to grow without bound."""
        cache = lru_cache.LRUCache(max_cache=10)

        cache['baz'] = 'biz'
        cache['foo'] = 'bar'

        for i in xrange(1000):
            cache['baz']

        # The access queue records every hit; it must be compacted
        # periodically rather than growing by 1000 entries here.
        self.assertTrue(len(cache._queue) < 40)

    def test_cleanup_function_called(self):
        """Test that we can use a cleanup function."""
        # NOTE: this test was originally also named ``test_cleanup``, which
        # made the later ``test_cleanup`` definition in this class silently
        # shadow it, so it never ran.  Renamed so both tests are collected.
        cleanup_called = []
        def cleanup_func(key, val):
            cleanup_called.append((key, val))

        cache = lru_cache.LRUCache(max_cache=2)

        cache.add('baz', '1', cleanup=cleanup_func)
        cache.add('foo', '2', cleanup=cleanup_func)
        cache.add('biz', '3', cleanup=cleanup_func)

        # Adding 'biz' overflowed max_cache, evicting (and cleaning up) 'baz'
        self.assertEqual([('baz', '1')], cleanup_called)

        # 'foo' is now most recent, so final cleanup will call it last
        cache['foo']
        cache.clear()
        self.assertEqual([('baz', '1'), ('biz', '3'), ('foo', '2')], cleanup_called)

    def test_cleanup_on_replace(self):
        """Replacing an object should cleanup the old value."""
        cleanup_called = []
        def cleanup_func(key, val):
            cleanup_called.append((key, val))

        cache = lru_cache.LRUCache(max_cache=2)
        cache.add(1, 10, cleanup=cleanup_func)
        cache.add(2, 20, cleanup=cleanup_func)
        cache.add(2, 25, cleanup=cleanup_func)

        self.assertEqual([(2, 20)], cleanup_called)
        self.assertEqual(25, cache[2])

        # Even __setitem__ should make sure cleanup() is called
        cache[2] = 26
        self.assertEqual([(2, 20), (2, 25)], cleanup_called)

    def test_len(self):
        """len() tracks live entries, and never exceeds max_cache."""
        cache = lru_cache.LRUCache(max_cache=10)

        cache[1] = 10
        cache[2] = 20
        cache[3] = 30
        cache[4] = 40

        self.assertEqual(4, len(cache))

        cache[5] = 50
        cache[6] = 60
        cache[7] = 70
        cache[8] = 80

        self.assertEqual(8, len(cache))

        cache[1] = 15 # replacement

        self.assertEqual(8, len(cache))

        cache[9] = 90
        cache[10] = 100
        cache[11] = 110

        # We hit the max
        self.assertEqual(10, len(cache))

    def test_cleanup_shrinks_to_after_clean_size(self):
        """Overflowing max_cache shrinks the cache to after_cleanup_size."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_size=3)

        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual(5, len(cache))
        # This will bump us over the max, which causes us to shrink down to
        # after_cleanup_cache size
        cache.add(6, 40)
        self.assertEqual(3, len(cache))

    def test_after_cleanup_larger_than_max(self):
        """An after_cleanup_size above max_cache is clamped to max_cache."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_size=10)
        self.assertEqual(5, cache._after_cleanup_size)

    def test_after_cleanup_none(self):
        """after_cleanup_size=None defaults to max_cache."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_size=None)
        self.assertEqual(5, cache._after_cleanup_size)

    def test_cleanup(self):
        """An explicit cleanup() compacts down to after_cleanup_size."""
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_size=2)

        # Add these in order
        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual(5, len(cache))
        # Force a compaction
        cache.cleanup()
        self.assertEqual(2, len(cache))

    def test_compact_preserves_last_access_order(self):
        """Compacting the queue keeps only each key's most recent access."""
        cache = lru_cache.LRUCache(max_cache=5)

        # Add these in order
        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual([1, 2, 3, 4, 5], list(cache._queue))

        # Now access some randomly
        cache[2]
        cache[5]
        cache[3]
        cache[2]
        self.assertEqual([1, 2, 3, 4, 5, 2, 5, 3, 2], list(cache._queue))
        self.assertEqual({1:1, 2:3, 3:2, 4:1, 5:2}, cache._refcount)

        # Compacting should save the last position
        cache._compact_queue()
        self.assertEqual([1, 4, 5, 3, 2], list(cache._queue))
        self.assertEqual({1:1, 2:1, 3:1, 4:1, 5:1}, cache._refcount)

    def test_get(self):
        """get() acts like dict.get(): returns a default instead of raising."""
        cache = lru_cache.LRUCache(max_cache=5)

        cache.add(1, 10)
        cache.add(2, 20)
        self.assertEqual(20, cache.get(2))
        self.assertIs(None, cache.get(3))
        obj = object()
        self.assertIs(obj, cache.get(3, obj))

    def test_keys(self):
        """keys() lists the keys currently held, after any evictions."""
        cache = lru_cache.LRUCache(max_cache=5)

        cache[1] = 2
        cache[2] = 3
        cache[3] = 4
        self.assertEqual([1, 2, 3], sorted(cache.keys()))
        cache[4] = 5
        cache[5] = 6
        cache[6] = 7
        self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
class TestLRUSizeCache(tests.TestCase):
    """Tests for LRUSizeCache, the variant bounded by total value size."""

    def _check_state(self, cache, expected_size, expected_contents):
        # Helper: assert both the tracked total size and the backing dict.
        self.assertEqual(expected_size, cache._value_size)
        self.assertEqual(expected_contents, cache._cache)

    def test_basic_init(self):
        """A default-constructed cache starts empty with default limits."""
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(2048, cache._max_cache)
        self.assertEqual(4*2048, cache._compact_queue_length)
        self.assertEqual(cache._max_size, cache._after_cleanup_size)
        self.assertEqual(0, cache._value_size)

    def test_add_tracks_size(self):
        """Adding an entry grows the recorded total value size."""
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(0, cache._value_size)
        cache.add('my key', 'my value text')
        self.assertEqual(13, cache._value_size)  # len('my value text')

    def test_remove_tracks_size(self):
        """Removing an entry gives its size back."""
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(0, cache._value_size)
        cache.add('my key', 'my value text')
        self.assertEqual(13, cache._value_size)
        cache._remove('my key')
        self.assertEqual(0, cache._value_size)

    def test_no_add_over_size(self):
        """Adding a large value may not be cached at all."""
        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
        self._check_state(cache, 0, {})
        cache.add('test', 'key')
        self._check_state(cache, 3, {'test': 'key'})
        # A value larger than max_size is simply never stored.
        cache.add('test2', 'key that is too big')
        self._check_state(cache, 3, {'test': 'key'})
        # If we would add a key, only to cleanup and remove all cached entries,
        # then obviously that value should not be stored
        cache.add('test3', 'bigkey')
        self._check_state(cache, 3, {'test': 'key'})

        cache.add('test4', 'bikey')
        self._check_state(cache, 3, {'test': 'key'})

    def test_adding_clears_cache_based_on_size(self):
        """The cache is cleared in LRU order until small enough"""
        cache = lru_cache.LRUSizeCache(max_size=20)
        for key, value in [('key1', 'value'),    # 5 chars
                           ('key2', 'value2'),   # 6 chars
                           ('key3', 'value23')]: # 7 chars
            cache.add(key, value)
        self.assertEqual(5+6+7, cache._value_size)
        cache['key2'] # reference key2 so it gets a newer reference time
        cache.add('key4', 'value234') # 8 chars, over limit
        # We have to remove 2 keys to get back under limit
        self._check_state(cache, 6+8, {'key2': 'value2', 'key4': 'value234'})

    def test_adding_clears_to_after_cleanup_size(self):
        """Overflow shrinks all the way down to after_cleanup_size."""
        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
        for key, value in [('key1', 'value'),    # 5 chars
                           ('key2', 'value2'),   # 6 chars
                           ('key3', 'value23')]: # 7 chars
            cache.add(key, value)
        self.assertEqual(5+6+7, cache._value_size)
        cache['key2'] # reference key2 so it gets a newer reference time
        cache.add('key4', 'value234') # 8 chars, over limit
        # We have to remove 3 keys to get back under limit
        self._check_state(cache, 8, {'key4': 'value234'})

    def test_custom_sizes(self):
        """A user-supplied compute_size function drives the accounting."""
        def total_length(seq):
            # A list's size is the sum of the lengths of its members.
            return sum(map(len, seq))
        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10,
                                       compute_size=total_length)

        for key, value in [('key1', ['val', 'ue']),    # 5 chars
                           ('key2', ['val', 'ue2']),   # 6 chars
                           ('key3', ['val', 'ue23'])]: # 7 chars
            cache.add(key, value)
        self.assertEqual(5+6+7, cache._value_size)
        cache['key2'] # reference key2 so it gets a newer reference time
        cache.add('key4', ['value', '234']) # 8 chars, over limit
        # We have to remove 3 keys to get back under limit
        self._check_state(cache, 8, {'key4': ['value', '234']})

    def test_cleanup(self):
        """An explicit cleanup() shrinks down to after_cleanup_size."""
        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)

        # Add these in order
        for key, value in [('key1', 'value'),    # 5 chars
                           ('key2', 'value2'),   # 6 chars
                           ('key3', 'value23')]: # 7 chars
            cache.add(key, value)
        self.assertEqual(5+6+7, cache._value_size)

        cache.cleanup()
        # Only the most recent fits after cleaning up
        self.assertEqual(7, cache._value_size)

    def test_keys(self):
        """keys() lists everything currently held in the cache."""
        cache = lru_cache.LRUSizeCache(max_size=10)

        for key, value in [(1, 'a'), (2, 'b'), (3, 'cdef')]:
            cache[key] = value
        self.assertEqual([1, 2, 3], sorted(cache.keys()))
self.assertEqual([1, 2, 3], sorted(cache.keys()))  |