128
129
self.assertFalse('foo' in cache)
131
def test_cleanup_function_deprecated(self):
    """Test that per-node cleanup functions are no longer allowed."""
    cache = lru_cache.LRUCache()
    # Passing a cleanup callback to add() was deprecated in 2.5.0;
    # under the deprecation wrapper it must now raise ValueError.
    self.assertRaises(ValueError, self.applyDeprecated,
                      symbol_versioning.deprecated_in((2, 5, 0)),
                      cache.add, "key", 1, cleanup=lambda: None)
130
138
def test_len(self):
131
139
cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10)
278
286
self.assertEqual([3, 4, 5, 6, 7, 8, 9, 10], sorted(cache.keys()))
279
cache[11] = 12 # triggers cleanup back to new after_cleanup_count
287
cache[11] = 12 # triggers cleanup back to new after_cleanup_count
280
288
self.assertEqual([6, 7, 8, 9, 10, 11], sorted(cache.keys()))
285
293
def test_basic_init(self):
    """A freshly-constructed LRUSizeCache has the expected defaults."""
    cache = lru_cache.LRUSizeCache()
    self.assertEqual(2048, cache._max_cache)
    # By default, cleanup trims back to 80% of the maximum size.
    self.assertEqual(int(cache._max_size * 0.8), cache._after_cleanup_size)
    # Nothing has been stored yet, so no value size is tracked.
    self.assertEqual(0, cache._value_size)
291
299
def test_add__null_key(self):
    """Storing the reserved _null_key sentinel is rejected."""
    cache = lru_cache.LRUSizeCache()
    self.assertRaises(ValueError,
                      cache.__setitem__, lru_cache._null_key, 1)
296
304
def test_add_tracks_size(self):
297
305
cache = lru_cache.LRUSizeCache()
318
326
self.assertEqual({'test': 'key'}, cache.as_dict())
319
327
cache['test2'] = 'key that is too big'
320
328
self.assertEqual(3, cache._value_size)
321
self.assertEqual({'test': 'key'}, cache.as_dict())
329
self.assertEqual({'test':'key'}, cache.as_dict())
322
330
# If we would add a key, only to cleanup and remove all cached entries,
323
331
# then obviously that value should not be stored
324
332
cache['test3'] = 'bigkey'
325
333
self.assertEqual(3, cache._value_size)
326
self.assertEqual({'test': 'key'}, cache.as_dict())
334
self.assertEqual({'test':'key'}, cache.as_dict())
328
336
cache['test4'] = 'bikey'
329
337
self.assertEqual(3, cache._value_size)
330
self.assertEqual({'test': 'key'}, cache.as_dict())
338
self.assertEqual({'test':'key'}, cache.as_dict())
332
340
def test_adding_clears_cache_based_on_size(self):
    """The cache is cleared in LRU order until small enough."""
    cache = lru_cache.LRUSizeCache(max_size=20)
    cache['key1'] = 'value'  # 5 chars
    cache['key2'] = 'value2'  # 6 chars
    cache['key3'] = 'value23'  # 7 chars
    self.assertEqual(5 + 6 + 7, cache._value_size)
    cache['key2']  # reference key2 so it gets a newer reference time
    cache['key4'] = 'value234'  # 8 chars, over limit
    # We have to remove 2 keys to get back under limit; key1 and key3
    # are the least recently used, so they are the ones evicted.
    self.assertEqual(6 + 8, cache._value_size)
    self.assertEqual({'key2': 'value2', 'key4': 'value234'},
                     cache.as_dict())
346
354
def test_adding_clears_to_after_cleanup_size(self):
    """Cleanup trims the cache all the way down to after_cleanup_size."""
    cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
    cache['key1'] = 'value'  # 5 chars
    cache['key2'] = 'value2'  # 6 chars
    cache['key3'] = 'value23'  # 7 chars
    self.assertEqual(5 + 6 + 7, cache._value_size)
    cache['key2']  # reference key2 so it gets a newer reference time
    cache['key4'] = 'value234'  # 8 chars, over limit
    # We have to remove 3 keys to get back under limit: with
    # after_cleanup_size=10 only the newest entry (8 chars) fits.
    self.assertEqual(8, cache._value_size)
    self.assertEqual({'key4': 'value234'}, cache.as_dict())
358
366
def test_custom_sizes(self):
    """A compute_size callback overrides len() for size accounting."""
    def size_of_list(lst):
        # Size of a value is the total character count across its items,
        # so ['val', 'ue'] counts as 5.
        # NOTE(review): this line was reconstructed from a corrupted diff;
        # the 5/6/7/8-char comments and size assertions below require it.
        return sum(len(x) for x in lst)

    cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10,
                                   compute_size=size_of_list)
    cache['key1'] = ['val', 'ue']  # 5 chars
    cache['key2'] = ['val', 'ue2']  # 6 chars
    cache['key3'] = ['val', 'ue23']  # 7 chars
    self.assertEqual(5 + 6 + 7, cache._value_size)
    cache['key2']  # reference key2 so it gets a newer reference time
    cache['key4'] = ['value', '234']  # 8 chars, over limit
    # We have to remove 3 keys to get back under limit; only the newest
    # entry fits within after_cleanup_size=10.
    self.assertEqual(8, cache._value_size)
    self.assertEqual({'key4': ['value', '234']}, cache.as_dict())
374
382
def test_cleanup(self):
375
383
cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
377
385
# Add these in order
378
cache['key1'] = 'value' # 5 chars
379
cache['key2'] = 'value2' # 6 chars
380
cache['key3'] = 'value23' # 7 chars
381
self.assertEqual(5 + 6 + 7, cache._value_size)
386
cache['key1'] = 'value' # 5 chars
387
cache['key2'] = 'value2' # 6 chars
388
cache['key3'] = 'value23' # 7 chars
389
self.assertEqual(5+6+7, cache._value_size)
384
392
# Only the most recent fits after cleaning up