# Copyright (C) 2007-2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for indices."""

from .. import (
    errors,
    index as _mod_index,
    tests,
    transport,
    )
from ..sixish import int2byte
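
# A sketch of the on-disk "Bazaar Graph Index 1" format as exercised by the
# assertions below (a summary of the expected byte strings, not a separate
# specification): the file starts with a header of the form
#   Bazaar Graph Index 1\nnode_ref_lists=<n>\nkey_elements=<n>\nlen=<n>\n
# followed by one sorted row per key,
#   <key elements joined by \x00>\x00<absent flag>\x00<reference lists,
#   \t-delimited, each a \r-delimited list of byte offsets>\x00<value>\n
# and a terminating blank line.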


class ErrorTests(tests.TestCase):

    def test_bad_index_format_signature(self):
        error = _mod_index.BadIndexFormatSignature("foo", "bar")
        self.assertEqual("foo is not an index of type bar.", str(error))

    def test_bad_index_data(self):
        error = _mod_index.BadIndexData("foo")
        self.assertEqual("Error in data for index foo.", str(error))

    def test_bad_index_duplicate_key(self):
        error = _mod_index.BadIndexDuplicateKey("foo", "bar")
        self.assertEqual("The key 'foo' is already in index 'bar'.",
                         str(error))

    def test_bad_index_key(self):
        error = _mod_index.BadIndexKey("foo")
        self.assertEqual("The key 'foo' is not a valid key.", str(error))

    def test_bad_index_options(self):
        error = _mod_index.BadIndexOptions("foo")
        self.assertEqual("Could not parse options for index foo.", str(error))

    def test_bad_index_value(self):
        error = _mod_index.BadIndexValue("foo")
        self.assertEqual("The value 'foo' is not a valid value.", str(error))


class TestGraphIndexBuilder(tests.TestCaseWithMemoryTransport):

    def test_build_index_empty(self):
        builder = _mod_index.GraphIndexBuilder()
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_empty_two_element_keys(self):
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=0\n\n",
            contents)

    def test_build_index_one_reference_list_empty(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_two_reference_list_empty(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_one_node_no_refs(self):
        builder = _mod_index.GraphIndexBuilder()
        builder.add_node((b'akey', ), b'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            b"akey\x00\x00\x00data\n\n", contents)

    def test_build_index_one_node_no_refs_accepts_empty_reflist(self):
        builder = _mod_index.GraphIndexBuilder()
        builder.add_node((b'akey', ), b'data', ())
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            b"akey\x00\x00\x00data\n\n", contents)

    def test_build_index_one_node_2_element_keys(self):
        # multipart keys are separated by \x00 - because they are fixed
        # length rather than variable, this does not cause any issues and
        # reads clearly.
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        builder.add_node((b'akey', b'secondpart'), b'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=1\n"
            b"akey\x00secondpart\x00\x00\x00data\n\n", contents)

    def test_add_node_empty_value(self):
        builder = _mod_index.GraphIndexBuilder()
        builder.add_node((b'akey', ), b'')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            b"akey\x00\x00\x00\n\n", contents)

    def test_build_index_nodes_sorted(self):
        # the lowest sorted node comes first.
        builder = _mod_index.GraphIndexBuilder()
        # use three to have a good chance of glitching dictionary hash
        # lookups etc. Insert in randomish order that is not correct
        # and not the reverse of the correct order.
        builder.add_node((b'2002', ), b'data')
        builder.add_node((b'2000', ), b'data')
        builder.add_node((b'2001', ), b'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=3\n"
            b"2000\x00\x00\x00data\n"
            b"2001\x00\x00\x00data\n"
            b"2002\x00\x00\x00data\n"
            b"\n", contents)

    def test_build_index_2_element_key_nodes_sorted(self):
        # multiple element keys are sorted first-key, second-key.
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        # use three values of each key element, to have a good chance of
        # glitching dictionary hash lookups etc. Insert in randomish order that
        # is not correct and not the reverse of the correct order.
        builder.add_node((b'2002', b'2002'), b'data')
        builder.add_node((b'2002', b'2000'), b'data')
        builder.add_node((b'2002', b'2001'), b'data')
        builder.add_node((b'2000', b'2002'), b'data')
        builder.add_node((b'2000', b'2000'), b'data')
        builder.add_node((b'2000', b'2001'), b'data')
        builder.add_node((b'2001', b'2002'), b'data')
        builder.add_node((b'2001', b'2000'), b'data')
        builder.add_node((b'2001', b'2001'), b'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=9\n"
            b"2000\x002000\x00\x00\x00data\n"
            b"2000\x002001\x00\x00\x00data\n"
            b"2000\x002002\x00\x00\x00data\n"
            b"2001\x002000\x00\x00\x00data\n"
            b"2001\x002001\x00\x00\x00data\n"
            b"2001\x002002\x00\x00\x00data\n"
            b"2002\x002000\x00\x00\x00data\n"
            b"2002\x002001\x00\x00\x00data\n"
            b"2002\x002002\x00\x00\x00data\n"
            b"\n", contents)

    def test_build_index_reference_lists_are_included_one(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'key', ), b'data', ([], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            b"key\x00\x00\x00data\n"
            b"\n", contents)

    def test_build_index_reference_lists_with_2_element_keys(self):
        builder = _mod_index.GraphIndexBuilder(
            reference_lists=1, key_elements=2)
        builder.add_node((b'key', b'key2'), b'data', ([], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=2\nlen=1\n"
            b"key\x00key2\x00\x00\x00data\n"
            b"\n", contents)

    def test_build_index_reference_lists_are_included_two(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        builder.add_node((b'key', ), b'data', ([], []))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
            b"key\x00\x00\t\x00data\n"
            b"\n", contents)

    def test_clear_cache(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        # This is a no-op, but the api should exist.
        builder.clear_cache()
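
    # In the rows asserted below, each reference list is a \r-delimited list
    # of the byte offsets of the referenced rows, reference lists are
    # separated by \t, and a key that is only referenced (never added) is
    # written as an "absent" row flagged with 'a' and an empty value.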

    def test_node_references_are_byte_offsets(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'reference', ), b'data', ([], ))
        builder.add_node((b'key', ), b'data', ([(b'reference', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=2\n"
            b"key\x00\x0072\x00data\n"
            b"reference\x00\x00\x00data\n"
            b"\n", contents)

    def test_node_references_are_cr_delimited(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'reference', ), b'data', ([], ))
        builder.add_node((b'reference2', ), b'data', ([], ))
        builder.add_node((b'key', ), b'data',
                         ([(b'reference', ), (b'reference2', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=3\n"
            b"key\x00\x00077\r094\x00data\n"
            b"reference\x00\x00\x00data\n"
            b"reference2\x00\x00\x00data\n"
            b"\n", contents)

    def test_multiple_reference_lists_are_tab_delimited(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        builder.add_node((b'keference', ), b'data', ([], []))
        builder.add_node((b'rey', ), b'data',
                         ([(b'keference', )], [(b'keference', )]))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=2\n"
            b"keference\x00\x00\t\x00data\n"
            b"rey\x00\x0059\t59\x00data\n"
            b"\n", contents)

    def test_add_node_referencing_missing_key_makes_absent(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'rey', ), b'data',
                         ([(b'beference', ), (b'aeference2', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            b"aeference2\x00a\x00\x00\n"
            b"beference\x00a\x00\x00\n"
            b"rey\x00\x00074\r059\x00data\n"
            b"\n", contents)

    def test_node_references_three_digits(self):
        # test the node digit expands as needed.
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        references = [((b"%d" % val), ) for val in range(8, -1, -1)]
        builder.add_node((b'2-key', ), b'', (references, ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqualDiff(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            b"0\x00a\x00\x00\n"
            b"1\x00a\x00\x00\n"
            b"2\x00a\x00\x00\n"
            b"2-key\x00\x00151\r145\r139\r133\r127\r121\r071\r065\r059\x00\n"
            b"3\x00a\x00\x00\n"
            b"4\x00a\x00\x00\n"
            b"5\x00a\x00\x00\n"
            b"6\x00a\x00\x00\n"
            b"7\x00a\x00\x00\n"
            b"8\x00a\x00\x00\n"
            b"\n", contents)

    def test_absent_has_no_reference_overhead(self):
        # the offsets after an absent record should be correct when there are
        # >1 reference lists.
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        builder.add_node((b'parent', ), b'', ([(b'aail', ), (b'zther', )], []))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
            b"aail\x00a\x00\x00\n"
            b"parent\x00\x0059\r84\t\x00\n"
            b"zther\x00a\x00\x00\n"
            b"\n", contents)
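
    # Note that even with reference_lists=2 the absent rows above carry no
    # reference-list field at all (no \t), so they stay short and the byte
    # offsets of the rows that follow them remain small.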

    def test_add_node_bad_key(self):
        builder = _mod_index.GraphIndexBuilder()
        for bad_char in bytearray(b'\t\n\x0b\x0c\r\x00 '):
            self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                              (b'a%skey' % int2byte(bad_char), ), b'data')
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                          b'not-a-tuple', b'data')
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                          (b'primary', b'secondary'), b'data')
        # secondary key elements get checked too:
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        for bad_char in bytearray(b'\t\n\x0b\x0c\r\x00 '):
            self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                              (b'prefix', b'a%skey' % int2byte(bad_char)), b'data')

    def test_add_node_bad_data(self):
        builder = _mod_index.GraphIndexBuilder()
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),

    def test_add_node_bad_mismatched_ref_lists_length(self):
        builder = _mod_index.GraphIndexBuilder()
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa', ([], []))
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa', ([], [], []))

    def test_add_node_bad_key_in_reference_lists(self):
        # first list, first key - trivial
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([(b'a key', )], ))
        # reference keys must be tuples too
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', (['not-a-tuple'], ))
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([()], ))
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([(b'primary', b'secondary')], ))
        # need to check more than the first key in the list
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([(b'agoodkey', ), (b'that is a bad key', )], ))
        # and if there is more than one list it should be getting checked too
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([], ['a bad key']))

    def test_add_duplicate_key(self):
        builder = _mod_index.GraphIndexBuilder()
        builder.add_node((b'key', ), b'data')
        self.assertRaises(_mod_index.BadIndexDuplicateKey,
                          builder.add_node, (b'key', ), b'data')

    def test_add_duplicate_key_2_elements(self):
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        builder.add_node((b'key', b'key'), b'data')
        self.assertRaises(_mod_index.BadIndexDuplicateKey, builder.add_node,
                          (b'key', b'key'), b'data')

    def test_add_key_after_referencing_key(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'key', ), b'data', ([(b'reference', )], ))
        builder.add_node((b'reference', ), b'data', ([],))

    def test_add_key_after_referencing_key_2_elements(self):
        builder = _mod_index.GraphIndexBuilder(
            reference_lists=1, key_elements=2)
        builder.add_node((b'k', b'ey'), b'data',
                         ([(b'reference', b'tokey')], ))
        builder.add_node((b'reference', b'tokey'), b'data', ([],))

    def test_set_optimize(self):
        builder = _mod_index.GraphIndexBuilder(
            reference_lists=1, key_elements=2)
        builder.set_optimize(for_size=True)
        self.assertTrue(builder._optimize_for_size)
        builder.set_optimize(for_size=False)
        self.assertFalse(builder._optimize_for_size)
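

# The GraphIndex tests below read indexes back through a transport.  Many of
# them drive the partial-read path (_lookup_keys_via_location) directly and
# then assert on the transport activity log and on the _parsed_byte_map /
# _parsed_key_map attributes, so they are sensitive to the exact sizes of the
# keys and values produced by make_key() and make_value().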

class TestGraphIndex(tests.TestCaseWithMemoryTransport):

    def make_key(self, number):
        return ((b'%d' % number) + b'X' * 100,)

    def make_value(self, number):
        return (b'%d' % number) + b'Y' * 100

    def make_nodes(self, count=64):
        # generate a big enough index that we only read some of it on a typical
        # lookup.
        nodes = []
        for counter in range(count):
            nodes.append(
                (self.make_key(counter), self.make_value(counter), ()))
        return nodes

    def make_index(self, ref_lists=0, key_elements=1, nodes=[]):
        builder = _mod_index.GraphIndexBuilder(
            ref_lists, key_elements=key_elements)
        for key, value, references in nodes:
            builder.add_node(key, value, references)
        stream = builder.finish()
        trans = transport.get_transport_from_url('trace+' + self.get_url())
        size = trans.put_file('index', stream)
        return _mod_index.GraphIndex(trans, 'index', size)

    def make_index_with_offset(self, ref_lists=0, key_elements=1, nodes=[],
                               offset=0):
        builder = _mod_index.GraphIndexBuilder(
            ref_lists, key_elements=key_elements)
        for key, value, references in nodes:
            builder.add_node(key, value, references)
        content = builder.finish().read()
        size = len(content)
        trans = self.get_transport()
        trans.put_bytes('index', (b' ' * offset) + content)
        return _mod_index.GraphIndex(trans, 'index', size, offset=offset)
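
    # make_index() wraps the test transport in the 'trace+' decorator, so the
    # tests below can inspect index._transport._activity to see exactly which
    # readv/get requests a lookup issued.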

    def test_clear_cache(self):
        index = self.make_index()
        # For now, we just want to make sure the api is available. As this is
        # old code, we don't really worry if it *does* anything.
        index.clear_cache()

    def test_open_bad_index_no_error(self):
        trans = self.get_transport()
        trans.put_bytes('name', b"not an index\n")
        idx = _mod_index.GraphIndex(trans, 'name', 13)

    def test_with_offset(self):
        nodes = self.make_nodes(200)
        idx = self.make_index_with_offset(offset=1234567, nodes=nodes)
        self.assertEqual(200, idx.key_count())

    def test_buffer_all_with_offset(self):
        nodes = self.make_nodes(200)
        idx = self.make_index_with_offset(offset=1234567, nodes=nodes)
        idx._buffer_all()
        self.assertEqual(200, idx.key_count())

    def test_side_effect_buffering_with_offset(self):
        nodes = self.make_nodes(20)
        index = self.make_index_with_offset(offset=1234567, nodes=nodes)
        index._transport.recommended_page_size = lambda: 64 * 1024
        subset_nodes = [nodes[0][0], nodes[10][0], nodes[19][0]]
        entries = [n[1] for n in index.iter_entries(subset_nodes)]
        self.assertEqual(sorted(subset_nodes), sorted(entries))
        self.assertEqual(20, index.key_count())

    def test_open_sets_parsed_map_empty(self):
        index = self.make_index()
        self.assertEqual([], index._parsed_byte_map)
        self.assertEqual([], index._parsed_key_map)

    def test_key_count_buffers(self):
        index = self.make_index(nodes=self.make_nodes(2))
        # reset the transport log
        del index._transport._activity[:]
        self.assertEqual(2, index.key_count())
        # We should have requested reading the header bytes
        self.assertEqual([
            ('readv', 'index', [(0, 200)], True, index._size),
            ],
            index._transport._activity)
        # And that should have been enough to trigger reading the whole index
        self.assertIsNot(None, index._nodes)

    def test_lookup_key_via_location_buffers(self):
        index = self.make_index()
        # reset the transport log
        del index._transport._activity[:]
        # do a _lookup_keys_via_location call for the middle of the file, which
        # is what bisection uses.
        result = index._lookup_keys_via_location(
            [(index._size // 2, (b'missing', ))])
        # this should have asked for a readv request, with adjust_for_latency,
        # and two regions: the header, and half-way into the file.
        self.assertEqual([
            ('readv', 'index', [(30, 30), (0, 200)], True, 60),
            ],
            index._transport._activity)
        # and the result should be that the key cannot be present, because this
        # is a trivial index.
        self.assertEqual([((index._size // 2, (b'missing', )), False)],
                         result)
        # And this should have caused the file to be fully buffered
        self.assertIsNot(None, index._nodes)
        self.assertEqual([], index._parsed_byte_map)

    def test_first_lookup_key_via_location(self):
        # We need enough data so that the _HEADER_READV doesn't consume the
        # whole file. We always read 800 bytes for every key, and the local
        # transport natural expansion is 4096 bytes. So we have to have >8192
        # bytes or we will trigger "buffer_all".
        # We also want the 'missing' key to fall within the range that *did*
        # get read.
        index = self.make_index(nodes=self.make_nodes(64))
        # reset the transport log
        del index._transport._activity[:]
        # do a _lookup_keys_via_location call for the middle of the file, which
        # is what bisection uses.
        start_lookup = index._size // 2
        result = index._lookup_keys_via_location(
            [(start_lookup, (b'40missing', ))])
        # this should have asked for a readv request, with adjust_for_latency,
        # and two regions: the header, and half-way into the file.
        self.assertEqual([
            ('readv', 'index',
             [(start_lookup, 800), (0, 200)], True, index._size),
            ],
            index._transport._activity)
        # and the result should be that the key cannot be present, because this
        # is a trivial index.
        self.assertEqual([((start_lookup, (b'40missing', )), False)],
                         result)
        # And this should not have caused the file to be fully buffered
        self.assertIs(None, index._nodes)
        # And the regions of the file that have been parsed should be in the
        # parsed_byte_map and the parsed_key_map
        self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(26)),
                          (self.make_key(31), self.make_key(48))],
                         index._parsed_key_map)
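
    # _parsed_byte_map records the (start, end) byte ranges of the file that
    # have been parsed so far, and _parsed_key_map records the corresponding
    # (first key, last key) bounds for each range; the assertions above and
    # below rely on those two lists staying in sync.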

    def test_parsing_non_adjacent_data_trims(self):
        index = self.make_index(nodes=self.make_nodes(64))
        result = index._lookup_keys_via_location(
            [(index._size // 2, (b'40', ))])
        # and the result should be that the key cannot be present, because key
        # is in the middle of the observed data from a 4K read - the smallest
        # transport will do today with this api.
        self.assertEqual([((index._size // 2, (b'40', )), False)],
                         result)
        # and we should have a parse map that includes the header and the
        # region that was parsed after trimming.
        self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(26)),
                          (self.make_key(31), self.make_key(48))],
                         index._parsed_key_map)

    def test_parsing_data_handles_parsed_contained_regions(self):
        # the following pattern creates a parsed region that is wholly within a
        # single result from the readv layer:
        # .... single-read (readv-minimum-size) ...
        # which then trims the start and end so the parsed size is < readv
        # minimum size.
        # then a dual lookup (or a reference lookup for that matter) which
        # abuts or overlaps the parsed region on both sides will need to
        # discard the data in the middle, but parse the end as well.
        #
        # we test this by doing a single lookup to seed the data, then
        # a lookup for two keys that are present, and adjacent -
        # we expect both to be found, and the parsed byte map to include the
        # locations of both keys.
        index = self.make_index(nodes=self.make_nodes(128))
        result = index._lookup_keys_via_location(
            [(index._size // 2, (b'40', ))])
        # and we should have a parse map that includes the header and the
        # region that was parsed after trimming.
        self.assertEqual([(0, 4045), (11759, 15707)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(116)),
                          (self.make_key(35), self.make_key(51))],
                         index._parsed_key_map)
        # now ask for two keys, right before and after the parsed region
        result = index._lookup_keys_via_location(
            [(11450, self.make_key(34)), (15707, self.make_key(52))])
        self.assertEqual([
            ((11450, self.make_key(34)),
             (index, self.make_key(34), self.make_value(34))),
            ((15707, self.make_key(52)),
             (index, self.make_key(52), self.make_value(52))),
            ],
            result)
        self.assertEqual([(0, 4045), (9889, 17993)], index._parsed_byte_map)

    def test_lookup_missing_key_answers_without_io_when_map_permits(self):
        # generate a big enough index that we only read some of it on a typical
        # lookup.
        index = self.make_index(nodes=self.make_nodes(64))
        # lookup the keys in the middle of the file
        result = index._lookup_keys_via_location(
            [(index._size // 2, (b'40', ))])
        # check the parse map, this determines the test validity
        self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(26)),
                          (self.make_key(31), self.make_key(48))],
                         index._parsed_key_map)
        # reset the transport log
        del index._transport._activity[:]
        # now looking up a key in the portion of the file already parsed should
        # not create a new transport request, and should return False (cannot
        # be in the index) - even when the byte location we ask for is outside
        # the parsed map.
        result = index._lookup_keys_via_location(
            [(4000, (b'40', ))])
        self.assertEqual([((4000, (b'40', )), False)],
                         result)
        self.assertEqual([], index._transport._activity)

    def test_lookup_present_key_answers_without_io_when_map_permits(self):
        # generate a big enough index that we only read some of it on a typical
        # lookup.
        index = self.make_index(nodes=self.make_nodes(64))
        # lookup the keys in the middle of the file
        result = index._lookup_keys_via_location(
            [(index._size // 2, (b'40', ))])
        # check the parse map, this determines the test validity
        self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(26)),
                          (self.make_key(31), self.make_key(48))],
                         index._parsed_key_map)
        # reset the transport log
        del index._transport._activity[:]
        # now looking up a key in the portion of the file already parsed should
        # not create a new transport request, and should return the entry -
        # even when the byte location we ask for is outside the parsed map.
        result = index._lookup_keys_via_location([(4000, self.make_key(40))])
        self.assertEqual(
            [((4000, self.make_key(40)),
              (index, self.make_key(40), self.make_value(40)))],
            result)
        self.assertEqual([], index._transport._activity)

    def test_lookup_key_below_probed_area(self):
        # generate a big enough index that we only read some of it on a typical
        # lookup.
        index = self.make_index(nodes=self.make_nodes(64))
        # ask for the key in the middle, but a key that is located in the
        # unparsed region before the middle.
        result = index._lookup_keys_via_location(
            [(index._size // 2, (b'30', ))])
        # check the parse map, this determines the test validity
        self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(26)),
                          (self.make_key(31), self.make_key(48))],
                         index._parsed_key_map)
        self.assertEqual([((index._size // 2, (b'30', )), -1)],
                         result)

    def test_lookup_key_above_probed_area(self):
        # generate a big enough index that we only read some of it on a typical
        # lookup.
        index = self.make_index(nodes=self.make_nodes(64))
        # ask for the key in the middle, but a key that is located in the
        # unparsed region after the middle.
        result = index._lookup_keys_via_location(
            [(index._size // 2, (b'50', ))])
        # check the parse map, this determines the test validity
        self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(26)),
                          (self.make_key(31), self.make_key(48))],
                         index._parsed_key_map)
        self.assertEqual([((index._size // 2, (b'50', )), +1)],
                         result)

    def test_lookup_key_resolves_references(self):
        # generate a big enough index that we only read some of it on a typical
        # lookup.
        nodes = []
        for counter in range(99):
            nodes.append((self.make_key(counter), self.make_value(counter),
                          ((self.make_key(counter + 20),),)))
        index = self.make_index(ref_lists=1, nodes=nodes)
        # lookup a key in the middle that does not exist, so that we can
        # check that the referred-to-keys are not accessed automatically.
        index_size = index._size
        index_center = index_size // 2
        result = index._lookup_keys_via_location(
            [(index_center, (b'40', ))])
        # check the parse map - only the start and middle should have been
        # parsed.
        self.assertEqual([(0, 4027), (10198, 14028)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(17)),
                          (self.make_key(44), self.make_key(5))],
                         index._parsed_key_map)
        # and check the transport activity likewise.
        self.assertEqual(
            [('readv', 'index', [(index_center, 800), (0, 200)], True,
              index_size)],
            index._transport._activity)
        # reset the transport log for testing the reference lookup
        del index._transport._activity[:]
        # now looking up a key in the portion of the file already parsed should
        # only perform IO to resolve its key references.
        result = index._lookup_keys_via_location([(11000, self.make_key(45))])
        self.assertEqual(
            [((11000, self.make_key(45)),
              (index, self.make_key(45), self.make_value(45),
               ((self.make_key(65),),)))],
            result)
        self.assertEqual([('readv', 'index', [(15093, 800)], True, index_size)],
                         index._transport._activity)

    def test_lookup_key_can_buffer_all(self):
        nodes = []
        for counter in range(64):
            nodes.append((self.make_key(counter), self.make_value(counter),
                          ((self.make_key(counter + 20),),)))
        index = self.make_index(ref_lists=1, nodes=nodes)
        # lookup a key in the middle that does not exist, so that we can
        # check that the referred-to-keys are not accessed automatically.
        index_size = index._size
        index_center = index_size // 2
        result = index._lookup_keys_via_location([(index_center, (b'40', ))])
        # check the parse map - only the start and middle should have been
        # parsed.
        self.assertEqual([(0, 3890), (6444, 10274)], index._parsed_byte_map)
        self.assertEqual([((), self.make_key(25)),
                          (self.make_key(37), self.make_key(52))],
                         index._parsed_key_map)
        # and check the transport activity likewise.
        self.assertEqual(
            [('readv', 'index', [(index_center, 800), (0, 200)], True,
              index_size)],
            index._transport._activity)
        # reset the transport log for testing the reference lookup
        del index._transport._activity[:]
        # now looking up a key in the portion of the file already parsed should
        # only perform IO to resolve its key references.
        result = index._lookup_keys_via_location([(7000, self.make_key(40))])
        self.assertEqual(
            [((7000, self.make_key(40)),
              (index, self.make_key(40), self.make_value(40),
               ((self.make_key(60),),)))],
            result)
        # Resolving the references would have required more data read, and we
        # are already above the 50% threshold, so it triggered a _buffer_all
        self.assertEqual([('get', 'index')], index._transport._activity)
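
    # Once satisfying a lookup would require reading more than about half of
    # the index, GraphIndex gives up on partial reads and buffers the whole
    # file; that shows up in the trace log as a single ('get', 'index')
    # request rather than further readv calls.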

    def test_iter_all_entries_empty(self):
        index = self.make_index()
        self.assertEqual([], list(index.iter_all_entries()))

    def test_iter_all_entries_simple(self):
        index = self.make_index(nodes=[((b'name', ), b'data', ())])
        self.assertEqual([(index, (b'name', ), b'data')],
                         list(index.iter_all_entries()))

    def test_iter_all_entries_simple_2_elements(self):
        index = self.make_index(key_elements=2,
                                nodes=[((b'name', b'surname'), b'data', ())])
        self.assertEqual([(index, (b'name', b'surname'), b'data')],
                         list(index.iter_all_entries()))

    def test_iter_all_entries_references_resolved(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], )),
            ((b'ref', ), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
                          (index, (b'ref', ), b'refdata', ((), ))},
                         set(index.iter_all_entries()))

    def test_iter_entries_buffers_once(self):
        index = self.make_index(nodes=self.make_nodes(2))
        # reset the transport log
        del index._transport._activity[:]
        self.assertEqual({(index, self.make_key(1), self.make_value(1))},
                         set(index.iter_entries([self.make_key(1)])))
        # We should have requested reading the header bytes
        # But not needed any more than that because it would have triggered a
        # buffer_all.
        self.assertEqual([
            ('readv', 'index', [(0, 200)], True, index._size),
            ],
            index._transport._activity)
        # And that should have been enough to trigger reading the whole index
        self.assertIsNot(None, index._nodes)

    def test_iter_entries_buffers_by_bytes_read(self):
        index = self.make_index(nodes=self.make_nodes(64))
        list(index.iter_entries([self.make_key(10)]))
        # The first time through isn't enough to trigger a buffer all
        self.assertIs(None, index._nodes)
        self.assertEqual(4096, index._bytes_read)
        # Grabbing a key in that same page won't trigger a buffer all, as we
        # still haven't read 50% of the file
        list(index.iter_entries([self.make_key(11)]))
        self.assertIs(None, index._nodes)
        self.assertEqual(4096, index._bytes_read)
        # We haven't read more data, so reading outside the range won't trigger
        # a buffer all right away
        list(index.iter_entries([self.make_key(40)]))
        self.assertIs(None, index._nodes)
        self.assertEqual(8192, index._bytes_read)
        # On the next pass, we will not trigger buffer all if the key is
        # available without reading more
        list(index.iter_entries([self.make_key(32)]))
        self.assertIs(None, index._nodes)
        # But if we *would* need to read more to resolve it, then we will
        # buffer all.
        list(index.iter_entries([self.make_key(60)]))
        self.assertIsNot(None, index._nodes)

    def test_iter_entries_references_resolved(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', ), (b'ref', )], )),
            ((b'ref', ), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',), (b'ref',)),)),
                          (index, (b'ref', ), b'refdata', ((), ))},
                         set(index.iter_entries([(b'name',), (b'ref',)])))

    def test_iter_entries_references_2_refs_resolved(self):
        index = self.make_index(2, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], [(b'ref', )])),
            ((b'ref', ), b'refdata', ([], []))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),), ((b'ref',),))),
                          (index, (b'ref', ), b'refdata', ((), ()))},
                         set(index.iter_entries([(b'name',), (b'ref',)])))

    def test_iteration_absent_skipped(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
                         set(index.iter_all_entries()))
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
                         set(index.iter_entries([(b'name', )])))
        self.assertEqual([], list(index.iter_entries([(b'ref', )])))

    def test_iteration_absent_skipped_2_element_keys(self):
        index = self.make_index(1, key_elements=2, nodes=[
            ((b'name', b'fin'), b'data', ([(b'ref', b'erence')], ))])
        self.assertEqual([(index, (b'name', b'fin'), b'data', (((b'ref', b'erence'),),))],
                         list(index.iter_all_entries()))
        self.assertEqual([(index, (b'name', b'fin'), b'data', (((b'ref', b'erence'),),))],
                         list(index.iter_entries([(b'name', b'fin')])))
        self.assertEqual([], list(index.iter_entries([(b'ref', b'erence')])))

    def test_iter_all_keys(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], )),
            ((b'ref', ), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
                          (index, (b'ref', ), b'refdata', ((), ))},
                         set(index.iter_entries([(b'name', ), (b'ref', )])))

    def test_iter_nothing_empty(self):
        index = self.make_index()
        self.assertEqual([], list(index.iter_entries([])))

    def test_iter_missing_entry_empty(self):
        index = self.make_index()
        self.assertEqual([], list(index.iter_entries([(b'a', )])))

    def test_iter_missing_entry_empty_no_size(self):
        idx = self.make_index()
        idx = _mod_index.GraphIndex(idx._transport, 'index', None)
        self.assertEqual([], list(idx.iter_entries([(b'a', )])))

    def test_iter_key_prefix_1_element_key_None(self):
        index = self.make_index()
        self.assertRaises(_mod_index.BadIndexKey, list,
                          index.iter_entries_prefix([(None, )]))

    def test_iter_key_prefix_wrong_length(self):
        index = self.make_index()
        self.assertRaises(_mod_index.BadIndexKey, list,
                          index.iter_entries_prefix([(b'foo', None)]))
        index = self.make_index(key_elements=2)
        self.assertRaises(_mod_index.BadIndexKey, list,
                          index.iter_entries_prefix([(b'foo', )]))
        self.assertRaises(_mod_index.BadIndexKey, list,
                          index.iter_entries_prefix([(b'foo', None, None)]))

    def test_iter_key_prefix_1_key_element_no_refs(self):
        index = self.make_index(nodes=[
            ((b'name', ), b'data', ()),
            ((b'ref', ), b'refdata', ())])
        self.assertEqual({(index, (b'name', ), b'data'),
                          (index, (b'ref', ), b'refdata')},
                         set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))

    def test_iter_key_prefix_1_key_element_refs(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], )),
            ((b'ref', ), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
                          (index, (b'ref', ), b'refdata', ((), ))},
                         set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))

    def test_iter_key_prefix_2_key_element_no_refs(self):
        index = self.make_index(key_elements=2, nodes=[
            ((b'name', b'fin1'), b'data', ()),
            ((b'name', b'fin2'), b'beta', ()),
            ((b'ref', b'erence'), b'refdata', ())])
        self.assertEqual({(index, (b'name', b'fin1'), b'data'),
                          (index, (b'ref', b'erence'), b'refdata')},
                         set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
        self.assertEqual({(index, (b'name', b'fin1'), b'data'),
                          (index, (b'name', b'fin2'), b'beta')},
                         set(index.iter_entries_prefix([(b'name', None)])))

    def test_iter_key_prefix_2_key_element_refs(self):
        index = self.make_index(1, key_elements=2, nodes=[
            ((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], )),
            ((b'name', b'fin2'), b'beta', ([], )),
            ((b'ref', b'erence'), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
                          (index, (b'ref', b'erence'), b'refdata', ((), ))},
                         set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
        self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
                          (index, (b'name', b'fin2'), b'beta', ((), ))},
                         set(index.iter_entries_prefix([(b'name', None)])))
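
    # iter_entries_prefix accepts either complete keys or, for multi-element
    # keys, a tuple whose trailing elements are None; (b'name', None) above
    # matches every key whose first element is b'name'.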

    def test_key_count_empty(self):
        index = self.make_index()
        self.assertEqual(0, index.key_count())

    def test_key_count_one(self):
        index = self.make_index(nodes=[((b'name', ), b'', ())])
        self.assertEqual(1, index.key_count())

    def test_key_count_two(self):
        index = self.make_index(nodes=[
            ((b'name', ), b'', ()), ((b'foo', ), b'', ())])
        self.assertEqual(2, index.key_count())

    def test_read_and_parse_tracks_real_read_value(self):
        index = self.make_index(nodes=self.make_nodes(10))
        del index._transport._activity[:]
        index._read_and_parse([(0, 200)])
        self.assertEqual([
            ('readv', 'index', [(0, 200)], True, index._size),
            ],
            index._transport._activity)
        # The readv expansion code will expand the initial request to 4096
        # bytes, which is more than enough to read the entire index, and we
        # will track the fact that we read that many bytes.
        self.assertEqual(index._size, index._bytes_read)

    def test_read_and_parse_triggers_buffer_all(self):
        index = self.make_index(key_elements=2, nodes=[
            ((b'name', b'fin1'), b'data', ()),
            ((b'name', b'fin2'), b'beta', ()),
            ((b'ref', b'erence'), b'refdata', ())])
        self.assertTrue(index._size > 0)
        self.assertIs(None, index._nodes)
        index._read_and_parse([(0, index._size)])
        self.assertIsNot(None, index._nodes)

    def test_validate_bad_index_errors(self):
        trans = self.get_transport()
        trans.put_bytes('name', b"not an index\n")
        idx = _mod_index.GraphIndex(trans, 'name', 13)
        self.assertRaises(_mod_index.BadIndexFormatSignature, idx.validate)

    def test_validate_bad_node_refs(self):
        idx = self.make_index(2)
        trans = self.get_transport()
        content = trans.get_bytes('index')
        # change the options line to end with 'a' rather than a parseable
        # number
        new_content = content[:-2] + b'a\n\n'
        trans.put_bytes('index', new_content)
        self.assertRaises(_mod_index.BadIndexOptions, idx.validate)

    def test_validate_missing_end_line_empty(self):
        index = self.make_index(2)
        trans = self.get_transport()
        content = trans.get_bytes('index')
        # truncate the last byte
        trans.put_bytes('index', content[:-1])
        self.assertRaises(_mod_index.BadIndexData, index.validate)

    def test_validate_missing_end_line_nonempty(self):
        index = self.make_index(2, nodes=[((b'key', ), b'', ([], []))])
        trans = self.get_transport()
        content = trans.get_bytes('index')
        # truncate the last byte
        trans.put_bytes('index', content[:-1])
        self.assertRaises(_mod_index.BadIndexData, index.validate)

    def test_validate_empty(self):
        index = self.make_index()
        index.validate()

    def test_validate_no_refs_content(self):
        index = self.make_index(nodes=[((b'key', ), b'value', ())])
        index.validate()

    # XXX: external_references tests are duplicated in test_btree_index. We
    # probably should have per_graph_index tests...
    def test_external_references_no_refs(self):
        index = self.make_index(ref_lists=0, nodes=[])
        self.assertRaises(ValueError, index.external_references, 0)

    def test_external_references_no_results(self):
        index = self.make_index(ref_lists=1, nodes=[
            ((b'key',), b'value', ([],))])
        self.assertEqual(set(), index.external_references(0))

    def test_external_references_missing_ref(self):
        missing_key = (b'missing',)
        index = self.make_index(ref_lists=1, nodes=[
            ((b'key',), b'value', ([missing_key],))])
        self.assertEqual({missing_key}, index.external_references(0))

    def test_external_references_multiple_ref_lists(self):
        missing_key = (b'missing',)
        index = self.make_index(ref_lists=2, nodes=[
            ((b'key',), b'value', ([], [missing_key]))])
        self.assertEqual(set([]), index.external_references(0))
        self.assertEqual({missing_key}, index.external_references(1))

    def test_external_references_two_records(self):
        index = self.make_index(ref_lists=1, nodes=[
            ((b'key-1',), b'value', ([(b'key-2',)],)),
            ((b'key-2',), b'value', ([],)),
            ])
        self.assertEqual(set([]), index.external_references(0))

    def test__find_ancestors(self):
        key1 = (b'key-1',)
        key2 = (b'key-2',)
        index = self.make_index(ref_lists=1, key_elements=1, nodes=[
            (key1, b'value', ([key2],)),
            (key2, b'value', ([],)),
            ])
        parent_map = {}
        missing_keys = set()
        search_keys = index._find_ancestors(
            [key1], 0, parent_map, missing_keys)
        self.assertEqual({key1: (key2,)}, parent_map)
        self.assertEqual(set(), missing_keys)
        self.assertEqual({key2}, search_keys)
        search_keys = index._find_ancestors(search_keys, 0, parent_map,
                                            missing_keys)
        self.assertEqual({key1: (key2,), key2: ()}, parent_map)
        self.assertEqual(set(), missing_keys)
        self.assertEqual(set(), search_keys)

    def test__find_ancestors_w_missing(self):
        key1 = (b'key-1',)
        key2 = (b'key-2',)
        key3 = (b'key-3',)
        index = self.make_index(ref_lists=1, key_elements=1, nodes=[
            (key1, b'value', ([key2],)),
            (key2, b'value', ([],)),
            ])
        parent_map = {}
        missing_keys = set()
        search_keys = index._find_ancestors([key2, key3], 0, parent_map,
                                            missing_keys)
        self.assertEqual({key2: ()}, parent_map)
        self.assertEqual({key3}, missing_keys)
        self.assertEqual(set(), search_keys)

    def test__find_ancestors_dont_search_known(self):
        key1 = (b'key-1',)
        key2 = (b'key-2',)
        key3 = (b'key-3',)
        index = self.make_index(ref_lists=1, key_elements=1, nodes=[
            (key1, b'value', ([key2],)),
            (key2, b'value', ([key3],)),
            (key3, b'value', ([],)),
            ])
        # We already know about key2, so we won't try to search for key3
        parent_map = {key2: (key3,)}
        missing_keys = set()
        search_keys = index._find_ancestors([key1], 0, parent_map,
                                            missing_keys)
        self.assertEqual({key1: (key2,), key2: (key3,)}, parent_map)
        self.assertEqual(set(), missing_keys)
        self.assertEqual(set(), search_keys)

    def test_supports_unlimited_cache(self):
        builder = _mod_index.GraphIndexBuilder(0, key_elements=1)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file('index', stream)
        # It doesn't matter what unlimited_cache does here, just that it can
        # be passed.
        idx = _mod_index.GraphIndex(trans, 'index', size, unlimited_cache=True)


class TestCombinedGraphIndex(tests.TestCaseWithMemoryTransport):

    def make_index(self, name, ref_lists=0, key_elements=1, nodes=[]):
        builder = _mod_index.GraphIndexBuilder(
            ref_lists, key_elements=key_elements)
        for key, value, references in nodes:
            builder.add_node(key, value, references)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file(name, stream)
        return _mod_index.GraphIndex(trans, name, size)

    def make_combined_index_with_missing(self, missing=['1', '2']):
        """Create a CombinedGraphIndex which will have missing indexes.

        This creates a CGI which thinks it has 2 indexes, however they have
        been deleted. If CGI._reload_func() is called, then it will repopulate
        with a new index.

        :param missing: The underlying indexes to delete
        :return: (CombinedGraphIndex, reload_counter)
        """
        idx1 = self.make_index('1', nodes=[((b'1',), b'', ())])
        idx2 = self.make_index('2', nodes=[((b'2',), b'', ())])
        idx3 = self.make_index('3', nodes=[
            ((b'1',), b'', ()),
            ((b'2',), b'', ())])

        # total_reloads, num_changed, num_unchanged
        reload_counter = [0, 0, 0]

        def reload():
            reload_counter[0] += 1
            new_indices = [idx3]
            if idx._indices == new_indices:
                reload_counter[2] += 1
                return False
            reload_counter[1] += 1
            idx._indices[:] = new_indices
            return True
        idx = _mod_index.CombinedGraphIndex([idx1, idx2], reload_func=reload)
        trans = self.get_transport()
        for fname in missing:
            trans.delete(fname)
        return idx, reload_counter
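
    # reload_counter is [total reloads, reloads that swapped in the new index
    # list, reloads that found the list already up to date]; the tests below
    # use it to check how many times the reload_func fired and whether it
    # actually had to change anything.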

    def test_open_missing_index_no_error(self):
        trans = self.get_transport()
        idx1 = _mod_index.GraphIndex(trans, 'missing', 100)
        idx = _mod_index.CombinedGraphIndex([idx1])

    def test_add_index(self):
        idx = _mod_index.CombinedGraphIndex([])
        idx1 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
        idx.insert_index(0, idx1)
        self.assertEqual([(idx1, (b'key', ), b'')],
                         list(idx.iter_all_entries()))

    def test_clear_cache(self):
        log = []

        class ClearCacheProxy(object):

            def __init__(self, index):
                self._index = index

            def __getattr__(self, name):
                return getattr(self._index, name)

            def clear_cache(self):
                log.append(self._index)
                return self._index.clear_cache()

        idx = _mod_index.CombinedGraphIndex([])
        idx1 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
        idx.insert_index(0, ClearCacheProxy(idx1))
        idx2 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
        idx.insert_index(1, ClearCacheProxy(idx2))
        # CombinedGraphIndex should call 'clear_cache()' on all children
        idx.clear_cache()
        self.assertEqual(sorted([idx1, idx2]), sorted(log))

    def test_iter_all_entries_empty(self):
        idx = _mod_index.CombinedGraphIndex([])
        self.assertEqual([], list(idx.iter_all_entries()))

    def test_iter_all_entries_children_empty(self):
        idx1 = self.make_index('name')
        idx = _mod_index.CombinedGraphIndex([idx1])
        self.assertEqual([], list(idx.iter_all_entries()))

    def test_iter_all_entries_simple(self):
        idx1 = self.make_index('name', nodes=[((b'name', ), b'data', ())])
        idx = _mod_index.CombinedGraphIndex([idx1])
        self.assertEqual([(idx1, (b'name', ), b'data')],
                         list(idx.iter_all_entries()))

    def test_iter_all_entries_two_indices(self):
        idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
        idx2 = self.make_index('name2', nodes=[((b'2', ), b'', ())])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual([(idx1, (b'name', ), b'data'),
                          (idx2, (b'2', ), b'')],
                         list(idx.iter_all_entries()))

    def test_iter_entries_two_indices_dup_key(self):
        idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
        idx2 = self.make_index('name2', nodes=[((b'name', ), b'data', ())])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual([(idx1, (b'name', ), b'data')],
                         list(idx.iter_entries([(b'name', )])))

    def test_iter_all_entries_two_indices_dup_key(self):
        idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
        idx2 = self.make_index('name2', nodes=[((b'name', ), b'data', ())])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual([(idx1, (b'name', ), b'data')],
                         list(idx.iter_all_entries()))

    def test_iter_key_prefix_2_key_element_refs(self):
        idx1 = self.make_index('1', 1, key_elements=2, nodes=[
            ((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], ))])
        idx2 = self.make_index('2', 1, key_elements=2, nodes=[
            ((b'name', b'fin2'), b'beta', ([], )),
            ((b'ref', b'erence'), b'refdata', ([], ))])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual({(idx1, (b'name', b'fin1'), b'data',
                           (((b'ref', b'erence'),),)),
                          (idx2, (b'ref', b'erence'), b'refdata', ((), ))},
                         set(idx.iter_entries_prefix([(b'name', b'fin1'),
                                                      (b'ref', b'erence')])))
        self.assertEqual({(idx1, (b'name', b'fin1'), b'data',
                           (((b'ref', b'erence'),),)),
                          (idx2, (b'name', b'fin2'), b'beta', ((), ))},
                         set(idx.iter_entries_prefix([(b'name', None)])))

    def test_iter_nothing_empty(self):
        idx = _mod_index.CombinedGraphIndex([])
        self.assertEqual([], list(idx.iter_entries([])))

    def test_iter_nothing_children_empty(self):
        idx1 = self.make_index('name')
        idx = _mod_index.CombinedGraphIndex([idx1])
        self.assertEqual([], list(idx.iter_entries([])))

    def test_iter_all_keys(self):
        idx1 = self.make_index('1', 1, nodes=[((b'name', ), b'data',
                                               ([(b'ref', )], ))])
        idx2 = self.make_index(
            '2', 1, nodes=[((b'ref', ), b'refdata', ((), ))])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual({(idx1, (b'name', ), b'data', (((b'ref', ), ), )),
                          (idx2, (b'ref', ), b'refdata', ((), ))},
                         set(idx.iter_entries([(b'name', ), (b'ref', )])))

    def test_iter_all_keys_dup_entry(self):
        idx1 = self.make_index('1', 1, nodes=[((b'name', ), b'data',
                                               ([(b'ref', )], )),
                                              ((b'ref', ), b'refdata', ([], ))])
        idx2 = self.make_index(
            '2', 1, nodes=[((b'ref', ), b'refdata', ([], ))])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual({(idx1, (b'name', ), b'data', (((b'ref',),),)),
                          (idx1, (b'ref', ), b'refdata', ((), ))},
                         set(idx.iter_entries([(b'name', ), (b'ref', )])))

    def test_iter_missing_entry_empty(self):
        idx = _mod_index.CombinedGraphIndex([])
        self.assertEqual([], list(idx.iter_entries([('a', )])))

    def test_iter_missing_entry_one_index(self):
        idx1 = self.make_index('1')
        idx = _mod_index.CombinedGraphIndex([idx1])
        self.assertEqual([], list(idx.iter_entries([(b'a', )])))

    def test_iter_missing_entry_two_index(self):
        idx1 = self.make_index('1')
        idx2 = self.make_index('2')
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual([], list(idx.iter_entries([('a', )])))

    def test_iter_entry_present_one_index_only(self):
        idx1 = self.make_index('1', nodes=[((b'key', ), b'', ())])
        idx2 = self.make_index('2', nodes=[])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual([(idx1, (b'key', ), b'')],
                         list(idx.iter_entries([(b'key', )])))
        # and in the other direction
        idx = _mod_index.CombinedGraphIndex([idx2, idx1])
        self.assertEqual([(idx1, (b'key', ), b'')],
                         list(idx.iter_entries([(b'key', )])))

    def test_key_count_empty(self):
        idx1 = self.make_index('1', nodes=[])
        idx2 = self.make_index('2', nodes=[])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual(0, idx.key_count())

    def test_key_count_sums_index_keys(self):
        idx1 = self.make_index('1', nodes=[
            ((b'1',), b'', ()),
            ((b'2',), b'', ())])
        idx2 = self.make_index('2', nodes=[((b'1',), b'', ())])
        idx = _mod_index.CombinedGraphIndex([idx1, idx2])
        self.assertEqual(3, idx.key_count())

    def test_validate_bad_child_index_errors(self):
        trans = self.get_transport()
        trans.put_bytes('name', b"not an index\n")
        idx1 = _mod_index.GraphIndex(trans, 'name', 13)
        idx = _mod_index.CombinedGraphIndex([idx1])
        self.assertRaises(_mod_index.BadIndexFormatSignature, idx.validate)

    def test_validate_empty(self):
        idx = _mod_index.CombinedGraphIndex([])
        idx.validate()

    def test_key_count_reloads(self):
        idx, reload_counter = self.make_combined_index_with_missing()
        self.assertEqual(2, idx.key_count())
        self.assertEqual([1, 1, 0], reload_counter)

    def test_key_count_no_reload(self):
        idx, reload_counter = self.make_combined_index_with_missing()
        idx._reload_func = None
        # Without a _reload_func we just raise the exception
        self.assertRaises(errors.NoSuchFile, idx.key_count)

    def test_key_count_reloads_and_fails(self):
        # We have deleted all underlying indexes, so we will try to reload, but
        # still fail. This is mostly to test we don't get stuck in an infinite
        # loop trying to reload
        idx, reload_counter = self.make_combined_index_with_missing(
            ['1', '2', '3'])
        self.assertRaises(errors.NoSuchFile, idx.key_count)
        self.assertEqual([2, 1, 1], reload_counter)

    def test_iter_entries_reloads(self):
        index, reload_counter = self.make_combined_index_with_missing()
        result = list(index.iter_entries([(b'1',), (b'2',), (b'3',)]))
        index3 = index._indices[0]
        self.assertEqual({(index3, (b'1',), b''), (index3, (b'2',), b'')},
                         set(result))
        self.assertEqual([1, 1, 0], reload_counter)

    def test_iter_entries_reloads_midway(self):
        # The first index still looks present, so we get interrupted mid-way
        # through.
        index, reload_counter = self.make_combined_index_with_missing(['2'])
        index1, index2 = index._indices
        result = list(index.iter_entries([(b'1',), (b'2',), (b'3',)]))
        index3 = index._indices[0]
        # We had already yielded b'1', so we just go on to the next, we should
        # not yield b'1' twice.
        self.assertEqual([(index1, (b'1',), b''), (index3, (b'2',), b'')],
                         result)
        self.assertEqual([1, 1, 0], reload_counter)

    def test_iter_entries_no_reload(self):
        index, reload_counter = self.make_combined_index_with_missing()
        index._reload_func = None
        # Without a _reload_func we just raise the exception
        self.assertListRaises(errors.NoSuchFile, index.iter_entries, [('3',)])

    def test_iter_entries_reloads_and_fails(self):
        index, reload_counter = self.make_combined_index_with_missing(
            ['1', '2', '3'])
        self.assertListRaises(errors.NoSuchFile, index.iter_entries, [('3',)])
        self.assertEqual([2, 1, 1], reload_counter)

    def test_iter_all_entries_reloads(self):
        index, reload_counter = self.make_combined_index_with_missing()
        result = list(index.iter_all_entries())
        index3 = index._indices[0]
        self.assertEqual({(index3, (b'1',), b''), (index3, (b'2',), b'')},
                         set(result))
        self.assertEqual([1, 1, 0], reload_counter)

    def test_iter_all_entries_reloads_midway(self):
        index, reload_counter = self.make_combined_index_with_missing(['2'])
        index1, index2 = index._indices
        result = list(index.iter_all_entries())
        index3 = index._indices[0]
        # We had already yielded '1', so we just go on to the next, we should
        # not yield '1' twice.
        self.assertEqual([(index1, (b'1',), b''), (index3, (b'2',), b'')],
                         result)
        self.assertEqual([1, 1, 0], reload_counter)

    def test_iter_all_entries_no_reload(self):
        index, reload_counter = self.make_combined_index_with_missing()
        index._reload_func = None
        self.assertListRaises(errors.NoSuchFile, index.iter_all_entries)

    def test_iter_all_entries_reloads_and_fails(self):
        index, reload_counter = self.make_combined_index_with_missing(
            ['1', '2', '3'])
        self.assertListRaises(errors.NoSuchFile, index.iter_all_entries)

    def test_iter_entries_prefix_reloads(self):
        index, reload_counter = self.make_combined_index_with_missing()
        result = list(index.iter_entries_prefix([(b'1',)]))
        index3 = index._indices[0]
        self.assertEqual([(index3, (b'1',), b'')], result)
        self.assertEqual([1, 1, 0], reload_counter)

    def test_iter_entries_prefix_reloads_midway(self):
        index, reload_counter = self.make_combined_index_with_missing(['2'])
        index1, index2 = index._indices
        result = list(index.iter_entries_prefix([(b'1',)]))
        index3 = index._indices[0]
        # We had already yielded b'1', so we just go on to the next, we should
        # not yield b'1' twice.
        self.assertEqual([(index1, (b'1',), b'')], result)
        self.assertEqual([1, 1, 0], reload_counter)

    def test_iter_entries_prefix_no_reload(self):
        index, reload_counter = self.make_combined_index_with_missing()
        index._reload_func = None
        self.assertListRaises(errors.NoSuchFile, index.iter_entries_prefix,
                              [(b'1',)])

    def test_iter_entries_prefix_reloads_and_fails(self):
        index, reload_counter = self.make_combined_index_with_missing(
            ['1', '2', '3'])
        self.assertListRaises(errors.NoSuchFile, index.iter_entries_prefix,
                              [(b'1',)])
1438
def make_index_with_simple_nodes(self, name, num_nodes=1):
1439
"""Make an index named after 'name', with keys named after 'name' too.
1441
Nodes will have a value of '' and no references.
1444
((('index-%s-key-%s' % (name, n)).encode('ascii'),), b'', ())
1445
for n in range(1, num_nodes + 1)]
1446
return self.make_index('index-%s' % name, 0, nodes=nodes)

    def test_reorder_after_iter_entries(self):
        # Four indices, one key each: b'index-1-key-1' in idx1 through
        # b'index-4-key-1' in idx4.
        idx = _mod_index.CombinedGraphIndex([])
        idx.insert_index(0, self.make_index_with_simple_nodes('1'), b'1')
        idx.insert_index(1, self.make_index_with_simple_nodes('2'), b'2')
        idx.insert_index(2, self.make_index_with_simple_nodes('3'), b'3')
        idx.insert_index(3, self.make_index_with_simple_nodes('4'), b'4')
        idx1, idx2, idx3, idx4 = idx._indices
        # Query a key from idx4 and idx2.
        self.assertLength(2, list(idx.iter_entries(
            [(b'index-4-key-1',), (b'index-2-key-1',)])))
        # Now idx2 and idx4 should be moved to the front (and idx1 should
        # still be before idx3).
        self.assertEqual([idx2, idx4, idx1, idx3], idx._indices)
        self.assertEqual([b'2', b'4', b'1', b'3'], idx._index_names)

    def test_reorder_propagates_to_siblings(self):
        # Two CombinedGraphIndex objects, with the same number of indices with
        # matching names.
        cgi1 = _mod_index.CombinedGraphIndex([])
        cgi2 = _mod_index.CombinedGraphIndex([])
        cgi1.insert_index(0, self.make_index_with_simple_nodes('1-1'), 'one')
        cgi1.insert_index(1, self.make_index_with_simple_nodes('1-2'), 'two')
        cgi2.insert_index(0, self.make_index_with_simple_nodes('2-1'), 'one')
        cgi2.insert_index(1, self.make_index_with_simple_nodes('2-2'), 'two')
        index2_1, index2_2 = cgi2._indices
        cgi1.set_sibling_indices([cgi2])
        # Trigger a reordering in cgi1. cgi2 will be reordered as well.
        list(cgi1.iter_entries([(b'index-1-2-key-1',)]))
        self.assertEqual([index2_2, index2_1], cgi2._indices)
        self.assertEqual(['two', 'one'], cgi2._index_names)

    def test_validate_reloads(self):
        idx, reload_counter = self.make_combined_index_with_missing()
        idx.validate()
        self.assertEqual([1, 1, 0], reload_counter)

    def test_validate_reloads_midway(self):
        idx, reload_counter = self.make_combined_index_with_missing(['2'])
        idx.validate()

    def test_validate_no_reload(self):
        idx, reload_counter = self.make_combined_index_with_missing()
        idx._reload_func = None
        self.assertRaises(errors.NoSuchFile, idx.validate)

    def test_validate_reloads_and_fails(self):
        idx, reload_counter = self.make_combined_index_with_missing(
            ['1', '2', '3'])
        self.assertRaises(errors.NoSuchFile, idx.validate)
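
    # The find_ancestry tests below build small parent chains (key1 <- key2
    # <- ...) split across two GraphIndex files, then vary the setup: a key
    # that is missing entirely, no indexes at all, and a ghost parent.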

    def test_find_ancestors_across_indexes(self):
        key1 = (b'key-1',)
        key2 = (b'key-2',)
        key3 = (b'key-3',)
        key4 = (b'key-4',)
        index1 = self.make_index('12', ref_lists=1, nodes=[
            (key1, b'value', ([],)),
            (key2, b'value', ([key1],)),
            ])
        index2 = self.make_index('34', ref_lists=1, nodes=[
            (key3, b'value', ([key2],)),
            (key4, b'value', ([key3],)),
            ])
        c_index = _mod_index.CombinedGraphIndex([index1, index2])
        parent_map, missing_keys = c_index.find_ancestry([key1], 0)
        self.assertEqual({key1: ()}, parent_map)
        self.assertEqual(set(), missing_keys)
        # Now look for a key from index2 which requires us to find the key in
        # the second index, and then continue searching for parents in the
        # first index.
        parent_map, missing_keys = c_index.find_ancestry([key3], 0)
        self.assertEqual({key1: (), key2: (key1,), key3: (key2,)}, parent_map)
        self.assertEqual(set(), missing_keys)

    def test_find_ancestors_missing_keys(self):
        key1 = (b'key-1',)
        key2 = (b'key-2',)
        key3 = (b'key-3',)
        key4 = (b'key-4',)
        index1 = self.make_index('12', ref_lists=1, nodes=[
            (key1, b'value', ([],)),
            (key2, b'value', ([key1],)),
            ])
        index2 = self.make_index('34', ref_lists=1, nodes=[
            (key3, b'value', ([key2],)),
            ])
        c_index = _mod_index.CombinedGraphIndex([index1, index2])
        # Searching for a key which is actually not present at all should
        # eventually converge.
        parent_map, missing_keys = c_index.find_ancestry([key4], 0)
        self.assertEqual({}, parent_map)
        self.assertEqual({key4}, missing_keys)

    def test_find_ancestors_no_indexes(self):
        c_index = _mod_index.CombinedGraphIndex([])
        key1 = (b'key-1',)
        parent_map, missing_keys = c_index.find_ancestry([key1], 0)
        self.assertEqual({}, parent_map)
        self.assertEqual({key1}, missing_keys)

    def test_find_ancestors_ghost_parent(self):
        key1 = (b'key-1',)
        key2 = (b'key-2',)
        key3 = (b'key-3',)
        key4 = (b'key-4',)
        index1 = self.make_index('12', ref_lists=1, nodes=[
            (key1, b'value', ([],)),
            (key2, b'value', ([key1],)),
            ])
        index2 = self.make_index('34', ref_lists=1, nodes=[
            (key4, b'value', ([key2, key3],)),
            ])
        c_index = _mod_index.CombinedGraphIndex([index1, index2])
        # key3 is a ghost: referenced as a parent of key4 but present in
        # neither index, so it should end up in missing_keys while the rest
        # of the ancestry is still resolved.
        parent_map, missing_keys = c_index.find_ancestry([key4], 0)
        self.assertEqual({key4: (key2, key3), key2: (key1,), key1: ()},
                         parent_map)
        self.assertEqual({key3}, missing_keys)

    def test__find_ancestors_empty_index(self):
        idx = self.make_index('test', ref_lists=1, key_elements=1, nodes=[])
        parent_map = {}
        missing_keys = set()
        search_keys = idx._find_ancestors([(b'one',), (b'two',)], 0, parent_map,
                                          missing_keys)
        self.assertEqual(set(), search_keys)
        self.assertEqual({}, parent_map)
        self.assertEqual({(b'one',), (b'two',)}, missing_keys)


class TestInMemoryGraphIndex(tests.TestCaseWithMemoryTransport):

    def make_index(self, ref_lists=0, key_elements=1, nodes=[]):
        result = _mod_index.InMemoryGraphIndex(
            ref_lists, key_elements=key_elements)
        result.add_nodes(nodes)
        return result

    def test_add_nodes_no_refs(self):
        index = self.make_index(0)
        index.add_nodes([((b'name', ), b'data')])
        index.add_nodes([((b'name2', ), b''), ((b'name3', ), b'')])
        self.assertEqual({
            (index, (b'name', ), b'data'),
            (index, (b'name2', ), b''),
            (index, (b'name3', ), b''),
            }, set(index.iter_all_entries()))

    def test_add_nodes(self):
        index = self.make_index(1)
        index.add_nodes([((b'name', ), b'data', ([],))])
        index.add_nodes([((b'name2', ), b'', ([],)),
                         ((b'name3', ), b'', ([(b'r', )],))])
        self.assertEqual({
            (index, (b'name', ), b'data', ((),)),
            (index, (b'name2', ), b'', ((),)),
            (index, (b'name3', ), b'', (((b'r', ), ), )),
            }, set(index.iter_all_entries()))
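
    # Note: reference lists are passed to add_nodes as lists but are reported
    # back from iter_all_entries as tuples, e.g. ([(b'r', )],) above comes
    # back as (((b'r', ), ), ).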

    def test_iter_all_entries_empty(self):
        index = self.make_index()
        self.assertEqual([], list(index.iter_all_entries()))

    def test_iter_all_entries_simple(self):
        index = self.make_index(nodes=[((b'name', ), b'data')])
        self.assertEqual([(index, (b'name', ), b'data')],
                         list(index.iter_all_entries()))

    def test_iter_all_entries_references(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], )),
            ((b'ref', ), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref', ),),)),
                          (index, (b'ref', ), b'refdata', ((), ))},
                         set(index.iter_all_entries()))

    def test_iteration_absent_skipped(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
                         set(index.iter_all_entries()))
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
                         set(index.iter_entries([(b'name', )])))
        self.assertEqual([], list(index.iter_entries([(b'ref', )])))

    def test_iter_all_keys(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], )),
            ((b'ref', ), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
                          (index, (b'ref', ), b'refdata', ((), ))},
                         set(index.iter_entries([(b'name', ), (b'ref', )])))

    def test_iter_key_prefix_1_key_element_no_refs(self):
        index = self.make_index(nodes=[
            ((b'name', ), b'data'),
            ((b'ref', ), b'refdata')])
        self.assertEqual({(index, (b'name', ), b'data'),
                          (index, (b'ref', ), b'refdata')},
                         set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))

    def test_iter_key_prefix_1_key_element_refs(self):
        index = self.make_index(1, nodes=[
            ((b'name', ), b'data', ([(b'ref', )], )),
            ((b'ref', ), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
                          (index, (b'ref', ), b'refdata', ((), ))},
                         set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))

    def test_iter_key_prefix_2_key_element_no_refs(self):
        index = self.make_index(key_elements=2, nodes=[
            ((b'name', b'fin1'), b'data'),
            ((b'name', b'fin2'), b'beta'),
            ((b'ref', b'erence'), b'refdata')])
        self.assertEqual({(index, (b'name', b'fin1'), b'data'),
                          (index, (b'ref', b'erence'), b'refdata')},
                         set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
        self.assertEqual({(index, (b'name', b'fin1'), b'data'),
                          (index, (b'name', b'fin2'), b'beta')},
                         set(index.iter_entries_prefix([(b'name', None)])))

    def test_iter_key_prefix_2_key_element_refs(self):
        index = self.make_index(1, key_elements=2, nodes=[
            ((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], )),
            ((b'name', b'fin2'), b'beta', ([], )),
            ((b'ref', b'erence'), b'refdata', ([], ))])
        self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
                          (index, (b'ref', b'erence'), b'refdata', ((), ))},
                         set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
        self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
                          (index, (b'name', b'fin2'), b'beta', ((), ))},
                         set(index.iter_entries_prefix([(b'name', None)])))
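
    # In the two-element-key tests above, a None element in a prefix key such
    # as (b'name', None) acts as a wildcard: it matches every key whose first
    # element is b'name'.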

    def test_iter_nothing_empty(self):
        index = self.make_index()
        self.assertEqual([], list(index.iter_entries([])))

    def test_iter_missing_entry_empty(self):
        index = self.make_index()
        self.assertEqual([], list(index.iter_entries([b'a'])))

    def test_key_count_empty(self):
        index = self.make_index()
        self.assertEqual(0, index.key_count())

    def test_key_count_one(self):
        index = self.make_index(nodes=[((b'name', ), b'')])
        self.assertEqual(1, index.key_count())

    def test_key_count_two(self):
        index = self.make_index(nodes=[((b'name', ), b''), ((b'foo', ), b'')])
        self.assertEqual(2, index.key_count())

    def test_validate_empty(self):
        index = self.make_index()
        index.validate()

    def test_validate_no_refs_content(self):
        index = self.make_index(nodes=[((b'key', ), b'value')])
        index.validate()


class TestGraphIndexPrefixAdapter(tests.TestCaseWithMemoryTransport):

    def make_index(self, ref_lists=1, key_elements=2, nodes=[],
                   add_callback=False):
        result = _mod_index.InMemoryGraphIndex(
            ref_lists, key_elements=key_elements)
        result.add_nodes(nodes)
        if add_callback:
            add_nodes_callback = result.add_nodes
        else:
            add_nodes_callback = None
        adapter = _mod_index.GraphIndexPrefixAdapter(
            result, (b'prefix', ), key_elements - 1,
            add_nodes_callback=add_nodes_callback)
        return result, adapter
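
    # The adapter exposes only the nodes under (b'prefix',): the leading
    # element is stripped from keys and references on read, and added back
    # when writing through the add_nodes callback, as the tests below show.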

    def test_add_node(self):
        index, adapter = self.make_index(add_callback=True)
        adapter.add_node((b'key',), b'value', (((b'ref',),),))
        self.assertEqual({(index, (b'prefix', b'key'), b'value',
                           (((b'prefix', b'ref'),),))},
                         set(index.iter_all_entries()))

    def test_add_nodes(self):
        index, adapter = self.make_index(add_callback=True)
        adapter.add_nodes((
            ((b'key',), b'value', (((b'ref',),),)),
            ((b'key2',), b'value2', ((),)),
            ))
        self.assertEqual({
            (index, (b'prefix', b'key2'), b'value2', ((),)),
            (index, (b'prefix', b'key'), b'value', (((b'prefix', b'ref'),),))
            },
            set(index.iter_all_entries()))

    def test_construct(self):
        idx = _mod_index.InMemoryGraphIndex()
        adapter = _mod_index.GraphIndexPrefixAdapter(idx, (b'prefix', ), 1)

    def test_construct_with_callback(self):
        idx = _mod_index.InMemoryGraphIndex()
        adapter = _mod_index.GraphIndexPrefixAdapter(idx, (b'prefix', ), 1,
                                                     idx.add_nodes)

    def test_iter_all_entries_cross_prefix_map_errors(self):
        index, adapter = self.make_index(nodes=[
            ((b'prefix', b'key1'), b'data1', (((b'prefixaltered', b'key2'),),))])
        self.assertRaises(_mod_index.BadIndexData, list,
                          adapter.iter_all_entries())

    def test_iter_all_entries(self):
        index, adapter = self.make_index(nodes=[
            ((b'notprefix', b'key1'), b'data', ((), )),
            ((b'prefix', b'key1'), b'data1', ((), )),
            ((b'prefix', b'key2'), b'data2', (((b'prefix', b'key1'),),))])
        self.assertEqual({(index, (b'key1', ), b'data1', ((),)),
                          (index, (b'key2', ), b'data2', (((b'key1',),),))},
                         set(adapter.iter_all_entries()))

    def test_iter_entries(self):
        index, adapter = self.make_index(nodes=[
            ((b'notprefix', b'key1'), b'data', ((), )),
            ((b'prefix', b'key1'), b'data1', ((), )),
            ((b'prefix', b'key2'), b'data2', (((b'prefix', b'key1'),),))])
        # ask for many - get all
        self.assertEqual({(index, (b'key1', ), b'data1', ((),)),
                          (index, (b'key2', ), b'data2', (((b'key1', ),),))},
                         set(adapter.iter_entries([(b'key1', ), (b'key2', )])))
        # ask for one, get one
        self.assertEqual({(index, (b'key1', ), b'data1', ((),))},
                         set(adapter.iter_entries([(b'key1', )])))
        # ask for missing, get none
        self.assertEqual(set(),
                         set(adapter.iter_entries([(b'key3', )])))

    def test_iter_entries_prefix(self):
        index, adapter = self.make_index(key_elements=3, nodes=[
            ((b'notprefix', b'foo', b'key1'), b'data', ((), )),
            ((b'prefix', b'prefix2', b'key1'), b'data1', ((), )),
            ((b'prefix', b'prefix2', b'key2'), b'data2', (((b'prefix', b'prefix2', b'key1'),),))])
        # ask for a prefix, get the results for just that prefix, adjusted.
        self.assertEqual({(index, (b'prefix2', b'key1', ), b'data1', ((),)),
                          (index, (b'prefix2', b'key2', ), b'data2', (((b'prefix2', b'key1', ),),))},
                         set(adapter.iter_entries_prefix([(b'prefix2', None)])))

    def test_key_count_no_matching_keys(self):
        index, adapter = self.make_index(nodes=[
            ((b'notprefix', b'key1'), b'data', ((), ))])
        self.assertEqual(0, adapter.key_count())

    def test_key_count_some_keys(self):
        index, adapter = self.make_index(nodes=[
            ((b'notprefix', b'key1'), b'data', ((), )),
            ((b'prefix', b'key1'), b'data1', ((), )),
            ((b'prefix', b'key2'), b'data2', (((b'prefix', b'key1'),),))])
        self.assertEqual(2, adapter.key_count())

    def test_validate(self):
        index, adapter = self.make_index()
        calls = []

        def validate():
            calls.append('called')
        index.validate = validate
        adapter.validate()
        self.assertEqual(['called'], calls)