# Copyright (C) 2007-2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tests for indices."""
from .. import (
    index as _mod_index,
    transport,
    )
from . import tests
class ErrorTests(tests.TestCase):
    """Check the str() rendering of each index error class."""

    def test_bad_index_format_signature(self):
        error = _mod_index.BadIndexFormatSignature("foo", "bar")
        self.assertEqual("foo is not an index of type bar.",
                         str(error))

    def test_bad_index_data(self):
        error = _mod_index.BadIndexData("foo")
        self.assertEqual("Error in data for index foo.",
                         str(error))

    def test_bad_index_duplicate_key(self):
        error = _mod_index.BadIndexDuplicateKey("foo", "bar")
        self.assertEqual("The key 'foo' is already in index 'bar'.",
                         str(error))

    def test_bad_index_key(self):
        error = _mod_index.BadIndexKey("foo")
        self.assertEqual("The key 'foo' is not a valid key.",
                         str(error))

    def test_bad_index_options(self):
        error = _mod_index.BadIndexOptions("foo")
        self.assertEqual("Could not parse options for index foo.",
                         str(error))

    def test_bad_index_value(self):
        error = _mod_index.BadIndexValue("foo")
        self.assertEqual("The value 'foo' is not a valid value.",
                         str(error))
class TestGraphIndexBuilder(tests.TestCaseWithMemoryTransport):
    """Tests for GraphIndexBuilder: serialisation format and input validation."""

    def test_build_index_empty(self):
        builder = _mod_index.GraphIndexBuilder()
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_empty_two_element_keys(self):
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=0\n\n",
            contents)

    def test_build_index_one_reference_list_empty(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_two_reference_list_empty(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=0\n\n",
            contents)

    def test_build_index_one_node_no_refs(self):
        builder = _mod_index.GraphIndexBuilder()
        builder.add_node((b'akey', ), b'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            b"akey\x00\x00\x00data\n\n", contents)

    def test_build_index_one_node_no_refs_accepts_empty_reflist(self):
        builder = _mod_index.GraphIndexBuilder()
        builder.add_node((b'akey', ), b'data', ())
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            b"akey\x00\x00\x00data\n\n", contents)

    def test_build_index_one_node_2_element_keys(self):
        # multipart keys are separated by \x00 - because they are fixed length,
        # not variable this does not cause any issues, and seems clearer to the
        # author.
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        builder.add_node((b'akey', b'secondpart'), b'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=1\n"
            b"akey\x00secondpart\x00\x00\x00data\n\n", contents)

    def test_add_node_empty_value(self):
        builder = _mod_index.GraphIndexBuilder()
        builder.add_node((b'akey', ), b'')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
            b"akey\x00\x00\x00\n\n", contents)

    def test_build_index_nodes_sorted(self):
        # the highest sorted node comes first.
        builder = _mod_index.GraphIndexBuilder()
        # use three to have a good chance of glitching dictionary hash
        # lookups etc. Insert in randomish order that is not correct
        # and not the reverse of the correct order.
        builder.add_node((b'2002', ), b'data')
        builder.add_node((b'2000', ), b'data')
        builder.add_node((b'2001', ), b'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=3\n"
            b"2000\x00\x00\x00data\n"
            b"2001\x00\x00\x00data\n"
            b"2002\x00\x00\x00data\n"
            b"\n", contents)

    def test_build_index_2_element_key_nodes_sorted(self):
        # multiple element keys are sorted first-key, second-key.
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        # use three values of each key element, to have a good chance of
        # glitching dictionary hash lookups etc. Insert in randomish order that
        # is not correct and not the reverse of the correct order.
        builder.add_node((b'2002', b'2002'), b'data')
        builder.add_node((b'2002', b'2000'), b'data')
        builder.add_node((b'2002', b'2001'), b'data')
        builder.add_node((b'2000', b'2002'), b'data')
        builder.add_node((b'2000', b'2000'), b'data')
        builder.add_node((b'2000', b'2001'), b'data')
        builder.add_node((b'2001', b'2002'), b'data')
        builder.add_node((b'2001', b'2000'), b'data')
        builder.add_node((b'2001', b'2001'), b'data')
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=9\n"
            b"2000\x002000\x00\x00\x00data\n"
            b"2000\x002001\x00\x00\x00data\n"
            b"2000\x002002\x00\x00\x00data\n"
            b"2001\x002000\x00\x00\x00data\n"
            b"2001\x002001\x00\x00\x00data\n"
            b"2001\x002002\x00\x00\x00data\n"
            b"2002\x002000\x00\x00\x00data\n"
            b"2002\x002001\x00\x00\x00data\n"
            b"2002\x002002\x00\x00\x00data\n"
            b"\n", contents)

    def test_build_index_reference_lists_are_included_one(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'key', ), b'data', ([], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            b"key\x00\x00\x00data\n"
            b"\n", contents)

    def test_build_index_reference_lists_with_2_element_keys(self):
        builder = _mod_index.GraphIndexBuilder(
            reference_lists=1, key_elements=2)
        builder.add_node((b'key', b'key2'), b'data', ([], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=2\nlen=1\n"
            b"key\x00key2\x00\x00\x00data\n"
            b"\n", contents)

    def test_build_index_reference_lists_are_included_two(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        builder.add_node((b'key', ), b'data', ([], []))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
            b"key\x00\x00\t\x00data\n"
            b"\n", contents)

    def test_clear_cache(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        # This is a no-op, but the api should exist
        builder.clear_cache()

    def test_node_references_are_byte_offsets(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'reference', ), b'data', ([], ))
        builder.add_node((b'key', ), b'data', ([(b'reference', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=2\n"
            b"key\x00\x0072\x00data\n"
            b"reference\x00\x00\x00data\n"
            b"\n", contents)

    def test_node_references_are_cr_delimited(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'reference', ), b'data', ([], ))
        builder.add_node((b'reference2', ), b'data', ([], ))
        builder.add_node((b'key', ), b'data',
                         ([(b'reference', ), (b'reference2', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=3\n"
            b"key\x00\x00077\r094\x00data\n"
            b"reference\x00\x00\x00data\n"
            b"reference2\x00\x00\x00data\n"
            b"\n", contents)

    def test_multiple_reference_lists_are_tab_delimited(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        builder.add_node((b'keference', ), b'data', ([], []))
        builder.add_node((b'rey', ), b'data',
                         ([(b'keference', )], [(b'keference', )]))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=2\n"
            b"keference\x00\x00\t\x00data\n"
            b"rey\x00\x0059\t59\x00data\n"
            b"\n", contents)

    def test_add_node_referencing_missing_key_makes_absent(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'rey', ), b'data',
                         ([(b'beference', ), (b'aeference2', )], ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            b"aeference2\x00a\x00\x00\n"
            b"beference\x00a\x00\x00\n"
            b"rey\x00\x00074\r059\x00data\n"
            b"\n", contents)

    def test_node_references_three_digits(self):
        # test the node digit expands as needed.
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        references = [((b"%d" % val), ) for val in range(8, -1, -1)]
        builder.add_node((b'2-key', ), b'', (references, ))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqualDiff(
            b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
            b"0\x00a\x00\x00\n"
            b"1\x00a\x00\x00\n"
            b"2\x00a\x00\x00\n"
            b"2-key\x00\x00151\r145\r139\r133\r127\r121\r071\r065\r059\x00\n"
            b"3\x00a\x00\x00\n"
            b"4\x00a\x00\x00\n"
            b"5\x00a\x00\x00\n"
            b"6\x00a\x00\x00\n"
            b"7\x00a\x00\x00\n"
            b"8\x00a\x00\x00\n"
            b"\n", contents)

    def test_absent_has_no_reference_overhead(self):
        # the offsets after an absent record should be correct when there are
        # >1 reference lists.
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        builder.add_node((b'parent', ), b'', ([(b'aail', ), (b'zther', )], []))
        stream = builder.finish()
        contents = stream.read()
        self.assertEqual(
            b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
            b"aail\x00a\x00\x00\n"
            b"parent\x00\x0059\r84\t\x00\n"
            b"zther\x00a\x00\x00\n"
            b"\n", contents)

    def test_add_node_bad_key(self):
        builder = _mod_index.GraphIndexBuilder()
        for bad_char in bytearray(b'\t\n\x0b\x0c\r\x00 '):
            self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                              (b'a%skey' % bytes([bad_char]), ), b'data')
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                          (), b'data')
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                          b'not-a-tuple', b'data')
        # not enough length
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                          (), b'data')
        # too long
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                          (b'primary', b'secondary'), b'data')
        # secondary key elements get checked too:
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        for bad_char in bytearray(b'\t\n\x0b\x0c\r\x00 '):
            self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
                              (b'prefix', b'a%skey' % bytes([bad_char])), b'data')

    def test_add_node_bad_data(self):
        builder = _mod_index.GraphIndexBuilder()
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data\naa')
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data\x00aa')

    def test_add_node_bad_mismatched_ref_lists_length(self):
        builder = _mod_index.GraphIndexBuilder()
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa', ([], ))
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa')
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa', (), )
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa', ([], []))
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa')
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa', ([], ))
        self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
                          b'data aa', ([], [], []))

    def test_add_node_bad_key_in_reference_lists(self):
        # first list, first key - trivial
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([(b'a key', )], ))
        # references keys must be tuples too
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', (['not-a-tuple'], ))
        # not enough length
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([()], ))
        # too long
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([(b'primary', b'secondary')], ))
        # need to check more than the first key in the list
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([(b'agoodkey', ), (b'that is a bad key', )], ))
        # and if there is more than one list it should be getting checked
        # too
        builder = _mod_index.GraphIndexBuilder(reference_lists=2)
        self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
                          b'data aa', ([], ['a bad key']))

    def test_add_duplicate_key(self):
        builder = _mod_index.GraphIndexBuilder()
        builder.add_node((b'key', ), b'data')
        self.assertRaises(_mod_index.BadIndexDuplicateKey,
                          builder.add_node, (b'key', ), b'data')

    def test_add_duplicate_key_2_elements(self):
        builder = _mod_index.GraphIndexBuilder(key_elements=2)
        builder.add_node((b'key', b'key'), b'data')
        self.assertRaises(_mod_index.BadIndexDuplicateKey, builder.add_node,
                          (b'key', b'key'), b'data')

    def test_add_key_after_referencing_key(self):
        builder = _mod_index.GraphIndexBuilder(reference_lists=1)
        builder.add_node((b'key', ), b'data', ([(b'reference', )], ))
        builder.add_node((b'reference', ), b'data', ([],))

    def test_add_key_after_referencing_key_2_elements(self):
        builder = _mod_index.GraphIndexBuilder(
            reference_lists=1, key_elements=2)
        builder.add_node((b'k', b'ey'), b'data',
                         ([(b'reference', b'tokey')], ))
        builder.add_node((b'reference', b'tokey'), b'data', ([],))

    def test_set_optimize(self):
        builder = _mod_index.GraphIndexBuilder(
            reference_lists=1, key_elements=2)
        builder.set_optimize(for_size=True)
        self.assertTrue(builder._optimize_for_size)
        builder.set_optimize(for_size=False)
        self.assertFalse(builder._optimize_for_size)
class TestGraphIndex(tests.TestCaseWithMemoryTransport):
412
def make_key(self, number):
413
return ((b'%d' % number) + b'X' * 100,)
415
def make_value(self, number):
416
return (b'%d' % number) + b'Y' * 100
418
def make_nodes(self, count=64):
419
# generate a big enough index that we only read some of it on a typical
422
for counter in range(count):
424
(self.make_key(counter), self.make_value(counter), ()))
427
def make_index(self, ref_lists=0, key_elements=1, nodes=[]):
428
builder = _mod_index.GraphIndexBuilder(
429
ref_lists, key_elements=key_elements)
430
for key, value, references in nodes:
431
builder.add_node(key, value, references)
432
stream = builder.finish()
433
trans = transport.get_transport_from_url('trace+' + self.get_url())
434
size = trans.put_file('index', stream)
435
return _mod_index.GraphIndex(trans, 'index', size)
437
def make_index_with_offset(self, ref_lists=0, key_elements=1, nodes=[],
439
builder = _mod_index.GraphIndexBuilder(
440
ref_lists, key_elements=key_elements)
441
for key, value, references in nodes:
442
builder.add_node(key, value, references)
443
content = builder.finish().read()
445
trans = self.get_transport()
446
trans.put_bytes('index', (b' ' * offset) + content)
447
return _mod_index.GraphIndex(trans, 'index', size, offset=offset)
449
def test_clear_cache(self):
450
index = self.make_index()
451
# For now, we just want to make sure the api is available. As this is
452
# old code, we don't really worry if it *does* anything.
455
def test_open_bad_index_no_error(self):
456
trans = self.get_transport()
457
trans.put_bytes('name', b"not an index\n")
458
idx = _mod_index.GraphIndex(trans, 'name', 13)
460
def test_with_offset(self):
461
nodes = self.make_nodes(200)
462
idx = self.make_index_with_offset(offset=1234567, nodes=nodes)
463
self.assertEqual(200, idx.key_count())
465
def test_buffer_all_with_offset(self):
466
nodes = self.make_nodes(200)
467
idx = self.make_index_with_offset(offset=1234567, nodes=nodes)
469
self.assertEqual(200, idx.key_count())
471
def test_side_effect_buffering_with_offset(self):
472
nodes = self.make_nodes(20)
473
index = self.make_index_with_offset(offset=1234567, nodes=nodes)
474
index._transport.recommended_page_size = lambda: 64 * 1024
475
subset_nodes = [nodes[0][0], nodes[10][0], nodes[19][0]]
476
entries = [n[1] for n in index.iter_entries(subset_nodes)]
477
self.assertEqual(sorted(subset_nodes), sorted(entries))
478
self.assertEqual(20, index.key_count())
480
def test_open_sets_parsed_map_empty(self):
481
index = self.make_index()
482
self.assertEqual([], index._parsed_byte_map)
483
self.assertEqual([], index._parsed_key_map)
485
def test_key_count_buffers(self):
486
index = self.make_index(nodes=self.make_nodes(2))
487
# reset the transport log
488
del index._transport._activity[:]
489
self.assertEqual(2, index.key_count())
490
# We should have requested reading the header bytes
492
('readv', 'index', [(0, 200)], True, index._size),
494
index._transport._activity)
495
# And that should have been enough to trigger reading the whole index
497
self.assertIsNot(None, index._nodes)
499
def test_lookup_key_via_location_buffers(self):
500
index = self.make_index()
501
# reset the transport log
502
del index._transport._activity[:]
503
# do a _lookup_keys_via_location call for the middle of the file, which
504
# is what bisection uses.
505
result = index._lookup_keys_via_location(
506
[(index._size // 2, (b'missing', ))])
507
# this should have asked for a readv request, with adjust_for_latency,
508
# and two regions: the header, and half-way into the file.
510
('readv', 'index', [(30, 30), (0, 200)], True, 60),
512
index._transport._activity)
513
# and the result should be that the key cannot be present, because this
514
# is a trivial index.
515
self.assertEqual([((index._size // 2, (b'missing', )), False)],
517
# And this should have caused the file to be fully buffered
518
self.assertIsNot(None, index._nodes)
519
self.assertEqual([], index._parsed_byte_map)
521
def test_first_lookup_key_via_location(self):
522
# We need enough data so that the _HEADER_READV doesn't consume the
523
# whole file. We always read 800 bytes for every key, and the local
524
# transport natural expansion is 4096 bytes. So we have to have >8192
525
# bytes or we will trigger "buffer_all".
526
# We also want the 'missing' key to fall within the range that *did*
529
index = self.make_index(nodes=self.make_nodes(64))
530
# reset the transport log
531
del index._transport._activity[:]
532
# do a _lookup_keys_via_location call for the middle of the file, which
533
# is what bisection uses.
534
start_lookup = index._size // 2
535
result = index._lookup_keys_via_location(
536
[(start_lookup, (b'40missing', ))])
537
# this should have asked for a readv request, with adjust_for_latency,
538
# and two regions: the header, and half-way into the file.
541
[(start_lookup, 800), (0, 200)], True, index._size),
543
index._transport._activity)
544
# and the result should be that the key cannot be present, because this
545
# is a trivial index.
546
self.assertEqual([((start_lookup, (b'40missing', )), False)],
548
# And this should not have caused the file to be fully buffered
549
self.assertIs(None, index._nodes)
550
# And the regions of the file that have been parsed should be in the
551
# parsed_byte_map and the parsed_key_map
552
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
553
self.assertEqual([((), self.make_key(26)),
554
(self.make_key(31), self.make_key(48))],
555
index._parsed_key_map)
557
def test_parsing_non_adjacent_data_trims(self):
558
index = self.make_index(nodes=self.make_nodes(64))
559
result = index._lookup_keys_via_location(
560
[(index._size // 2, (b'40', ))])
561
# and the result should be that the key cannot be present, because key is
562
# in the middle of the observed data from a 4K read - the smallest transport
563
# will do today with this api.
564
self.assertEqual([((index._size // 2, (b'40', )), False)],
566
# and we should have a parse map that includes the header and the
567
# region that was parsed after trimming.
568
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
569
self.assertEqual([((), self.make_key(26)),
570
(self.make_key(31), self.make_key(48))],
571
index._parsed_key_map)
573
def test_parsing_data_handles_parsed_contained_regions(self):
574
# the following patten creates a parsed region that is wholly within a
575
# single result from the readv layer:
576
# .... single-read (readv-minimum-size) ...
577
# which then trims the start and end so the parsed size is < readv
579
# then a dual lookup (or a reference lookup for that matter) which
580
# abuts or overlaps the parsed region on both sides will need to
581
# discard the data in the middle, but parse the end as well.
583
# we test this by doing a single lookup to seed the data, then
584
# a lookup for two keys that are present, and adjacent -
585
# we except both to be found, and the parsed byte map to include the
586
# locations of both keys.
587
index = self.make_index(nodes=self.make_nodes(128))
588
result = index._lookup_keys_via_location(
589
[(index._size // 2, (b'40', ))])
590
# and we should have a parse map that includes the header and the
591
# region that was parsed after trimming.
592
self.assertEqual([(0, 4045), (11759, 15707)], index._parsed_byte_map)
593
self.assertEqual([((), self.make_key(116)),
594
(self.make_key(35), self.make_key(51))],
595
index._parsed_key_map)
596
# now ask for two keys, right before and after the parsed region
597
result = index._lookup_keys_via_location(
598
[(11450, self.make_key(34)), (15707, self.make_key(52))])
600
((11450, self.make_key(34)),
601
(index, self.make_key(34), self.make_value(34))),
602
((15707, self.make_key(52)),
603
(index, self.make_key(52), self.make_value(52))),
606
self.assertEqual([(0, 4045), (9889, 17993)], index._parsed_byte_map)
608
def test_lookup_missing_key_answers_without_io_when_map_permits(self):
609
# generate a big enough index that we only read some of it on a typical
611
index = self.make_index(nodes=self.make_nodes(64))
612
# lookup the keys in the middle of the file
613
result = index._lookup_keys_via_location(
614
[(index._size // 2, (b'40', ))])
615
# check the parse map, this determines the test validity
616
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
617
self.assertEqual([((), self.make_key(26)),
618
(self.make_key(31), self.make_key(48))],
619
index._parsed_key_map)
620
# reset the transport log
621
del index._transport._activity[:]
622
# now looking up a key in the portion of the file already parsed should
623
# not create a new transport request, and should return False (cannot
624
# be in the index) - even when the byte location we ask for is outside
626
result = index._lookup_keys_via_location(
628
self.assertEqual([((4000, (b'40', )), False)],
630
self.assertEqual([], index._transport._activity)
632
def test_lookup_present_key_answers_without_io_when_map_permits(self):
633
# generate a big enough index that we only read some of it on a typical
635
index = self.make_index(nodes=self.make_nodes(64))
636
# lookup the keys in the middle of the file
637
result = index._lookup_keys_via_location(
638
[(index._size // 2, (b'40', ))])
639
# check the parse map, this determines the test validity
640
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
641
self.assertEqual([((), self.make_key(26)),
642
(self.make_key(31), self.make_key(48))],
643
index._parsed_key_map)
644
# reset the transport log
645
del index._transport._activity[:]
646
# now looking up a key in the portion of the file already parsed should
647
# not create a new transport request, and should return False (cannot
648
# be in the index) - even when the byte location we ask for is outside
651
result = index._lookup_keys_via_location([(4000, self.make_key(40))])
653
[((4000, self.make_key(40)),
654
(index, self.make_key(40), self.make_value(40)))],
656
self.assertEqual([], index._transport._activity)
658
def test_lookup_key_below_probed_area(self):
659
# generate a big enough index that we only read some of it on a typical
661
index = self.make_index(nodes=self.make_nodes(64))
662
# ask for the key in the middle, but a key that is located in the
663
# unparsed region before the middle.
664
result = index._lookup_keys_via_location(
665
[(index._size // 2, (b'30', ))])
666
# check the parse map, this determines the test validity
667
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
668
self.assertEqual([((), self.make_key(26)),
669
(self.make_key(31), self.make_key(48))],
670
index._parsed_key_map)
671
self.assertEqual([((index._size // 2, (b'30', )), -1)],
674
def test_lookup_key_above_probed_area(self):
675
# generate a big enough index that we only read some of it on a typical
677
index = self.make_index(nodes=self.make_nodes(64))
678
# ask for the key in the middle, but a key that is located in the
679
# unparsed region after the middle.
680
result = index._lookup_keys_via_location(
681
[(index._size // 2, (b'50', ))])
682
# check the parse map, this determines the test validity
683
self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
684
self.assertEqual([((), self.make_key(26)),
685
(self.make_key(31), self.make_key(48))],
686
index._parsed_key_map)
687
self.assertEqual([((index._size // 2, (b'50', )), +1)],
690
def test_lookup_key_resolves_references(self):
691
# generate a big enough index that we only read some of it on a typical
694
for counter in range(99):
695
nodes.append((self.make_key(counter), self.make_value(counter),
696
((self.make_key(counter + 20),),)))
697
index = self.make_index(ref_lists=1, nodes=nodes)
698
# lookup a key in the middle that does not exist, so that when we can
699
# check that the referred-to-keys are not accessed automatically.
700
index_size = index._size
701
index_center = index_size // 2
702
result = index._lookup_keys_via_location(
703
[(index_center, (b'40', ))])
704
# check the parse map - only the start and middle should have been
706
self.assertEqual([(0, 4027), (10198, 14028)], index._parsed_byte_map)
707
self.assertEqual([((), self.make_key(17)),
708
(self.make_key(44), self.make_key(5))],
709
index._parsed_key_map)
710
# and check the transport activity likewise.
712
[('readv', 'index', [(index_center, 800), (0, 200)], True,
714
index._transport._activity)
715
# reset the transport log for testing the reference lookup
716
del index._transport._activity[:]
717
# now looking up a key in the portion of the file already parsed should
718
# only perform IO to resolve its key references.
719
result = index._lookup_keys_via_location([(11000, self.make_key(45))])
721
[((11000, self.make_key(45)),
722
(index, self.make_key(45), self.make_value(45),
723
((self.make_key(65),),)))],
725
self.assertEqual([('readv', 'index', [(15093, 800)], True, index_size)],
726
index._transport._activity)
728
def test_lookup_key_can_buffer_all(self):
730
for counter in range(64):
731
nodes.append((self.make_key(counter), self.make_value(counter),
732
((self.make_key(counter + 20),),)))
733
index = self.make_index(ref_lists=1, nodes=nodes)
734
# lookup a key in the middle that does not exist, so that when we can
735
# check that the referred-to-keys are not accessed automatically.
736
index_size = index._size
737
index_center = index_size // 2
738
result = index._lookup_keys_via_location([(index_center, (b'40', ))])
739
# check the parse map - only the start and middle should have been
741
self.assertEqual([(0, 3890), (6444, 10274)], index._parsed_byte_map)
742
self.assertEqual([((), self.make_key(25)),
743
(self.make_key(37), self.make_key(52))],
744
index._parsed_key_map)
745
# and check the transport activity likewise.
747
[('readv', 'index', [(index_center, 800), (0, 200)], True,
749
index._transport._activity)
750
# reset the transport log for testing the reference lookup
751
del index._transport._activity[:]
752
# now looking up a key in the portion of the file already parsed should
753
# only perform IO to resolve its key references.
754
result = index._lookup_keys_via_location([(7000, self.make_key(40))])
756
[((7000, self.make_key(40)),
757
(index, self.make_key(40), self.make_value(40),
758
((self.make_key(60),),)))],
760
# Resolving the references would have required more data read, and we
761
# are already above the 50% threshold, so it triggered a _buffer_all
762
self.assertEqual([('get', 'index')], index._transport._activity)
764
def test_iter_all_entries_empty(self):
765
index = self.make_index()
766
self.assertEqual([], list(index.iter_all_entries()))
768
def test_iter_all_entries_simple(self):
769
index = self.make_index(nodes=[((b'name', ), b'data', ())])
770
self.assertEqual([(index, (b'name', ), b'data')],
771
list(index.iter_all_entries()))
773
def test_iter_all_entries_simple_2_elements(self):
774
index = self.make_index(key_elements=2,
775
nodes=[((b'name', b'surname'), b'data', ())])
776
self.assertEqual([(index, (b'name', b'surname'), b'data')],
777
list(index.iter_all_entries()))
779
def test_iter_all_entries_references_resolved(self):
780
index = self.make_index(1, nodes=[
781
((b'name', ), b'data', ([(b'ref', )], )),
782
((b'ref', ), b'refdata', ([], ))])
783
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
784
(index, (b'ref', ), b'refdata', ((), ))},
785
set(index.iter_all_entries()))
787
def test_iter_entries_buffers_once(self):
788
index = self.make_index(nodes=self.make_nodes(2))
789
# reset the transport log
790
del index._transport._activity[:]
791
self.assertEqual({(index, self.make_key(1), self.make_value(1))},
792
set(index.iter_entries([self.make_key(1)])))
793
# We should have requested reading the header bytes
794
# But not needed any more than that because it would have triggered a
797
('readv', 'index', [(0, 200)], True, index._size),
799
index._transport._activity)
800
# And that should have been enough to trigger reading the whole index
802
self.assertIsNot(None, index._nodes)
804
def test_iter_entries_buffers_by_bytes_read(self):
805
index = self.make_index(nodes=self.make_nodes(64))
806
list(index.iter_entries([self.make_key(10)]))
807
# The first time through isn't enough to trigger a buffer all
808
self.assertIs(None, index._nodes)
809
self.assertEqual(4096, index._bytes_read)
810
# Grabbing a key in that same page won't trigger a buffer all, as we
811
# still haven't read 50% of the file
812
list(index.iter_entries([self.make_key(11)]))
813
self.assertIs(None, index._nodes)
814
self.assertEqual(4096, index._bytes_read)
815
# We haven't read more data, so reading outside the range won't trigger
816
# a buffer all right away
817
list(index.iter_entries([self.make_key(40)]))
818
self.assertIs(None, index._nodes)
819
self.assertEqual(8192, index._bytes_read)
820
# On the next pass, we will not trigger buffer all if the key is
821
# available without reading more
822
list(index.iter_entries([self.make_key(32)]))
823
self.assertIs(None, index._nodes)
824
# But if we *would* need to read more to resolve it, then we will
826
list(index.iter_entries([self.make_key(60)]))
827
self.assertIsNot(None, index._nodes)
829
def test_iter_entries_references_resolved(self):
830
index = self.make_index(1, nodes=[
831
((b'name', ), b'data', ([(b'ref', ), (b'ref', )], )),
832
((b'ref', ), b'refdata', ([], ))])
833
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',), (b'ref',)),)),
834
(index, (b'ref', ), b'refdata', ((), ))},
835
set(index.iter_entries([(b'name',), (b'ref',)])))
837
def test_iter_entries_references_2_refs_resolved(self):
838
index = self.make_index(2, nodes=[
839
((b'name', ), b'data', ([(b'ref', )], [(b'ref', )])),
840
((b'ref', ), b'refdata', ([], []))])
841
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),), ((b'ref',),))),
842
(index, (b'ref', ), b'refdata', ((), ()))},
843
set(index.iter_entries([(b'name',), (b'ref',)])))
845
def test_iteration_absent_skipped(self):
846
index = self.make_index(1, nodes=[
847
((b'name', ), b'data', ([(b'ref', )], ))])
848
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
849
set(index.iter_all_entries()))
850
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
851
set(index.iter_entries([(b'name', )])))
852
self.assertEqual([], list(index.iter_entries([(b'ref', )])))
854
def test_iteration_absent_skipped_2_element_keys(self):
855
index = self.make_index(1, key_elements=2, nodes=[
856
((b'name', b'fin'), b'data', ([(b'ref', b'erence')], ))])
857
self.assertEqual([(index, (b'name', b'fin'), b'data', (((b'ref', b'erence'),),))],
858
list(index.iter_all_entries()))
859
self.assertEqual([(index, (b'name', b'fin'), b'data', (((b'ref', b'erence'),),))],
860
list(index.iter_entries([(b'name', b'fin')])))
861
self.assertEqual([], list(index.iter_entries([(b'ref', b'erence')])))
863
def test_iter_all_keys(self):
864
index = self.make_index(1, nodes=[
865
((b'name', ), b'data', ([(b'ref', )], )),
866
((b'ref', ), b'refdata', ([], ))])
867
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
868
(index, (b'ref', ), b'refdata', ((), ))},
869
set(index.iter_entries([(b'name', ), (b'ref', )])))
871
def test_iter_nothing_empty(self):
872
index = self.make_index()
873
self.assertEqual([], list(index.iter_entries([])))
875
def test_iter_missing_entry_empty(self):
876
index = self.make_index()
877
self.assertEqual([], list(index.iter_entries([(b'a', )])))
879
def test_iter_missing_entry_empty_no_size(self):
880
idx = self.make_index()
881
idx = _mod_index.GraphIndex(idx._transport, 'index', None)
882
self.assertEqual([], list(idx.iter_entries([(b'a', )])))
884
def test_iter_key_prefix_1_element_key_None(self):
885
index = self.make_index()
886
self.assertRaises(_mod_index.BadIndexKey, list,
887
index.iter_entries_prefix([(None, )]))
889
def test_iter_key_prefix_wrong_length(self):
890
index = self.make_index()
891
self.assertRaises(_mod_index.BadIndexKey, list,
892
index.iter_entries_prefix([(b'foo', None)]))
893
index = self.make_index(key_elements=2)
894
self.assertRaises(_mod_index.BadIndexKey, list,
895
index.iter_entries_prefix([(b'foo', )]))
896
self.assertRaises(_mod_index.BadIndexKey, list,
897
index.iter_entries_prefix([(b'foo', None, None)]))
899
def test_iter_key_prefix_1_key_element_no_refs(self):
900
index = self.make_index(nodes=[
901
((b'name', ), b'data', ()),
902
((b'ref', ), b'refdata', ())])
903
self.assertEqual({(index, (b'name', ), b'data'),
904
(index, (b'ref', ), b'refdata')},
905
set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))
907
def test_iter_key_prefix_1_key_element_refs(self):
908
index = self.make_index(1, nodes=[
909
((b'name', ), b'data', ([(b'ref', )], )),
910
((b'ref', ), b'refdata', ([], ))])
911
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
912
(index, (b'ref', ), b'refdata', ((), ))},
913
set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))
915
def test_iter_key_prefix_2_key_element_no_refs(self):
916
index = self.make_index(key_elements=2, nodes=[
917
((b'name', b'fin1'), b'data', ()),
918
((b'name', b'fin2'), b'beta', ()),
919
((b'ref', b'erence'), b'refdata', ())])
920
self.assertEqual({(index, (b'name', b'fin1'), b'data'),
921
(index, (b'ref', b'erence'), b'refdata')},
922
set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
923
self.assertEqual({(index, (b'name', b'fin1'), b'data'),
924
(index, (b'name', b'fin2'), b'beta')},
925
set(index.iter_entries_prefix([(b'name', None)])))
927
def test_iter_key_prefix_2_key_element_refs(self):
928
index = self.make_index(1, key_elements=2, nodes=[
929
((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], )),
930
((b'name', b'fin2'), b'beta', ([], )),
931
((b'ref', b'erence'), b'refdata', ([], ))])
932
self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
933
(index, (b'ref', b'erence'), b'refdata', ((), ))},
934
set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
935
self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
936
(index, (b'name', b'fin2'), b'beta', ((), ))},
937
set(index.iter_entries_prefix([(b'name', None)])))
939
def test_key_count_empty(self):
940
index = self.make_index()
941
self.assertEqual(0, index.key_count())
943
def test_key_count_one(self):
944
index = self.make_index(nodes=[((b'name', ), b'', ())])
945
self.assertEqual(1, index.key_count())
947
def test_key_count_two(self):
948
index = self.make_index(nodes=[
949
((b'name', ), b'', ()), ((b'foo', ), b'', ())])
950
self.assertEqual(2, index.key_count())
952
def test_read_and_parse_tracks_real_read_value(self):
953
index = self.make_index(nodes=self.make_nodes(10))
954
del index._transport._activity[:]
955
index._read_and_parse([(0, 200)])
957
('readv', 'index', [(0, 200)], True, index._size),
959
index._transport._activity)
960
# The readv expansion code will expand the initial request to 4096
961
# bytes, which is more than enough to read the entire index, and we
962
# will track the fact that we read that many bytes.
963
self.assertEqual(index._size, index._bytes_read)
965
def test_read_and_parse_triggers_buffer_all(self):
966
index = self.make_index(key_elements=2, nodes=[
967
((b'name', b'fin1'), b'data', ()),
968
((b'name', b'fin2'), b'beta', ()),
969
((b'ref', b'erence'), b'refdata', ())])
970
self.assertTrue(index._size > 0)
971
self.assertIs(None, index._nodes)
972
index._read_and_parse([(0, index._size)])
973
self.assertIsNot(None, index._nodes)
975
def test_validate_bad_index_errors(self):
976
trans = self.get_transport()
977
trans.put_bytes('name', b"not an index\n")
978
idx = _mod_index.GraphIndex(trans, 'name', 13)
979
self.assertRaises(_mod_index.BadIndexFormatSignature, idx.validate)
981
def test_validate_bad_node_refs(self):
982
idx = self.make_index(2)
983
trans = self.get_transport()
984
content = trans.get_bytes('index')
985
# change the options line to end with a rather than a parseable number
986
new_content = content[:-2] + b'a\n\n'
987
trans.put_bytes('index', new_content)
988
self.assertRaises(_mod_index.BadIndexOptions, idx.validate)
990
def test_validate_missing_end_line_empty(self):
991
index = self.make_index(2)
992
trans = self.get_transport()
993
content = trans.get_bytes('index')
994
# truncate the last byte
995
trans.put_bytes('index', content[:-1])
996
self.assertRaises(_mod_index.BadIndexData, index.validate)
998
def test_validate_missing_end_line_nonempty(self):
999
index = self.make_index(2, nodes=[((b'key', ), b'', ([], []))])
1000
trans = self.get_transport()
1001
content = trans.get_bytes('index')
1002
# truncate the last byte
1003
trans.put_bytes('index', content[:-1])
1004
self.assertRaises(_mod_index.BadIndexData, index.validate)
1006
def test_validate_empty(self):
1007
index = self.make_index()
1010
def test_validate_no_refs_content(self):
1011
index = self.make_index(nodes=[((b'key', ), b'value', ())])
1014
# XXX: external_references tests are duplicated in test_btree_index. We
1015
# probably should have per_graph_index tests...
1016
def test_external_references_no_refs(self):
1017
index = self.make_index(ref_lists=0, nodes=[])
1018
self.assertRaises(ValueError, index.external_references, 0)
1020
def test_external_references_no_results(self):
1021
index = self.make_index(ref_lists=1, nodes=[
1022
((b'key',), b'value', ([],))])
1023
self.assertEqual(set(), index.external_references(0))
1025
def test_external_references_missing_ref(self):
1026
missing_key = (b'missing',)
1027
index = self.make_index(ref_lists=1, nodes=[
1028
((b'key',), b'value', ([missing_key],))])
1029
self.assertEqual({missing_key}, index.external_references(0))
1031
def test_external_references_multiple_ref_lists(self):
1032
missing_key = (b'missing',)
1033
index = self.make_index(ref_lists=2, nodes=[
1034
((b'key',), b'value', ([], [missing_key]))])
1035
self.assertEqual(set([]), index.external_references(0))
1036
self.assertEqual({missing_key}, index.external_references(1))
1038
def test_external_references_two_records(self):
1039
index = self.make_index(ref_lists=1, nodes=[
1040
((b'key-1',), b'value', ([(b'key-2',)],)),
1041
((b'key-2',), b'value', ([],)),
1043
self.assertEqual(set([]), index.external_references(0))
1045
def test__find_ancestors(self):
1048
index = self.make_index(ref_lists=1, key_elements=1, nodes=[
1049
(key1, b'value', ([key2],)),
1050
(key2, b'value', ([],)),
1053
missing_keys = set()
1054
search_keys = index._find_ancestors(
1055
[key1], 0, parent_map, missing_keys)
1056
self.assertEqual({key1: (key2,)}, parent_map)
1057
self.assertEqual(set(), missing_keys)
1058
self.assertEqual({key2}, search_keys)
1059
search_keys = index._find_ancestors(search_keys, 0, parent_map,
1061
self.assertEqual({key1: (key2,), key2: ()}, parent_map)
1062
self.assertEqual(set(), missing_keys)
1063
self.assertEqual(set(), search_keys)
1065
def test__find_ancestors_w_missing(self):
1069
index = self.make_index(ref_lists=1, key_elements=1, nodes=[
1070
(key1, b'value', ([key2],)),
1071
(key2, b'value', ([],)),
1074
missing_keys = set()
1075
search_keys = index._find_ancestors([key2, key3], 0, parent_map,
1077
self.assertEqual({key2: ()}, parent_map)
1078
self.assertEqual({key3}, missing_keys)
1079
self.assertEqual(set(), search_keys)
1081
def test__find_ancestors_dont_search_known(self):
1085
index = self.make_index(ref_lists=1, key_elements=1, nodes=[
1086
(key1, b'value', ([key2],)),
1087
(key2, b'value', ([key3],)),
1088
(key3, b'value', ([],)),
1090
# We already know about key2, so we won't try to search for key3
1091
parent_map = {key2: (key3,)}
1092
missing_keys = set()
1093
search_keys = index._find_ancestors([key1], 0, parent_map,
1095
self.assertEqual({key1: (key2,), key2: (key3,)}, parent_map)
1096
self.assertEqual(set(), missing_keys)
1097
self.assertEqual(set(), search_keys)
1099
def test_supports_unlimited_cache(self):
1100
builder = _mod_index.GraphIndexBuilder(0, key_elements=1)
1101
stream = builder.finish()
1102
trans = self.get_transport()
1103
size = trans.put_file('index', stream)
1104
# It doesn't matter what unlimited_cache does here, just that it can be
1106
idx = _mod_index.GraphIndex(trans, 'index', size, unlimited_cache=True)
1109
class TestCombinedGraphIndex(tests.TestCaseWithMemoryTransport):
1111
def make_index(self, name, ref_lists=0, key_elements=1, nodes=[]):
1112
builder = _mod_index.GraphIndexBuilder(
1113
ref_lists, key_elements=key_elements)
1114
for key, value, references in nodes:
1115
builder.add_node(key, value, references)
1116
stream = builder.finish()
1117
trans = self.get_transport()
1118
size = trans.put_file(name, stream)
1119
return _mod_index.GraphIndex(trans, name, size)
1121
def make_combined_index_with_missing(self, missing=['1', '2']):
1122
"""Create a CombinedGraphIndex which will have missing indexes.
1124
This creates a CGI which thinks it has 2 indexes, however they have
1125
been deleted. If CGI._reload_func() is called, then it will repopulate
1128
:param missing: The underlying indexes to delete
1129
:return: (CombinedGraphIndex, reload_counter)
1131
idx1 = self.make_index('1', nodes=[((b'1',), b'', ())])
1132
idx2 = self.make_index('2', nodes=[((b'2',), b'', ())])
1133
idx3 = self.make_index('3', nodes=[
1135
((b'2',), b'', ())])
1137
# total_reloads, num_changed, num_unchanged
1138
reload_counter = [0, 0, 0]
1141
reload_counter[0] += 1
1142
new_indices = [idx3]
1143
if idx._indices == new_indices:
1144
reload_counter[2] += 1
1146
reload_counter[1] += 1
1147
idx._indices[:] = new_indices
1149
idx = _mod_index.CombinedGraphIndex([idx1, idx2], reload_func=reload)
1150
trans = self.get_transport()
1151
for fname in missing:
1153
return idx, reload_counter
1155
def test_open_missing_index_no_error(self):
1156
trans = self.get_transport()
1157
idx1 = _mod_index.GraphIndex(trans, 'missing', 100)
1158
idx = _mod_index.CombinedGraphIndex([idx1])
1160
def test_add_index(self):
1161
idx = _mod_index.CombinedGraphIndex([])
1162
idx1 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
1163
idx.insert_index(0, idx1)
1164
self.assertEqual([(idx1, (b'key', ), b'')],
1165
list(idx.iter_all_entries()))
1167
def test_clear_cache(self):
1170
class ClearCacheProxy(object):
1172
def __init__(self, index):
1175
def __getattr__(self, name):
1176
return getattr(self._index)
1178
def clear_cache(self):
1179
log.append(self._index)
1180
return self._index.clear_cache()
1182
idx = _mod_index.CombinedGraphIndex([])
1183
idx1 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
1184
idx.insert_index(0, ClearCacheProxy(idx1))
1185
idx2 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
1186
idx.insert_index(1, ClearCacheProxy(idx2))
1187
# CombinedGraphIndex should call 'clear_cache()' on all children
1189
self.assertEqual(sorted([idx1, idx2]), sorted(log))
1191
def test_iter_all_entries_empty(self):
1192
idx = _mod_index.CombinedGraphIndex([])
1193
self.assertEqual([], list(idx.iter_all_entries()))
1195
def test_iter_all_entries_children_empty(self):
1196
idx1 = self.make_index('name')
1197
idx = _mod_index.CombinedGraphIndex([idx1])
1198
self.assertEqual([], list(idx.iter_all_entries()))
1200
def test_iter_all_entries_simple(self):
1201
idx1 = self.make_index('name', nodes=[((b'name', ), b'data', ())])
1202
idx = _mod_index.CombinedGraphIndex([idx1])
1203
self.assertEqual([(idx1, (b'name', ), b'data')],
1204
list(idx.iter_all_entries()))
1206
def test_iter_all_entries_two_indices(self):
1207
idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
1208
idx2 = self.make_index('name2', nodes=[((b'2', ), b'', ())])
1209
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1210
self.assertEqual([(idx1, (b'name', ), b'data'),
1211
(idx2, (b'2', ), b'')],
1212
list(idx.iter_all_entries()))
1214
def test_iter_entries_two_indices_dup_key(self):
1215
idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
1216
idx2 = self.make_index('name2', nodes=[((b'name', ), b'data', ())])
1217
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1218
self.assertEqual([(idx1, (b'name', ), b'data')],
1219
list(idx.iter_entries([(b'name', )])))
1221
def test_iter_all_entries_two_indices_dup_key(self):
1222
idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
1223
idx2 = self.make_index('name2', nodes=[((b'name', ), b'data', ())])
1224
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1225
self.assertEqual([(idx1, (b'name', ), b'data')],
1226
list(idx.iter_all_entries()))
1228
def test_iter_key_prefix_2_key_element_refs(self):
1229
idx1 = self.make_index('1', 1, key_elements=2, nodes=[
1230
((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], ))])
1231
idx2 = self.make_index('2', 1, key_elements=2, nodes=[
1232
((b'name', b'fin2'), b'beta', ([], )),
1233
((b'ref', b'erence'), b'refdata', ([], ))])
1234
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1235
self.assertEqual({(idx1, (b'name', b'fin1'), b'data',
1236
(((b'ref', b'erence'),),)),
1237
(idx2, (b'ref', b'erence'), b'refdata', ((), ))},
1238
set(idx.iter_entries_prefix([(b'name', b'fin1'),
1239
(b'ref', b'erence')])))
1240
self.assertEqual({(idx1, (b'name', b'fin1'), b'data',
1241
(((b'ref', b'erence'),),)),
1242
(idx2, (b'name', b'fin2'), b'beta', ((), ))},
1243
set(idx.iter_entries_prefix([(b'name', None)])))
1245
def test_iter_nothing_empty(self):
1246
idx = _mod_index.CombinedGraphIndex([])
1247
self.assertEqual([], list(idx.iter_entries([])))
1249
def test_iter_nothing_children_empty(self):
1250
idx1 = self.make_index('name')
1251
idx = _mod_index.CombinedGraphIndex([idx1])
1252
self.assertEqual([], list(idx.iter_entries([])))
1254
def test_iter_all_keys(self):
1255
idx1 = self.make_index('1', 1, nodes=[((b'name', ), b'data',
1257
idx2 = self.make_index(
1258
'2', 1, nodes=[((b'ref', ), b'refdata', ((), ))])
1259
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1260
self.assertEqual({(idx1, (b'name', ), b'data', (((b'ref', ), ), )),
1261
(idx2, (b'ref', ), b'refdata', ((), ))},
1262
set(idx.iter_entries([(b'name', ), (b'ref', )])))
1264
def test_iter_all_keys_dup_entry(self):
1265
idx1 = self.make_index('1', 1, nodes=[((b'name', ), b'data',
1267
((b'ref', ), b'refdata', ([], ))])
1268
idx2 = self.make_index(
1269
'2', 1, nodes=[((b'ref', ), b'refdata', ([], ))])
1270
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1271
self.assertEqual({(idx1, (b'name', ), b'data', (((b'ref',),),)),
1272
(idx1, (b'ref', ), b'refdata', ((), ))},
1273
set(idx.iter_entries([(b'name', ), (b'ref', )])))
1275
def test_iter_missing_entry_empty(self):
1276
idx = _mod_index.CombinedGraphIndex([])
1277
self.assertEqual([], list(idx.iter_entries([('a', )])))
1279
def test_iter_missing_entry_one_index(self):
1280
idx1 = self.make_index('1')
1281
idx = _mod_index.CombinedGraphIndex([idx1])
1282
self.assertEqual([], list(idx.iter_entries([(b'a', )])))
1284
def test_iter_missing_entry_two_index(self):
1285
idx1 = self.make_index('1')
1286
idx2 = self.make_index('2')
1287
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1288
self.assertEqual([], list(idx.iter_entries([('a', )])))
1290
def test_iter_entry_present_one_index_only(self):
1291
idx1 = self.make_index('1', nodes=[((b'key', ), b'', ())])
1292
idx2 = self.make_index('2', nodes=[])
1293
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1294
self.assertEqual([(idx1, (b'key', ), b'')],
1295
list(idx.iter_entries([(b'key', )])))
1296
# and in the other direction
1297
idx = _mod_index.CombinedGraphIndex([idx2, idx1])
1298
self.assertEqual([(idx1, (b'key', ), b'')],
1299
list(idx.iter_entries([(b'key', )])))
1301
def test_key_count_empty(self):
1302
idx1 = self.make_index('1', nodes=[])
1303
idx2 = self.make_index('2', nodes=[])
1304
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1305
self.assertEqual(0, idx.key_count())
1307
def test_key_count_sums_index_keys(self):
1308
idx1 = self.make_index('1', nodes=[
1310
((b'2',), b'', ())])
1311
idx2 = self.make_index('2', nodes=[((b'1',), b'', ())])
1312
idx = _mod_index.CombinedGraphIndex([idx1, idx2])
1313
self.assertEqual(3, idx.key_count())
1315
def test_validate_bad_child_index_errors(self):
1316
trans = self.get_transport()
1317
trans.put_bytes('name', b"not an index\n")
1318
idx1 = _mod_index.GraphIndex(trans, 'name', 13)
1319
idx = _mod_index.CombinedGraphIndex([idx1])
1320
self.assertRaises(_mod_index.BadIndexFormatSignature, idx.validate)
1322
def test_validate_empty(self):
1323
idx = _mod_index.CombinedGraphIndex([])
1326
def test_key_count_reloads(self):
1327
idx, reload_counter = self.make_combined_index_with_missing()
1328
self.assertEqual(2, idx.key_count())
1329
self.assertEqual([1, 1, 0], reload_counter)
1331
def test_key_count_no_reload(self):
1332
idx, reload_counter = self.make_combined_index_with_missing()
1333
idx._reload_func = None
1334
# Without a _reload_func we just raise the exception
1335
self.assertRaises(errors.NoSuchFile, idx.key_count)
1337
def test_key_count_reloads_and_fails(self):
1338
# We have deleted all underlying indexes, so we will try to reload, but
1339
# still fail. This is mostly to test we don't get stuck in an infinite
1340
# loop trying to reload
1341
idx, reload_counter = self.make_combined_index_with_missing(
1343
self.assertRaises(errors.NoSuchFile, idx.key_count)
1344
self.assertEqual([2, 1, 1], reload_counter)
1346
def test_iter_entries_reloads(self):
1347
index, reload_counter = self.make_combined_index_with_missing()
1348
result = list(index.iter_entries([(b'1',), (b'2',), (b'3',)]))
1349
index3 = index._indices[0]
1350
self.assertEqual({(index3, (b'1',), b''), (index3, (b'2',), b'')},
1352
self.assertEqual([1, 1, 0], reload_counter)
1354
def test_iter_entries_reloads_midway(self):
1355
# The first index still looks present, so we get interrupted mid-way
1357
index, reload_counter = self.make_combined_index_with_missing(['2'])
1358
index1, index2 = index._indices
1359
result = list(index.iter_entries([(b'1',), (b'2',), (b'3',)]))
1360
index3 = index._indices[0]
1361
# We had already yielded b'1', so we just go on to the next, we should
1362
# not yield b'1' twice.
1363
self.assertEqual([(index1, (b'1',), b''), (index3, (b'2',), b'')],
1365
self.assertEqual([1, 1, 0], reload_counter)
1367
def test_iter_entries_no_reload(self):
1368
index, reload_counter = self.make_combined_index_with_missing()
1369
index._reload_func = None
1370
# Without a _reload_func we just raise the exception
1371
self.assertListRaises(errors.NoSuchFile, index.iter_entries, [('3',)])
1373
def test_iter_entries_reloads_and_fails(self):
1374
index, reload_counter = self.make_combined_index_with_missing(
1376
self.assertListRaises(errors.NoSuchFile, index.iter_entries, [('3',)])
1377
self.assertEqual([2, 1, 1], reload_counter)
1379
def test_iter_all_entries_reloads(self):
1380
index, reload_counter = self.make_combined_index_with_missing()
1381
result = list(index.iter_all_entries())
1382
index3 = index._indices[0]
1383
self.assertEqual({(index3, (b'1',), b''), (index3, (b'2',), b'')},
1385
self.assertEqual([1, 1, 0], reload_counter)
1387
def test_iter_all_entries_reloads_midway(self):
1388
index, reload_counter = self.make_combined_index_with_missing(['2'])
1389
index1, index2 = index._indices
1390
result = list(index.iter_all_entries())
1391
index3 = index._indices[0]
1392
# We had already yielded '1', so we just go on to the next, we should
1393
# not yield '1' twice.
1394
self.assertEqual([(index1, (b'1',), b''), (index3, (b'2',), b'')],
1396
self.assertEqual([1, 1, 0], reload_counter)
1398
def test_iter_all_entries_no_reload(self):
1399
index, reload_counter = self.make_combined_index_with_missing()
1400
index._reload_func = None
1401
self.assertListRaises(errors.NoSuchFile, index.iter_all_entries)
1403
def test_iter_all_entries_reloads_and_fails(self):
1404
index, reload_counter = self.make_combined_index_with_missing(
1406
self.assertListRaises(errors.NoSuchFile, index.iter_all_entries)
1408
def test_iter_entries_prefix_reloads(self):
1409
index, reload_counter = self.make_combined_index_with_missing()
1410
result = list(index.iter_entries_prefix([(b'1',)]))
1411
index3 = index._indices[0]
1412
self.assertEqual([(index3, (b'1',), b'')], result)
1413
self.assertEqual([1, 1, 0], reload_counter)
1415
def test_iter_entries_prefix_reloads_midway(self):
1416
index, reload_counter = self.make_combined_index_with_missing(['2'])
1417
index1, index2 = index._indices
1418
result = list(index.iter_entries_prefix([(b'1',)]))
1419
index3 = index._indices[0]
1420
# We had already yielded b'1', so we just go on to the next, we should
1421
# not yield b'1' twice.
1422
self.assertEqual([(index1, (b'1',), b'')], result)
1423
self.assertEqual([1, 1, 0], reload_counter)
1425
def test_iter_entries_prefix_no_reload(self):
1426
index, reload_counter = self.make_combined_index_with_missing()
1427
index._reload_func = None
1428
self.assertListRaises(errors.NoSuchFile, index.iter_entries_prefix,
1431
def test_iter_entries_prefix_reloads_and_fails(self):
1432
index, reload_counter = self.make_combined_index_with_missing(
1434
self.assertListRaises(errors.NoSuchFile, index.iter_entries_prefix,
1437
def make_index_with_simple_nodes(self, name, num_nodes=1):
1438
"""Make an index named after 'name', with keys named after 'name' too.
1440
Nodes will have a value of '' and no references.
1443
((('index-%s-key-%s' % (name, n)).encode('ascii'),), b'', ())
1444
for n in range(1, num_nodes + 1)]
1445
return self.make_index('index-%s' % name, 0, nodes=nodes)
1447
def test_reorder_after_iter_entries(self):
1448
# Four indices: [key1] in idx1, [key2,key3] in idx2, [] in idx3,
1450
idx = _mod_index.CombinedGraphIndex([])
1451
idx.insert_index(0, self.make_index_with_simple_nodes('1'), b'1')
1452
idx.insert_index(1, self.make_index_with_simple_nodes('2'), b'2')
1453
idx.insert_index(2, self.make_index_with_simple_nodes('3'), b'3')
1454
idx.insert_index(3, self.make_index_with_simple_nodes('4'), b'4')
1455
idx1, idx2, idx3, idx4 = idx._indices
1456
# Query a key from idx4 and idx2.
1457
self.assertLength(2, list(idx.iter_entries(
1458
[(b'index-4-key-1',), (b'index-2-key-1',)])))
1459
# Now idx2 and idx4 should be moved to the front (and idx1 should
1460
# still be before idx3).
1461
self.assertEqual([idx2, idx4, idx1, idx3], idx._indices)
1462
self.assertEqual([b'2', b'4', b'1', b'3'], idx._index_names)
1464
def test_reorder_propagates_to_siblings(self):
1465
# Two CombinedGraphIndex objects, with the same number of indicies with
1467
cgi1 = _mod_index.CombinedGraphIndex([])
1468
cgi2 = _mod_index.CombinedGraphIndex([])
1469
cgi1.insert_index(0, self.make_index_with_simple_nodes('1-1'), 'one')
1470
cgi1.insert_index(1, self.make_index_with_simple_nodes('1-2'), 'two')
1471
cgi2.insert_index(0, self.make_index_with_simple_nodes('2-1'), 'one')
1472
cgi2.insert_index(1, self.make_index_with_simple_nodes('2-2'), 'two')
1473
index2_1, index2_2 = cgi2._indices
1474
cgi1.set_sibling_indices([cgi2])
1475
# Trigger a reordering in cgi1. cgi2 will be reordered as well.
1476
list(cgi1.iter_entries([(b'index-1-2-key-1',)]))
1477
self.assertEqual([index2_2, index2_1], cgi2._indices)
1478
self.assertEqual(['two', 'one'], cgi2._index_names)
1480
def test_validate_reloads(self):
1481
idx, reload_counter = self.make_combined_index_with_missing()
1483
self.assertEqual([1, 1, 0], reload_counter)
1485
def test_validate_reloads_midway(self):
1486
idx, reload_counter = self.make_combined_index_with_missing(['2'])
1489
def test_validate_no_reload(self):
1490
idx, reload_counter = self.make_combined_index_with_missing()
1491
idx._reload_func = None
1492
self.assertRaises(errors.NoSuchFile, idx.validate)
1494
def test_validate_reloads_and_fails(self):
1495
idx, reload_counter = self.make_combined_index_with_missing(
1497
self.assertRaises(errors.NoSuchFile, idx.validate)
1499
def test_find_ancestors_across_indexes(self):
1504
index1 = self.make_index('12', ref_lists=1, nodes=[
1505
(key1, b'value', ([],)),
1506
(key2, b'value', ([key1],)),
1508
index2 = self.make_index('34', ref_lists=1, nodes=[
1509
(key3, b'value', ([key2],)),
1510
(key4, b'value', ([key3],)),
1512
c_index = _mod_index.CombinedGraphIndex([index1, index2])
1513
parent_map, missing_keys = c_index.find_ancestry([key1], 0)
1514
self.assertEqual({key1: ()}, parent_map)
1515
self.assertEqual(set(), missing_keys)
1516
# Now look for a key from index2 which requires us to find the key in
1517
# the second index, and then continue searching for parents in the
1519
parent_map, missing_keys = c_index.find_ancestry([key3], 0)
1520
self.assertEqual({key1: (), key2: (key1,), key3: (key2,)}, parent_map)
1521
self.assertEqual(set(), missing_keys)
1523
def test_find_ancestors_missing_keys(self):
1528
index1 = self.make_index('12', ref_lists=1, nodes=[
1529
(key1, b'value', ([],)),
1530
(key2, b'value', ([key1],)),
1532
index2 = self.make_index('34', ref_lists=1, nodes=[
1533
(key3, b'value', ([key2],)),
1535
c_index = _mod_index.CombinedGraphIndex([index1, index2])
1536
# Searching for a key which is actually not present at all should
1537
# eventually converge
1538
parent_map, missing_keys = c_index.find_ancestry([key4], 0)
1539
self.assertEqual({}, parent_map)
1540
self.assertEqual({key4}, missing_keys)
1542
def test_find_ancestors_no_indexes(self):
1543
c_index = _mod_index.CombinedGraphIndex([])
1545
parent_map, missing_keys = c_index.find_ancestry([key1], 0)
1546
self.assertEqual({}, parent_map)
1547
self.assertEqual({key1}, missing_keys)
1549
def test_find_ancestors_ghost_parent(self):
1554
index1 = self.make_index('12', ref_lists=1, nodes=[
1555
(key1, b'value', ([],)),
1556
(key2, b'value', ([key1],)),
1558
index2 = self.make_index('34', ref_lists=1, nodes=[
1559
(key4, b'value', ([key2, key3],)),
1561
c_index = _mod_index.CombinedGraphIndex([index1, index2])
1562
# Searching for a key which is actually not present at all should
1563
# eventually converge
1564
parent_map, missing_keys = c_index.find_ancestry([key4], 0)
1565
self.assertEqual({key4: (key2, key3), key2: (key1,), key1: ()},
1567
self.assertEqual({key3}, missing_keys)
1569
def test__find_ancestors_empty_index(self):
1570
idx = self.make_index('test', ref_lists=1, key_elements=1, nodes=[])
1572
missing_keys = set()
1573
search_keys = idx._find_ancestors([(b'one',), (b'two',)], 0, parent_map,
1575
self.assertEqual(set(), search_keys)
1576
self.assertEqual({}, parent_map)
1577
self.assertEqual({(b'one',), (b'two',)}, missing_keys)
1580
class TestInMemoryGraphIndex(tests.TestCaseWithMemoryTransport):
1582
def make_index(self, ref_lists=0, key_elements=1, nodes=[]):
1583
result = _mod_index.InMemoryGraphIndex(
1584
ref_lists, key_elements=key_elements)
1585
result.add_nodes(nodes)
1588
def test_add_nodes_no_refs(self):
1589
index = self.make_index(0)
1590
index.add_nodes([((b'name', ), b'data')])
1591
index.add_nodes([((b'name2', ), b''), ((b'name3', ), b'')])
1593
(index, (b'name', ), b'data'),
1594
(index, (b'name2', ), b''),
1595
(index, (b'name3', ), b''),
1596
}, set(index.iter_all_entries()))
1598
def test_add_nodes(self):
1599
index = self.make_index(1)
1600
index.add_nodes([((b'name', ), b'data', ([],))])
1601
index.add_nodes([((b'name2', ), b'', ([],)),
1602
((b'name3', ), b'', ([(b'r', )],))])
1604
(index, (b'name', ), b'data', ((),)),
1605
(index, (b'name2', ), b'', ((),)),
1606
(index, (b'name3', ), b'', (((b'r', ), ), )),
1607
}, set(index.iter_all_entries()))
1609
def test_iter_all_entries_empty(self):
1610
index = self.make_index()
1611
self.assertEqual([], list(index.iter_all_entries()))
1613
def test_iter_all_entries_simple(self):
1614
index = self.make_index(nodes=[((b'name', ), b'data')])
1615
self.assertEqual([(index, (b'name', ), b'data')],
1616
list(index.iter_all_entries()))
1618
def test_iter_all_entries_references(self):
1619
index = self.make_index(1, nodes=[
1620
((b'name', ), b'data', ([(b'ref', )], )),
1621
((b'ref', ), b'refdata', ([], ))])
1622
self.assertEqual({(index, (b'name', ), b'data', (((b'ref', ),),)),
1623
(index, (b'ref', ), b'refdata', ((), ))},
1624
set(index.iter_all_entries()))
1626
def test_iteration_absent_skipped(self):
1627
index = self.make_index(1, nodes=[
1628
((b'name', ), b'data', ([(b'ref', )], ))])
1629
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
1630
set(index.iter_all_entries()))
1631
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
1632
set(index.iter_entries([(b'name', )])))
1633
self.assertEqual([], list(index.iter_entries([(b'ref', )])))
1635
def test_iter_all_keys(self):
1636
index = self.make_index(1, nodes=[
1637
((b'name', ), b'data', ([(b'ref', )], )),
1638
((b'ref', ), b'refdata', ([], ))])
1639
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
1640
(index, (b'ref', ), b'refdata', ((), ))},
1641
set(index.iter_entries([(b'name', ), (b'ref', )])))
1643
def test_iter_key_prefix_1_key_element_no_refs(self):
1644
index = self.make_index(nodes=[
1645
((b'name', ), b'data'),
1646
((b'ref', ), b'refdata')])
1647
self.assertEqual({(index, (b'name', ), b'data'),
1648
(index, (b'ref', ), b'refdata')},
1649
set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))
1651
def test_iter_key_prefix_1_key_element_refs(self):
1652
index = self.make_index(1, nodes=[
1653
((b'name', ), b'data', ([(b'ref', )], )),
1654
((b'ref', ), b'refdata', ([], ))])
1655
self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
1656
(index, (b'ref', ), b'refdata', ((), ))},
1657
set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))
1659
def test_iter_key_prefix_2_key_element_no_refs(self):
1660
index = self.make_index(key_elements=2, nodes=[
1661
((b'name', b'fin1'), b'data'),
1662
((b'name', b'fin2'), b'beta'),
1663
((b'ref', b'erence'), b'refdata')])
1664
self.assertEqual({(index, (b'name', b'fin1'), b'data'),
1665
(index, (b'ref', b'erence'), b'refdata')},
1666
set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
1667
self.assertEqual({(index, (b'name', b'fin1'), b'data'),
1668
(index, (b'name', b'fin2'), b'beta')},
1669
set(index.iter_entries_prefix([(b'name', None)])))
1671
def test_iter_key_prefix_2_key_element_refs(self):
1672
index = self.make_index(1, key_elements=2, nodes=[
1673
((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], )),
1674
((b'name', b'fin2'), b'beta', ([], )),
1675
((b'ref', b'erence'), b'refdata', ([], ))])
1676
self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
1677
(index, (b'ref', b'erence'), b'refdata', ((), ))},
1678
set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
1679
self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
1680
(index, (b'name', b'fin2'), b'beta', ((), ))},
1681
set(index.iter_entries_prefix([(b'name', None)])))
1683
def test_iter_nothing_empty(self):
1684
index = self.make_index()
1685
self.assertEqual([], list(index.iter_entries([])))
1687
def test_iter_missing_entry_empty(self):
1688
index = self.make_index()
1689
self.assertEqual([], list(index.iter_entries([b'a'])))
1691
def test_key_count_empty(self):
1692
index = self.make_index()
1693
self.assertEqual(0, index.key_count())
1695
def test_key_count_one(self):
1696
index = self.make_index(nodes=[((b'name', ), b'')])
1697
self.assertEqual(1, index.key_count())
1699
def test_key_count_two(self):
1700
index = self.make_index(nodes=[((b'name', ), b''), ((b'foo', ), b'')])
1701
self.assertEqual(2, index.key_count())
1703
def test_validate_empty(self):
1704
index = self.make_index()
1707
def test_validate_no_refs_content(self):
1708
index = self.make_index(nodes=[((b'key', ), b'value')])
1712
class TestGraphIndexPrefixAdapter(tests.TestCaseWithMemoryTransport):
    """Tests for GraphIndexPrefixAdapter over an InMemoryGraphIndex.

    The adapter hides a fixed leading key element (b'prefix',) so callers
    work with keys one element shorter; these tests check that adding,
    iterating, counting and validating translate keys in both directions.
    """

    def make_index(self, ref_lists=1, key_elements=2, nodes=None,
                   add_callback=False):
        """Build a backing InMemoryGraphIndex and a prefix adapter over it.

        :param ref_lists: Number of reference lists in the backing index.
        :param key_elements: Key width of the backing index; the adapter
            exposes keys one element shorter.
        :param nodes: Nodes to pre-populate the backing index with.
        :param add_callback: If True, wire the adapter's add_nodes_callback
            to the backing index so adapter.add_node(s) can write through.
        :return: A (backing_index, adapter) tuple.
        """
        # Avoid a mutable default argument; None means "no nodes".
        if nodes is None:
            nodes = []
        result = _mod_index.InMemoryGraphIndex(
            ref_lists, key_elements=key_elements)
        result.add_nodes(nodes)
        if add_callback:
            add_nodes_callback = result.add_nodes
        else:
            add_nodes_callback = None
        adapter = _mod_index.GraphIndexPrefixAdapter(
            result, (b'prefix', ), key_elements - 1,
            add_nodes_callback=add_nodes_callback)
        return result, adapter

    def test_add_node(self):
        """add_node prepends the prefix to the key and to reference keys."""
        index, adapter = self.make_index(add_callback=True)
        adapter.add_node((b'key',), b'value', (((b'ref',),),))
        self.assertEqual({(index, (b'prefix', b'key'), b'value',
                           (((b'prefix', b'ref'),),))},
                         set(index.iter_all_entries()))

    def test_add_nodes(self):
        """add_nodes prefixes every node's key and reference keys.

        NOTE(review): the add_nodes((...)) call wrapper was dropped by the
        source mangling; reconstructed from the visible node tuples.
        """
        index, adapter = self.make_index(add_callback=True)
        adapter.add_nodes((
            ((b'key',), b'value', (((b'ref',),),)),
            ((b'key2',), b'value2', ((),)),
            ))
        self.assertEqual({
            (index, (b'prefix', b'key2'), b'value2', ((),)),
            (index, (b'prefix', b'key'), b'value', (((b'prefix', b'ref'),),))
            },
            set(index.iter_all_entries()))

    def test_construct(self):
        """The adapter can be constructed without an add callback."""
        idx = _mod_index.InMemoryGraphIndex()
        adapter = _mod_index.GraphIndexPrefixAdapter(idx, (b'prefix', ), 1)

    def test_construct_with_callback(self):
        """The adapter can be constructed with an add callback.

        NOTE(review): the callback argument was dropped by the source
        mangling; presumably idx.add_nodes — confirm against upstream.
        """
        idx = _mod_index.InMemoryGraphIndex()
        adapter = _mod_index.GraphIndexPrefixAdapter(idx, (b'prefix', ), 1,
                                                     idx.add_nodes)

    def test_iter_all_entries_cross_prefix_map_errors(self):
        """A reference pointing outside the prefix raises BadIndexData."""
        index, adapter = self.make_index(nodes=[
            ((b'prefix', b'key1'), b'data1', (((b'prefixaltered', b'key2'),),))])
        self.assertRaises(_mod_index.BadIndexData, list,
                          adapter.iter_all_entries())

    def test_iter_all_entries(self):
        """iter_all_entries strips the prefix and skips other prefixes."""
        index, adapter = self.make_index(nodes=[
            ((b'notprefix', b'key1'), b'data', ((), )),
            ((b'prefix', b'key1'), b'data1', ((), )),
            ((b'prefix', b'key2'), b'data2', (((b'prefix', b'key1'),),))])
        self.assertEqual({(index, (b'key1', ), b'data1', ((),)),
                          (index, (b'key2', ), b'data2', (((b'key1',),),))},
                         set(adapter.iter_all_entries()))

    def test_iter_entries(self):
        """iter_entries maps short keys onto prefixed backing keys."""
        index, adapter = self.make_index(nodes=[
            ((b'notprefix', b'key1'), b'data', ((), )),
            ((b'prefix', b'key1'), b'data1', ((), )),
            ((b'prefix', b'key2'), b'data2', (((b'prefix', b'key1'),),))])
        # ask for many - get all
        self.assertEqual({(index, (b'key1', ), b'data1', ((),)),
                          (index, (b'key2', ), b'data2', (((b'key1', ),),))},
                         set(adapter.iter_entries([(b'key1', ), (b'key2', )])))
        # ask for one, get one
        self.assertEqual({(index, (b'key1', ), b'data1', ((),))},
                         set(adapter.iter_entries([(b'key1', )])))
        # ask for missing, get none
        self.assertEqual(set(),
                         set(adapter.iter_entries([(b'key3', )])))