# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from gzip import GzipFile

from .. import (
    errors,
    graph as _mod_graph,
    )
from ..errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from ..bzr.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from ..sixish import (
    BytesIO,
    zip,
    )
from . import (
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    TestSkipped,
    )
from .http_utils import TestCaseWithWebserver
from ..transport.memory import MemoryTransport
from ..bzr import versionedfile as versionedfile
from ..bzr.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from ..bzr.weave import (
    WeaveFile,
    )
from ..bzr.weavefile import write_weave
from .scenarios import load_tests_apply_scenarios


load_tests = load_tests_apply_scenarios
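
# load_tests_apply_scenarios multiplies each test class that defines a
# `scenarios` attribute: every (name, parameters) pair produces one test
# instance with the parameter dict bound as attributes. A minimal sketch of
# the pattern (hypothetical class, assuming testscenarios-style behaviour):
#
#     class TestFoo(TestCase):
#         scenarios = [('one', {'value': 1}), ('two', {'value': 2})]
#
#         def test_value(self):
#             self.assertIn(self.value, (1, 2))  # runs once per scenario
#
# The scenario name is appended to the generated test id, e.g. "test_value(one)".
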
def get_diamond_vf(f, trailing_eol=True, left_only=False):

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        try:
            _, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
                [b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts[b'r1'] = f.add_lines(b'r1',
                [b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'],
                    parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts[b'r0'])
        self.assertNotEqual(None, parent_texts[b'r1'])

        def verify_file(f):
            versions = f.versions()
            self.assertTrue(b'r0' in versions)
            self.assertTrue(b'r1' in versions)
            self.assertTrue(b'r2' in versions)
            self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
            self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
            self.assertEqual(f.get_lines(b'r2'), [b'c\n', b'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate(b'r1')
            self.assertEqual(origins[0][0], b'r0')
            self.assertEqual(origins[1][0], b'r1')
            origins = f.annotate(b'r2')
            self.assertEqual(origins[0][0], b'r1')
            self.assertEqual(origins[1][0], b'r2')

        verify_file(f)
        f = self.reopen_file()
        verify_file(f)

    def test_add_follows_left_matching_blocks(self):
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines(b'1', [], [b'a\n'])
        vf.add_lines(b'2', [b'1'], [b'a\n', b'a\n', b'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'2'))
        vf.add_lines(b'3', [b'1'], [b'a\n', b'a\n', b'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'3'))
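
    # The left_matching_blocks values above appear to follow difflib's
    # SequenceMatcher convention of (left_start, right_start, length)
    # triples: (0, 2, 1) asserts that line 0 of the parent text matches
    # line 2 of the new text for one line, constraining which lines the
    # delta may reuse.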

    def test_inline_newline_throws(self):
        # inline \n characters are not permitted in lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
                          vf.add_lines, b'a', [], [b'a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])
        # but inline CR's are allowed
        vf.add_lines(b'a', [], [b'a\r\n'])
        try:
            vf.add_lines_with_ghosts(b'b', [], [b'a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
                          vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises ExistingContent."""
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, version + b"2", [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + b"2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises ExistingContent."""
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts(b'text', [b'ghost'], [b'line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent,
                          vf.make_mpdiffs, [b'ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version(b'base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines(b'base', [], [b'line\n'])
        f.add_lines(b'noeol', [b'base'], [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines(b'noeolsecond', [b'noeol'], [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        f.add_lines(b'noeolnotshared', [b'noeolsecond'], [b'line\n', b'phone'])
        # add eol following a noneol parent, change content
        f.add_lines(b'eol', [b'noeol'], [b'phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines(b'eolline', [b'noeol'], [b'line\n'])
        # noeol with no parents:
        f.add_lines(b'noeolbase', [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines(b'eolbeforefirstparent', [
                    b'noeolbase', b'noeol'], [b'line'])
        # two identical no-eol texts
        f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
        next_parent = b'base'
        text_name = b'chain1-'
        text = [b'line\n']
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + b'%d' % depth
            text = text + [b'line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = b'base'
        text_name = b'chain2-'
        text = [b'line\n']
        for depth in range(26):
            new_version = text_name + b'%d' % depth
            text = text + [b'line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s
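
    # Context for the two chains above: knit-style storage keeps most texts
    # as deltas against a parent but bounds how long a delta chain may grow,
    # inserting a fresh fulltext once the bound is hit; the 26-deep chains
    # are intended (per the comment at the top of this helper) to trip that
    # threshold. The sha1s dict pins the expected content hash at each depth.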

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
        f.add_lines(b'r2', [b'r0'], [b'b\n', b'c\n'])
        f.add_lines(b'r3', [b'r2'], [b'b\n', b'c\n'])
        f.add_lines(b'rM', [b'r1', b'r2'], [b'b\n', b'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry([b'rM'])
        # there are some possibilities:
        # so we check indexes
        r0 = versions.index(b'r0')
        r1 = versions.index(b'r1')
        r2 = versions.index(b'r2')
        self.assertFalse(b'r3' in versions)
        rM = versions.index(b'rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
                          f.get_ancestry, [b'rM', b'rX'])

        self.assertEqual(set(f.get_ancestry(b'rM')),
                         set(f.get_ancestry(b'rM', topo_sorted=False)))
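
    # get_ancestry returns a topological sort, so the assertions above check
    # relative positions rather than one exact ordering: any result with
    # r0 < r1, r0 < r2, r1 < rM and r2 < rM is acceptable, and r3 (not an
    # ancestor of rM) must be absent.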

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, b'', [], [])
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines_with_ghosts, b'', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines(b'0', [], [b'a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r0': ()}, f.get_parent_map([b'r0']))
        f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
        self.assertEqual(
            {b'r0': (),
             b'r1': (b'r0',)},
            f.get_parent_map([b'r0', b'r1']))
        f.add_lines(b'r2', [], [b'a\n', b'b\n'])
        f.add_lines(b'r3', [], [b'a\n', b'b\n'])
        f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
        self.assertEqual({}, f.get_parent_map(b'y'))
        self.assertEqual(
            {b'r0': (),
             b'r1': (b'r0',)},
            f.get_parent_map([b'r0', b'y', b'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'c\n', b'b\n'])
        origins = f.annotate(b'r1')
        self.assertEqual(origins[0][0], b'r1')
        self.assertEqual(origins[1][0], b'r0')

        self.assertRaises(RevisionNotPresent,
                          f.annotate, b'foo')

    def test_detection(self):
        # Test weaves detect corruption.

    def test_iter_lines_added_or_present_in_versions(self):
        vf = self.get_file()
        # add a base to get included
        vf.add_lines(b'base', [], [b'base\n'])
        # add an ancestor to be included on one side
        vf.add_lines(b'lancestor', [], [b'lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines(b'rancestor', [b'base'], [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines(b'child', [b'rancestor'], [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines(b'otherchild',
                     [b'lancestor', b'base'],
                     [b'base\n', b'lancestor\n', b'otherchild\n'])

        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                                                                   pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions([b'child', b'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[(b'child\n', b'child')] > 0)
        self.assertTrue(lines[(b'otherchild\n', b'otherchild')] > 0)
        # we don't care if we got more than that.
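
    # iter_lines_added_or_present_in_versions yields (line, version) pairs,
    # which is why the counts above are keyed by tuples such as
    # (b'child\n', b'child'); a line may legitimately be yielded more than
    # once, so the test only requires a positive count.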

    def test_add_lines_with_ghosts(self):
        vf = self.get_file()
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                              vf.get_ancestry_with_ghosts, [b'foo'])
            self.assertRaises(NotImplementedError,
                              vf.get_parents_with_ghosts, b'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual([b'notbxbfse'], vf.get_ancestry(b'notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry([b'notbxbfse']))
        self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
                         vf.get_parent_map([b'notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))
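
    # Summarising the contract exercised above: the plain apis
    # (get_ancestry, get_parent_map, has_version) never report ghosts, the
    # _with_ghosts variants do, and once the ghost's text is actually added
    # it stops being a ghost for both families of api.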

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        vf.add_lines(b'a', [], [b'a\n'])
        # the same file, different metadata
        vf.add_lines(b'b', [b'a'], [b'a\n'])
        # a file differing only in last newline.
        vf.add_lines(b'c', [], [b'a'])
        self.assertEqual({
            b'a': b'3f786850e387550fdab836ed7e6dc881de23001b',
            b'c': b'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            b'b': b'3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s([b'a', b'c', b'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, self.get_transport(),
                         create=True,
                         get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', self.get_transport(),
                      create=True,
                      get_scope=self.get_transaction)
        w.add_lines(b'v1', [], [b'hello\n'])
        w.add_lines(b'v2', [b'v1'], [b'hello\n', b'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([(b'{', 0), b'hello\n', (b'}', None),
                          (b'{', 1), b'there\n', (b'}', None),
                          ], w._weave)

        self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f',
                          b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef',
                          ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = b'There\n'
        return w
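
    # For reference when reading the corruption above: w._weave is the raw
    # weave instruction stream, where (b'{', v) opens the insertion block of
    # version v, (b'}', None) closes it, and bare byte strings are text
    # lines. Overwriting index 4 therefore changes v2's text without
    # touching the recorded sha1s, which is exactly what the detection
    # tests look for.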

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = b'there\n'
        self.assertEqual(b'hello\nthere\n', w.get_text(b'v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, self.get_transport(),
                         create=create,
                         get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          self.get_transport(),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        super(TestPlanMergeVersionedFile, self).setUp()
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile(b'root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), [], None)
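
    # The keys accepted above reflect _PlanMergeVersionedFile's convention
    # (as exercised by this test): version ids must carry a ':' suffix,
    # marking them as temporary, uncommitted merge-plan texts, and parents
    # and lines must be real (possibly empty) sequences, never None.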

    def setup_abcde(self):
        self.vf1.add_lines((b'root', b'A'), [], [b'a'])
        self.vf1.add_lines((b'root', b'B'), [(b'root', b'A')], [b'b'])
        self.vf2.add_lines((b'root', b'C'), [], [b'c'])
        self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
        self.plan_merge_vf.add_lines((b'root', b'E:'),
                                     [(b'root', b'B'), (b'root', b'D')], [b'e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
        self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
        self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
                         self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
        self.assertEqual({},
                         self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
        self.assertEqual({
            (b'root', b'B'): ((b'root', b'A'),),
            (b'root', b'D'): ((b'root', b'C'),),
            (b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'), (b'root', b'F')]))
905
def test_get_record_stream(self):
983
906
self.setup_abcde()
984
908
def get_record(suffix):
985
return self.plan_merge_vf.get_record_stream(
986
[('root', suffix)], 'unordered', True).next()
987
self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
988
self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
989
self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
909
return next(self.plan_merge_vf.get_record_stream(
910
[(b'root', suffix)], 'unordered', True))
911
self.assertEqual(b'a', get_record(b'A').get_bytes_as('fulltext'))
912
self.assertEqual(b'c', get_record(b'C').get_bytes_as('fulltext'))
913
self.assertEqual(b'e', get_record(b'E:').get_bytes_as('fulltext'))
990
914
self.assertEqual('absent', get_record('F').storage_kind)


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from textwrap import dedent

        def addcrlf(x):
            return x + b'\n'

        w = self.get_file()
        w.add_lines(b'text0', [], list(map(addcrlf, base)))
        w.add_lines(b'text1', [b'text0'], list(map(addcrlf, a)))
        w.add_lines(b'text2', [b'text0'], list(map(addcrlf, b)))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge(b'text1', b'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = BytesIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = list(map(addcrlf, mp))
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     [b'aa'],
                     [],
                     [b'aa'])

    def testSeparateInserts(self):
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'ccc'],
                     [b'aaa', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])

    def testSameInsert(self):
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
    overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']

    def testOverlappedInsert(self):
        self.doMerge([b'aaa', b'bbb'],
                     [b'aaa', b'xxx', b'yyy', b'bbb'],
                     [b'aaa', b'xxx', b'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # [b'aaa', b'xxx', b'yyy', b'bbb']

    def testClashReplace(self):
        self.doMerge([b'aaa'],
                     [b'xxx'],
                     [b'yyy', b'zzz'],
                     [b'<<<<<<< ', b'xxx', b'=======', b'yyy', b'zzz',
                      b'>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge([b'aaa'],
                     [b'xxx', b'aaa'],
                     [b'yyy', b'zzz'],
                     [b'<<<<<<< ', b'xxx', b'aaa', b'=======', b'yyy', b'zzz',
                      b'>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge([b'aaa'],

    def testDeleteAndModify(self):
        """Clashing delete and modification.

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map((b'foo@ar',)))
        self.assertEqual("inventory", mapper.map((b'quux',)))

    def test_prefix_mapper(self):
        # format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual(
            "9b/file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
                                                           b"revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
                                                          b"revision-id")))
        self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
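
    # Reading the escaped forms above: characters outside the mapper's safe
    # set are %-quoted (b' ' -> '%20', b'E' -> '%45') and the '%' itself is
    # escaped again for transport safety, which is how ' ' becomes '%2520';
    # the '88/' and 'ed/' directory prefixes are hash buckets derived from
    # the file id, as in the HashPrefixMapper tests above.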


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    # as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                                                    ConstantMapper('inventory')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup': None,
            'factory': make_file_factory(False, ConstantMapper('revisions')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                                                    PrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup': None,
            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 2),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]

    scenarios = len_one_scenarios + len_two_scenarios

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return (b'FileA',) + (suffix,)

    def test_add_fallback_implies_without_fallbacks(self):
        f = self.get_versionedfiles('files')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        g = self.get_versionedfiles('fallback')
        key_a = self.get_simple_key(b'a')
        g.add_lines(key_a, [], [b'\n'])
        f.add_fallback_versioned_files(g)
        self.assertTrue(key_a in f.get_parent_map([key_a]))
        self.assertFalse(
            key_a in f.without_fallbacks().get_parent_map([key_a]))

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key(b'r0')
        key1 = self.get_simple_key(b'r1')
        key2 = self.get_simple_key(b'r2')
        keyf = self.get_simple_key(b'foo')
        f.add_lines(key0, [], [b'a\n', b'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], [b'b\n', b'c\n'])
        else:
            f.add_lines(key1, [], [b'b\n', b'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = (b'FileA',)
        # introduced full text
        origins = files.annotate(prefix + (b'origin',))
        self.assertEqual([
            (prefix + (b'origin',), b'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + (b'base',))
        self.assertEqual([
            (prefix + (b'base',), b'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + (b'merged',))
        if self.graph:
            self.assertEqual([
                (prefix + (b'base',), b'base\n'),
                (prefix + (b'left',), b'left\n'),
                (prefix + (b'right',), b'right\n'),
                (prefix + (b'merged',), b'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + (b'merged',), b'base\n'),
                (prefix + (b'merged',), b'left\n'),
                (prefix + (b'merged',), b'right\n'),
                (prefix + (b'merged',), b'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
                          files.annotate, prefix + ('missing-key',))

    def test_check_no_parameters(self):
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
                          nokeys=False):
        return get_diamond_files(files, self.key_length,
                                 trailing_eol=trailing_eol, nograph=not self.graph,
                                 left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied, adding old content raises ExistingContent."""
        vf = self.get_versionedfiles()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                     lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + b"2")
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, new_key, [], lines,
                              nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, new_key, [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            record = next(vf.get_record_stream([new_key], 'unordered', True))
            self.assertEqual('absent', record.storage_kind)
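
    # nostore_sha is the "don't store what I already have" contract: if the
    # text being added hashes to nostore_sha, add_lines raises
    # ExistingContent and must leave no trace of the new key, which the
    # 'absent' record check above verifies.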

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                (b'sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                (b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                (b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                (b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                (b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                (b'FileA', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                (b'FileA', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                (b'FileA', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                (b'FileA', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                (b'FileA', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                (b'FileB', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                (b'FileB', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                (b'FileB', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                (b'FileB', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                (b'FileB', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [b'\n'])
        self.assertEqual(b'\n',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
        self.assertEqual(b'\n',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key(b'a')
        key_b = self.get_simple_key(b'b')
        key_c = self.get_simple_key(b'c')
        # A
        # |\
        # | B
        # |/
        # C
        f.add_lines(key_a, [], [b'\n'])
        f.add_lines(key_b, [key_a], [b'\n'])
        f.add_lines(key_c, [key_a, key_b], [b'\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 2, (b'left',): 1,
                          (b'right',): 1, (b'base',): 0}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'base'),
                ]
            sort_order = {
                (b'FileA', b'merged'): 2, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
                (b'FileA', b'base'): 0,
                (b'FileB', b'merged'): 2, (b'FileB', b'left'): 1, (b'FileB', b'right'): 1,
                (b'FileB', b'base'): 0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 0, (b'left',): 1,
                          (b'right',): 1, (b'base',): 2}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'base'),
                ]
            sort_order = {
                (b'FileA', b'merged'): 0, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
                (b'FileA', b'base'): 2,
                (b'FileB', b'merged'): 3, (b'FileB', b'left'): 4, (b'FileB', b'right'): 4,
                (b'FileB', b'base'): 5,
                }
        return keys, sort_order
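
    # The two orderings differ deliberately: topological sort places bases
    # first (base=0 ... merged=2), while groupcompress order is roughly
    # newest-first within each file (merged=0 ... base=2), and for
    # key_length 2 each prefix forms its own contiguous group (0-2 for
    # FileA, 3-5 for FileB).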
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                              factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                                  bytes)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',),
                    (b'absent',), (b'base',)]
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'absent'), (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'absent'), (b'FileB', b'base'),
                (b'absent', b'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
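
        # Requesting missing keys (including the wholly unknown b'absent'
        # prefix) must not drop them: get_record_stream is expected to yield
        # a record whose storage_kind is 'absent' for each missing key.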
        self.assertRaises(RevisionNotPresent,
                          files.get_annotator().annotate, self.get_simple_key(b'missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                ((b'r0',), self.get_parents(())),
                ((b'r1',), self.get_parents(((b'r0',),))),
                ((b'r2',), self.get_parents(())),
                ((b'r3',), self.get_parents(())),
                ((b'm',), self.get_parents(((b'r0',), (b'r1',), (b'r2',), (b'r3',)))),
                ]
        else:
            parent_details = [
                ((b'FileA', b'r0'), self.get_parents(())),
                ((b'FileA', b'r1'), self.get_parents(((b'FileA', b'r0'),))),
                ((b'FileA', b'r2'), self.get_parents(())),
                ((b'FileA', b'r3'), self.get_parents(())),
                ((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
                    (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = list(all_parents.keys())
        if self.key_length == 1:
            keys.insert(1, (b'missing',))
        else:
            keys.insert(1, (b'missing', b'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
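        # get_parent_map() is thus a filtering query: absent keys are dropped
        # from the result rather than raising an error. A sketch of how a
        # caller detects missing keys under this contract:
        #
        #     present = files.get_parent_map(keys)
        #     missing = set(keys) - set(present)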
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [(b'origin',)]
            end_keys = [(b'merged',), (b'left',)]
            start_keys = [(b'right',), (b'base',)]
        else:
            origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
            end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
                        (b'FileB', b'merged',), (b'FileB', b'left',)]
            start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
                          (b'FileB', b'right',), (b'FileB', b'base',)]
        origin_entries = source.get_record_stream(
            origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(
            start_keys, 'topological', False)
        entries = itertools.chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key(
            b'lancestor'), (), [b'lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key(b'rancestor'),
                        self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key(b'child'),
                        self.get_parents([self.get_simple_key(b'rancestor')]),
                        [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key(b'otherchild'),
                        self.get_parents([self.get_simple_key(b'lancestor'),
                                          self.get_simple_key(b'base')]),
                        [b'base\n', b'lancestor\n', b'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                                                                  pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key(b'child'),
             self.get_simple_key(b'otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
                               [('Walking content', 0, 5),
                                ('Walking content', 1, 5),
                                ('Walking content', 2, 5),
                                ('Walking content', 3, 5),
                                ('Walking content', 4, 5),
                                ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
        self.assertTrue(
            lines[(b'lancestor\n', self.get_simple_key(b'lancestor'))] > 0)
        self.assertTrue(
            lines[(b'rancestor\n', self.get_simple_key(b'rancestor'))] > 0)
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
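        # iter_lines_added_or_present_in_keys() yields (line, key) pairs and
        # may yield the same pair more than once, which is why iter_with_keys
        # counts occurrences and the assertions above only require counts > 0.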

    def test_make_mpdiffs(self):
        from breezy import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
        files.add_lines(self.get_simple_key(b'noeol'),
                        self.get_parents([self.get_simple_key(b'base')]), [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key(b'noeolsecond'),
                        self.get_parents([self.get_simple_key(b'noeol')]),
                        [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key(b'noeolnotshared'),
                        self.get_parents(
                            [self.get_simple_key(b'noeolsecond')]),
                        [b'line\n', b'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key(b'eol'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key(b'eolline'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
                        self.get_parents([self.get_simple_key(b'noeolbase'),
                                          self.get_simple_key(b'noeol')]),
                        [b'line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key(b'noeoldup'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain1-'
        text = [b'line\n']
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
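        # Each sha1s[depth] is the expected SHA-1 of the accumulated chain
        # text at that depth. A sketch of how one entry could be recomputed,
        # assuming the text grows by one b'line\n' per iteration from a
        # single initial line:
        #
        #     from breezy import osutils
        #     osutils.sha_strings([b'line\n'] * (depth + 2))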
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain2-'
        text = [b'line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([(b"NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines[b"key"] = [b"dataline1", b"dataline2"]
        self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
                         self.texts.get_sha1s([(b"key",)]))

    def test_get_parent_map(self):
        self._parent_map = {b"G": (b"A", b"B")}
        self.assertEqual({(b"G",): ((b"A",), (b"B",))},
                         self.texts.get_parent_map([(b"G",), (b"L",)]))

    def test_get_record_stream(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("chunked", record.storage_kind)
        self.assertEqual(b"FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEqual([b"FOO", b"BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        self._lines[b"B"] = [b"HEY"]
        self._lines[b"C"] = [b"Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([(b"A",), (b"B",)])
        self.assertEqual(sorted([(b"FOO", b"A"), (b"BAR", b"A"), (b"HEY", b"B")]),
                         sorted(it))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)
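
    # A sketch of the 'unordered' policy the three tests above appear to pin
    # down (a hypothetical reconstruction, not the decorator's actual code):
    # sort by (priority, key), with keys missing from the map defaulting to
    # a priority that sorts before every explicit entry.
    #
    #     def expected_order(request_keys, key_priority):
    #         return sorted(request_keys,
    #                       key=lambda k: (key_priority.get(k, 0), k))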