# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from gzip import GzipFile
from itertools import chain

from .. import (
    errors,
    graph as _mod_graph,
    osutils,
    )
from ..errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from ..bzr import (
    groupcompress,
    knit as _mod_knit,
    )
from ..bzr.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from ..sixish import (
    BytesIO,
    )
from . import (
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    TestSkipped,
    )
from .http_utils import TestCaseWithWebserver
from ..transport.memory import MemoryTransport
from ..bzr import versionedfile as versionedfile
from ..bzr.versionedfile import (
    ChunkedContentFactory,
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from ..bzr.weave import (
    WeaveFile,
    )
from ..bzr.weavefile import write_weave
from .scenarios import load_tests_apply_scenarios


load_tests = load_tests_apply_scenarios
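
# load_tests_apply_scenarios multiplies each scenario-carrying test class in
# this module by its 'scenarios' attribute, one generated class per
# (name, params) pair, with the params applied as instance attributes.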


def get_diamond_vf(f, trailing_eol=True, left_only=False):
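    # Builds the canonical diamond ancestry used throughout these tests:
    # 'origin' as the root, 'left' and 'right' as siblings, 'merged' joining
    # both branches.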


    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        try:
            _, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
                [b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts[b'r1'] = f.add_lines(b'r1',
                [b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'],
                    parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts[b'r0'])
        self.assertNotEqual(None, parent_texts[b'r1'])

        def verify_file(f):
            versions = f.versions()
            self.assertTrue(b'r0' in versions)
            self.assertTrue(b'r1' in versions)
            self.assertTrue(b'r2' in versions)
            self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
            self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
            self.assertEqual(f.get_lines(b'r2'), [b'c\n', b'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate(b'r1')
            self.assertEqual(origins[0][0], b'r0')
            self.assertEqual(origins[1][0], b'r1')
            origins = f.annotate(b'r2')
            self.assertEqual(origins[0][0], b'r1')
            self.assertEqual(origins[1][0], b'r2')
        verify_file(f)
        f = self.reopen_file()
        verify_file(f)

        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines(b'1', [], [b'a\n'])
        vf.add_lines(b'2', [b'1'], [b'a\n', b'a\n', b'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'2'))
        vf.add_lines(b'3', [b'1'], [b'a\n', b'a\n', b'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'3'))
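        # left_matching_blocks appears to take SequenceMatcher-style
        # (left_start, new_start, length) triples matching the new text
        # against the left (first) parent, letting callers precompute the
        # delta; the trailing zero-length block marks the end of matching.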

    def test_inline_newline_throws(self):
        # \n characters are only permitted at the end of each line being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
                          vf.add_lines, b'a', [], [b'a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])
        # but inline CR's are allowed
        vf.add_lines(b'a', [], [b'a\r\n'])
        try:
            vf.add_lines_with_ghosts(b'b', [], [b'a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
                          vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])

    def test_add_lines_nostoresha(self):
        """Adding old content with nostore_sha supplied raises ExistingContent."""
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, version + b"2", [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + b"2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """Adding old content with nostore_sha supplied raises ExistingContent."""
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts(b'text', [b'ghost'], [b'line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent,
                          vf.make_mpdiffs, [b'ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version(b'base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        f.add_lines(b'base', [], [b'line\n'])
        f.add_lines(b'noeol', [b'base'], [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines(b'noeolsecond', [b'noeol'], [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        f.add_lines(b'noeolnotshared', [b'noeolsecond'], [b'line\n', b'phone'])
        # add eol following a noneol parent, change content
        f.add_lines(b'eol', [b'noeol'], [b'phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines(b'eolline', [b'noeol'], [b'line\n'])
        # noeol with no parents:
        f.add_lines(b'noeolbase', [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines(b'eolbeforefirstparent', [
                    b'noeolbase', b'noeol'], [b'line'])
        # two identical eol texts
        f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
        next_parent = b'base'
        text_name = b'chain1-'
        text = [b'line\n']
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + b'%d' % depth
            text = text + [b'line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = b'base'
        text_name = b'chain2-'
        text = [b'line\n']
        for depth in range(26):
            new_version = text_name + b'%d' % depth
            text = text + [b'line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
        f.add_lines(b'r2', [b'r0'], [b'b\n', b'c\n'])
        f.add_lines(b'r3', [b'r2'], [b'b\n', b'c\n'])
        f.add_lines(b'rM', [b'r1', b'r2'], [b'b\n', b'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry([b'rM'])
        # there are several valid topological orders,
        # so we check indexes
        r0 = versions.index(b'r0')
        r1 = versions.index(b'r1')
        r2 = versions.index(b'r2')
        self.assertFalse(b'r3' in versions)
        rM = versions.index(b'rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
                          f.get_ancestry, [b'rM', b'rX'])

        self.assertEqual(set(f.get_ancestry(b'rM')),
                         set(f.get_ancestry(b'rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, b'', [], [])
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines_with_ghosts, b'', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines(b'0', [], [b'a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r0': ()}, f.get_parent_map([b'r0']))
        f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
        self.assertEqual(
            {b'r0': (),
             b'r1': (b'r0',)},
            f.get_parent_map([b'r0', b'r1']))
        f.add_lines(b'r2', [], [b'a\n', b'b\n'])
        f.add_lines(b'r3', [], [b'a\n', b'b\n'])
        f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
        self.assertEqual({}, f.get_parent_map(b'y'))
        self.assertEqual(
            {b'r0': (),
             b'r1': (b'r0',)},
            f.get_parent_map([b'r0', b'y', b'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'c\n', b'b\n'])
        origins = f.annotate(b'r1')
        self.assertEqual(origins[0][0], b'r1')
        self.assertEqual(origins[1][0], b'r0')

        self.assertRaises(RevisionNotPresent,
                          f.annotate, b'foo')

    def test_detection(self):
        # Test weaves detect corruption.

        vf = self.get_file()
        # add a base to get included
        vf.add_lines(b'base', [], [b'base\n'])
        # add an ancestor to be included on one side
        vf.add_lines(b'lancestor', [], [b'lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines(b'rancestor', [b'base'], [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines(b'child', [b'rancestor'], [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines(b'otherchild',
                     [b'lancestor', b'base'],
                     [b'base\n', b'lancestor\n', b'otherchild\n'])

        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                                                                   pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions([b'child', b'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[(b'child\n', b'child')] > 0)
        self.assertTrue(lines[(b'otherchild\n', b'otherchild')] > 0)
        # we don't care if we got more than that.

        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                              vf.get_ancestry_with_ghosts, [b'foo'])
            self.assertRaises(NotImplementedError,
                              vf.get_parents_with_ghosts, b'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual([b'notbxbfse'], vf.get_ancestry(b'notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry([b'notbxbfse']))
        self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
                         vf.get_parent_map([b'notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent

        # check the sha1 data is available
        vf = self.get_file()
        vf.add_lines(b'a', [], [b'a\n'])
        # the same file, different metadata
        vf.add_lines(b'b', [b'a'], [b'a\n'])
        # a file differing only in last newline.
        vf.add_lines(b'c', [], [b'a'])
        self.assertEqual({
            b'a': b'3f786850e387550fdab836ed7e6dc881de23001b',
            b'c': b'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            b'b': b'3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s([b'a', b'c', b'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, self.get_transport(),
                         create=True,
                         get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', self.get_transport(),
                      create=True,
                      get_scope=self.get_transaction)
        w.add_lines(b'v1', [], [b'hello\n'])
        w.add_lines(b'v2', [b'v1'], [b'hello\n', b'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([(b'{', 0), b'hello\n', (b'}', None),
                          (b'{', 1), b'there\n', (b'}', None)],
                         w._weave)
        self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f',
                          b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef'],
                         w._sha1s)

        w._weave[4] = b'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        w._weave[4] = b'there\n'
        self.assertEqual(b'hello\nthere\n', w.get_text(b'v2'))
        # Invalid checksum, first digit changed
        w._sha1s[1] = b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, self.get_transport(),
                         create=create,
                         get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          self.get_transport(),
                          get_scope=self.get_transaction)

    def get_factory(self):

        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1,
                                                           self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines((b'root', b'A'), [], [b'a'])
        self.vf1.add_lines((b'root', b'B'), [(b'root', b'A')], [b'b'])
        self.vf2.add_lines((b'root', b'C'), [], [b'c'])
        self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
        self.plan_merge_vf.add_lines((b'root', b'E:'),
                                     [(b'root', b'B'), (b'root', b'D')], [b'e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
        self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
        self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
                         self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
        self.assertEqual({},
                         self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
        self.assertEqual({
            (b'root', b'B'): ((b'root', b'A'),),
            (b'root', b'D'): ((b'root', b'C'),),
            (b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'), (b'root', b'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()

        def get_record(suffix):
            return next(self.plan_merge_vf.get_record_stream(
                [(b'root', suffix)], 'unordered', True))
        self.assertEqual(b'a', get_record(b'A').get_bytes_as('fulltext'))
        self.assertEqual(b'a', b''.join(
            get_record(b'A').iter_bytes_as('chunked')))
        self.assertEqual(b'c', get_record(b'C').get_bytes_as('fulltext'))
        self.assertEqual(b'e', get_record(b'E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record(b'F').storage_kind)


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from textwrap import dedent

        def addcrlf(x):
            # (helper reconstructed from usage: terminate each input line)
            return x + b'\n'

        w = self.get_file()
        w.add_lines(b'text0', [], list(map(addcrlf, base)))
        w.add_lines(b'text1', [b'text0'], list(map(addcrlf, a)))
        w.add_lines(b'text2', [b'text0'], list(map(addcrlf, b)))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge(b'text1', b'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = BytesIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = list(map(addcrlf, mp))
        self.assertEqual(mt.readlines(), mp)
987
def testOneInsert(self):
1056
988
self.doMerge([],

    def testSeparateInserts(self):
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'ccc'],
                     [b'aaa', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])

    def testSameInsert(self):
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
    overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']

    def testOverlappedInsert(self):
        self.doMerge([b'aaa', b'bbb'],
                     [b'aaa', b'xxx', b'yyy', b'bbb'],
                     [b'aaa', b'xxx', b'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # [b'aaa', b'xxx', b'yyy', b'bbb']

    def testClashReplace(self):
        self.doMerge([b'aaa'],
                     [b'xxx'],
                     [b'yyy', b'zzz'],
                     [b'<<<<<<< ', b'xxx', b'=======', b'yyy', b'zzz',
                      b'>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge([b'aaa'],
                     [b'xxx', b'aaa'],
                     [b'yyy', b'zzz'],
                     [b'<<<<<<< ', b'xxx', b'aaa', b'=======', b'yyy', b'zzz',
                      b'>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge([b'aaa'],

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(
            f, 'knit-ft-gz', _mod_knit.FTAnnotatedToUnannotated(None),
            'knit-delta-gz', _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            b'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            b'origin\n'
            b'end origin\n',
            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
        self.assertEqual(
            b'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            b'1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(
            f, 'knit-ft-gz', _mod_knit.FTAnnotatedToUnannotated(None),
            'knit-delta-gz', _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            b'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            b'origin\n'
            b'end origin\n',
            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
        self.assertEqual(
            b'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            b'2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
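    # The decompressed records asserted above follow the knit data layout:
    # a 'version <name> <numlines> <sha1>' header, then either full content
    # or '<start>,<end>,<count>' delta hunks, then an 'end <name>' trailer.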

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map((b'foo@ar',)))
        self.assertEqual("inventory", mapper.map((b'quux',)))

    def test_prefix_mapper(self):
        # format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual(
            "9b/file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
                                                           b"revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
                                                          b"revision-id")))
        self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    # as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                                                    ConstantMapper('inventory')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup': None,
            'factory': make_file_factory(False, ConstantMapper('revisions')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                                                    PrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup': None,
            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 2),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]

    scenarios = len_one_scenarios + len_two_scenarios
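    # Each (name, params) pair above yields one parameterized copy of this
    # class; the params dict is applied as instance attributes, which is
    # where self.factory, self.graph and self.key_length come from.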

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)

    def test_add_chunks(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key(b'r0')
        key1 = self.get_simple_key(b'r1')
        key2 = self.get_simple_key(b'r2')
        keyf = self.get_simple_key(b'foo')

        def add_chunks(key, parents, chunks):
            factory = ChunkedContentFactory(
                key, parents, osutils.sha_strings(chunks), chunks)
            return f.add_content(factory)

        add_chunks(key0, [], [b'a', b'\nb\n'])
        if self.graph:
            add_chunks(key1, [key0], [b'b', b'\n', b'c\n'])
        else:
            add_chunks(key1, [], [b'b\n', b'c\n'])
        keys = f.keys()
        self.assertIn(key0, keys)
        self.assertIn(key1, keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
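        # Chunk boundaries need not coincide with line boundaries: the sha
        # is taken over the concatenated chunks, so [b'a', b'\nb\n'] stores
        # the same text as [b'a\n', b'b\n'].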

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = (b'FileA',)
        # introduced full text
        origins = files.annotate(prefix + (b'origin',))
        self.assertEqual([
            (prefix + (b'origin',), b'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + (b'base',))
        self.assertEqual([
            (prefix + (b'base',), b'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + (b'merged',))
        if self.graph:
            self.assertEqual([
                (prefix + (b'base',), b'base\n'),
                (prefix + (b'left',), b'left\n'),
                (prefix + (b'right',), b'right\n'),
                (prefix + (b'merged',), b'merged\n')
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + (b'merged',), b'base\n'),
                (prefix + (b'merged',), b'left\n'),
                (prefix + (b'merged',), b'right\n'),
                (prefix + (b'merged',), b'merged\n')
                ], origins)
        self.assertRaises(RevisionNotPresent,
                          files.annotate, prefix + ('missing-key',))

    def test_check_no_parameters(self):
        files = self.get_versionedfiles()

        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
                          nokeys=False):
        return get_diamond_files(files, self.key_length,
                                 trailing_eol=trailing_eol, nograph=not self.graph,
                                 left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """Adding old content with nostore_sha supplied raises ExistingContent."""
        vf = self.get_versionedfiles()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + b"2")
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, new_key, [], lines,
                              nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, new_key, [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            record = next(vf.get_record_stream([new_key], 'unordered', True))
            self.assertEqual('absent', record.storage_kind)
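    # nostore_sha asks add_lines to raise ExistingContent, rather than store
    # a duplicate, when the new text's sha1 matches the supplied value.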

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.

        results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
1708
def test_add_lines_no_key_generates_chk_key(self):
1671
1716
results.append(add[:2])
1672
1717
if self.key_length == 1:
1673
1718
self.assertEqual([
1674
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1675
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1676
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1677
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1678
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1719
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1720
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1721
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1722
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1723
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1680
1725
# Check the added items got CHK keys.
1681
self.assertEqual(set([
1682
('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
1683
('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
1684
('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
1685
('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
1686
('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
1727
(b'sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
1728
(b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
1729
(b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
1730
(b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
1731
(b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
1689
1734
elif self.key_length == 2:
1690
1735
self.assertEqual([
1691
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1692
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1693
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1694
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1695
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1696
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1697
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1698
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1699
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1700
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1736
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1737
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1738
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1739
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1740
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1741
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1742
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1743
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1744
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1745
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1702
1747
# Check the added items got CHK keys.
1703
self.assertEqual(set([
1704
('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1705
('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1706
('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1707
('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1708
('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1709
('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1710
('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1711
('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1712
('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1713
('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1749
(b'FileA', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1750
(b'FileA', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1751
(b'FileA', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1752
(b'FileA', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1753
(b'FileA', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
1754
(b'FileB', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
1755
(b'FileB', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
1756
(b'FileB', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
1757
(b'FileB', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
1758
(b'FileB', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [b'\n'])
        self.assertEqual(b'\n',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
        self.assertEqual(b'\n',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key(b'a')
        key_b = self.get_simple_key(b'b')
        key_c = self.get_simple_key(b'c')
        f.add_lines(key_a, [], [b'\n'])
        f.add_lines(key_b, [key_a], [b'\n'])
        f.add_lines(key_c, [key_a, key_b], [b'\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 2, (b'left',): 1,
                          (b'right',): 1, (b'base',): 0}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'base'),
                ]
            sort_order = {
                (b'FileA', b'merged'): 2, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
                (b'FileA', b'base'): 0,
                (b'FileB', b'merged'): 2, (b'FileB', b'left'): 1, (b'FileB', b'right'): 1,
                (b'FileB', b'base'): 0,
                }
        return keys, sort_order
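    # Topological order groups: bases sort first (0), then their children
    # (1), with the merge last (2).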

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 0, (b'left',): 1,
                          (b'right',): 1, (b'base',): 2}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'base'),
                ]
            sort_order = {
                (b'FileA', b'merged'): 0, (b'FileA', b'left'): 1,
                (b'FileA', b'right'): 1, (b'FileA', b'base'): 2,
                (b'FileB', b'merged'): 3, (b'FileB', b'left'): 4,
                (b'FileB', b'right'): 4, (b'FileB', b'base'): 5,
                }
        return keys, sort_order
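
    # Groupcompress ordering is roughly newest-first: merged sorts at 0 and
    # base last, and with two-element keys every FileA text sorts before any
    # FileB text, which is why the FileB entries continue the sequence at 3..5.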

            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                              factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                                  bytes)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',),
                    (b'absent',), (b'base',)]
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'absent'), (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'absent'), (b'FileB', b'base'),
                (b'absent', b'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
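        # Requesting missing keys must not raise here: the stream should
        # instead yield a record whose storage_kind is 'absent' for each key
        # that is not present (see test_get_record_stream_absent further down).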

        self.assertRaises(RevisionNotPresent,
                          files.get_annotator().annotate,
                          self.get_simple_key(b'missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                ((b'r0',), self.get_parents(())),
                ((b'r1',), self.get_parents(((b'r0',),))),
                ((b'r2',), self.get_parents(())),
                ((b'r3',), self.get_parents(())),
                ((b'm',), self.get_parents(((b'r0',), (b'r1',), (b'r2',), (b'r3',)))),
                ]
        else:
            parent_details = [
                ((b'FileA', b'r0'), self.get_parents(())),
                ((b'FileA', b'r1'), self.get_parents(((b'FileA', b'r0'),))),
                ((b'FileA', b'r2'), self.get_parents(())),
                ((b'FileA', b'r3'), self.get_parents(())),
                ((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
                    (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = list(all_parents.keys())
        if self.key_length == 1:
            keys.insert(1, (b'missing',))
        else:
            keys.insert(1, (b'missing', b'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [(b'origin',)]
            end_keys = [(b'merged',), (b'left',)]
            start_keys = [(b'right',), (b'base',)]
        else:
            origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
            end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
                        (b'FileB', b'merged',), (b'FileB', b'left',)]
            start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
                          (b'FileB', b'right',), (b'FileB', b'base',)]
        origin_entries = source.get_record_stream(
            origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(
            start_keys, 'topological', False)
        entries = itertools.chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key(
            b'lancestor'), (), [b'lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key(b'rancestor'),
                        self.get_parents([self.get_simple_key(b'base')]),
                        [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key(b'child'),
                        self.get_parents([self.get_simple_key(b'rancestor')]),
                        [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key(b'otherchild'),
                        self.get_parents([self.get_simple_key(b'lancestor'),
                                          self.get_simple_key(b'base')]),
                        [b'base\n', b'lancestor\n', b'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                                                                  pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key(b'child'),
             self.get_simple_key(b'otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
                               [('Walking content', 0, 5),
                                ('Walking content', 1, 5),
                                ('Walking content', 2, 5),
                                ('Walking content', 3, 5),
                                ('Walking content', 4, 5),
                                ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
        self.assertTrue(
            lines[(b'lancestor\n', self.get_simple_key(b'lancestor'))] > 0)
        self.assertTrue(
            lines[(b'rancestor\n', self.get_simple_key(b'rancestor'))] > 0)
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
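
    # make_mpdiffs is exercised against texts chosen to stress two things
    # at once: end-of-line edge cases (texts with and without a trailing
    # newline) and long parallel delta chains.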
    def test_make_mpdiffs(self):
        from breezy import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
        files.add_lines(self.get_simple_key(b'noeol'),
                        self.get_parents([self.get_simple_key(b'base')]),
                        [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key(b'noeolsecond'),
                        self.get_parents([self.get_simple_key(b'noeol')]),
                        [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key(b'noeolnotshared'),
                        self.get_parents(
                            [self.get_simple_key(b'noeolsecond')]),
                        [b'line\n', b'phone'])
        # add eol following a no-eol parent, change content
        files.add_lines(self.get_simple_key(b'eol'),
                        self.get_parents([self.get_simple_key(b'noeol')]),
                        [b'phone\n'])
        # add eol following a no-eol parent, no change content
        files.add_lines(self.get_simple_key(b'eolline'),
                        self.get_parents([self.get_simple_key(b'noeol')]),
                        [b'line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
                        self.get_parents([self.get_simple_key(b'noeolbase'),
                                          self.get_simple_key(b'noeol')]),
                        [b'line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key(b'noeoldup'),
                        self.get_parents([self.get_simple_key(b'noeol')]),
                        [b'line'])
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain1-'
        text = [b'line\n']
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain2-'
        text = [b'line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([(b"NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines[b"key"] = [b"dataline1", b"dataline2"]
        self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
                         self.texts.get_sha1s([(b"key",)]))

    def test_get_parent_map(self):
        self._parent_map = {b"G": (b"A", b"B")}
        self.assertEqual({(b"G",): ((b"A",), (b"B",))},
                         self.texts.get_parent_map([(b"G",), (b"L",)]))

    def test_get_record_stream(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("chunked", record.storage_kind)
        self.assertEqual(b"FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEqual([b"FOO", b"BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        self._lines[b"B"] = [b"HEY"]
        self._lines[b"C"] = [b"Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([(b"A",), (b"B",)])
        self.assertEqual(sorted([(b"FOO", b"A"), (b"BAR", b"A"), (b"HEY", b"B")]),
                         sorted(list(it)))
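

# The ordering decorator tested below reorders an underlying record stream
# for 'unordered' requests: keys named in the priority dict come back in
# ascending priority, and keys absent from the dict sort to the front
# alphabetically.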
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)