:param trailing_eol: If True end the last line with \n.
b'base': ((b'origin',),),
b'left': ((b'base',),),
b'right': ((b'base',),),
b'merged': ((b'left',), (b'right',)),
# insert a diamond graph to exercise deltas and merges.
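# The resulting ancestry (a sketch, matching the parents mapping above):
#
#    origin
#       |
#     base
#     /    \
#  left   right
#     \    /
#    merged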
f.add_lines(b'origin', [], [b'origin' + last_char])
f.add_lines(b'base', [b'origin'], [b'base' + last_char])
f.add_lines(b'left', [b'base'], [b'base\n', b'left' + last_char])
f.add_lines(b'right', [b'base'],
[b'base\n', b'right' + last_char])
f.add_lines(b'merged', [b'left', b'right'],
[b'base\n', b'left\n', b'right\n', b'merged' + last_char])
return f, parents
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
nograph=False, nokeys=False):
"""Get a diamond graph to exercise deltas and merges.
This creates a 5-node graph in files. If files supports 2-length keys two
# we loop over each key because that spreads the inserts across prefixes,
# which is how commit operates.
for prefix in prefixes:
result.append(files.add_lines(prefix + get_key(b'origin'), (),
[b'origin' + last_char]))
for prefix in prefixes:
result.append(files.add_lines(prefix + get_key(b'base'),
get_parents([(b'origin',)]), [b'base' + last_char]))
for prefix in prefixes:
result.append(files.add_lines(prefix + get_key(b'left'),
get_parents([(b'base',)]),
[b'base\n', b'left' + last_char]))
if not left_only:
for prefix in prefixes:
result.append(files.add_lines(prefix + get_key(b'right'),
get_parents([(b'base',)]),
[b'base\n', b'right' + last_char]))
for prefix in prefixes:
result.append(files.add_lines(prefix + get_key(b'merged'),
get_parents([(b'left',), (b'right',)]),
[b'base\n', b'left\n', b'right\n', b'merged' + last_char]))
def test_adds_with_parent_texts(self):
f = self.get_file()
parent_texts = {}
_, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
try:
_, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
[b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
except NotImplementedError:
# if the format doesn't support ghosts, just add normally.
_, _, parent_texts[b'r1'] = f.add_lines(b'r1',
[b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'],
parent_texts=parent_texts)
self.assertNotEqual(None, parent_texts[b'r0'])
self.assertNotEqual(None, parent_texts[b'r1'])
def verify_file(f):
versions = f.versions()
self.assertTrue(b'r0' in versions)
self.assertTrue(b'r1' in versions)
self.assertTrue(b'r2' in versions)
self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
self.assertEqual(f.get_lines(b'r2'), [b'c\n', b'd\n'])
self.assertEqual(3, f.num_versions())
origins = f.annotate(b'r1')
self.assertEqual(origins[0][0], b'r0')
self.assertEqual(origins[1][0], b'r1')
origins = f.annotate(b'r2')
self.assertEqual(origins[0][0], b'r1')
self.assertEqual(origins[1][0], b'r2')
f = self.reopen_file()
vf = self.get_file()
if isinstance(vf, WeaveFile):
raise TestSkipped("WeaveFile ignores left_matching_blocks")
vf.add_lines(b'1', [], [b'a\n'])
vf.add_lines(b'2', [b'1'], [b'a\n', b'a\n', b'a\n'],
left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
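# left_matching_blocks takes SequenceMatcher-style (left_pos, new_pos,
# length) triples describing how the new text lines up against its left
# parent, ending with a zero-length sentinel; here only the first b'a\n'
# of '1' is treated as matching, so the remaining lines of '2' must be
# stored as new content. (A reading of the API as this test drives it,
# not a definitive spec.)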
self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'2'))
vf.add_lines(b'3', [b'1'], [b'a\n', b'a\n', b'a\n'],
left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'3'))
def test_inline_newline_throws(self):
# inline \n characters are not permitted in lines being added
vf = self.get_file()
self.assertRaises(errors.BzrBadParameterContainsNewline,
vf.add_lines, b'a', [], [b'a\n\n'])
self.assertRaises(
(errors.BzrBadParameterContainsNewline, NotImplementedError),
vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])
# but inline CRs are allowed
vf.add_lines(b'a', [], [b'a\r\n'])
try:
vf.add_lines_with_ghosts(b'b', [], [b'a\r\n'])
except NotImplementedError:
def test_add_reserved(self):
vf = self.get_file()
self.assertRaises(errors.ReservedId,
vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])

def test_add_lines_nostoresha(self):
"""When nostore_sha is supplied, adding old content raises."""
vf = self.get_file()
empty_text = (b'a', [])
sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
sha, _, _ = vf.add_lines(version, [], lines)
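# add_lines returns a 3-tuple; as this test uses it, the first two
# elements are the SHA-1 of the stored text and its length in bytes.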
# we now have a copy of all the lines in the vf.
for sha, (version, lines) in zip(
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
self.assertRaises(ExistingContent,
vf.add_lines, version + b"2", [], lines,
nostore_sha=sha)
# and no new version should have been added.
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
version + b"2")
def test_add_lines_with_ghosts_nostoresha(self):
"""When nostore_sha is supplied, adding old content raises."""
vf = self.get_file()
empty_text = (b'a', [])
sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
sha, _, _ = vf.add_lines(version, [], lines)
# Test adding this in two situations:
# On top of a new insertion
vf = self.get_file('fulltext')
vf.add_lines(b'noeol', [], [b'line'])
vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
left_matching_blocks=[(0, 1, 1)])
self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
# On top of a delta
vf = self.get_file('delta')
vf.add_lines(b'base', [], [b'line'])
vf.add_lines(b'noeol', [b'base'], [b'prelude\n', b'line'])
vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
left_matching_blocks=[(1, 1, 1)])
self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
def test_make_mpdiffs(self):
from breezy import multiparent

def test_make_mpdiffs_with_ghosts(self):
vf = self.get_file('foo')
try:
vf.add_lines_with_ghosts(b'text', [b'ghost'], [b'line\n'])
except NotImplementedError:
# old Weave formats do not allow ghosts
self.assertRaises(errors.RevisionNotPresent,
vf.make_mpdiffs, [b'ghost'])
def _setup_for_deltas(self, f):
self.assertFalse(f.has_version(b'base'))
# add texts that should trip the knit maximum delta chain threshold
# as well as doing parallel chains of data in knits.
# this is done by two chains of 25 insertions
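# (each chain adds versions chain1-0 .. chain1-25 / chain2-0 ..
# chain2-25, every text one line longer than its parent, so the delta
# chains grow long enough to trip the threshold)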
f.add_lines(b'base', [], [b'line\n'])
f.add_lines(b'noeol', [b'base'], [b'line'])
# detailed eol tests:
# shared last line with parent no-eol
f.add_lines(b'noeolsecond', [b'noeol'], [b'line\n', b'line'])
# differing last line with parent, both no-eol
f.add_lines(b'noeolnotshared', [b'noeolsecond'], [b'line\n', b'phone'])
# add eol following a no-eol parent, change content
f.add_lines(b'eol', [b'noeol'], [b'phone\n'])
# add eol following a no-eol parent, no change content
f.add_lines(b'eolline', [b'noeol'], [b'line\n'])
# noeol with no parents:
f.add_lines(b'noeolbase', [], [b'line'])
# noeol preceding its leftmost parent in the output:
# this is done by making it a merge of two parents with no common
# ancestry: noeolbase and noeol with the
# later-inserted parent the leftmost.
f.add_lines(b'eolbeforefirstparent', [b'noeolbase', b'noeol'], [b'line'])
# two identical no-eol texts
f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
next_parent = b'base'
text_name = b'chain1-'
text = []
sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
19: b'1ebed371807ba5935958ad0884595126e8c4e823',
20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
}
for depth in range(26):
new_version = text_name + b'%d' % depth
text = text + [b'line\n']
f.add_lines(new_version, [next_parent], text)
next_parent = new_version
next_parent = b'base'
text_name = b'chain2-'
text = []
for depth in range(26):
new_version = text_name + b'%d' % depth
text = text + [b'line\n']
f.add_lines(new_version, [next_parent], text)
next_parent = new_version
def test_ancestry(self):
f = self.get_file()
self.assertEqual(set(), f.get_ancestry([]))
f.add_lines(b'r0', [], [b'a\n', b'b\n'])
f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
f.add_lines(b'r2', [b'r0'], [b'b\n', b'c\n'])
f.add_lines(b'r3', [b'r2'], [b'b\n', b'c\n'])
f.add_lines(b'rM', [b'r1', b'r2'], [b'b\n', b'c\n'])
self.assertEqual(set(), f.get_ancestry([]))
versions = f.get_ancestry([b'rM'])
# there are some possibilities:
# so we check indexes
r0 = versions.index(b'r0')
r1 = versions.index(b'r1')
r2 = versions.index(b'r2')
self.assertFalse(b'r3' in versions)
rM = versions.index(b'rM')
self.assertTrue(r0 < r1)
self.assertTrue(r0 < r2)
self.assertTrue(r1 < rM)
self.assertTrue(r2 < rM)
self.assertRaises(RevisionNotPresent,
f.get_ancestry, [b'rM', b'rX'])
self.assertEqual(set(f.get_ancestry(b'rM')),
set(f.get_ancestry(b'rM', topo_sorted=False)))
def test_mutate_after_finish(self):
self._transaction = 'before'
f = self.get_file()
self._transaction = 'after'
self.assertRaises(errors.OutSideTransaction, f.add_lines, b'', [], [])
self.assertRaises(errors.OutSideTransaction,
f.add_lines_with_ghosts, b'', [], [])
def test_copy_to(self):
f = self.get_file()
f.add_lines(b'0', [], [b'a\n'])
t = MemoryTransport()
f.copy_to('foo', t)
for suffix in self.get_factory().get_suffixes():
def test_get_parent_map(self):
f = self.get_file()
f.add_lines(b'r0', [], [b'a\n', b'b\n'])
self.assertEqual(
{b'r0': ()}, f.get_parent_map([b'r0']))
f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
self.assertEqual(
{b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
self.assertEqual(
{b'r0': (),
b'r1': (b'r0',)},
f.get_parent_map([b'r0', b'r1']))
f.add_lines(b'r2', [], [b'a\n', b'b\n'])
f.add_lines(b'r3', [], [b'a\n', b'b\n'])
f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
self.assertEqual(
{b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
self.assertEqual({}, f.get_parent_map(b'y'))
self.assertEqual(
{b'r0': (),
b'r1': (b'r0',)},
f.get_parent_map([b'r0', b'y', b'r1']))
def test_annotate(self):
f = self.get_file()
f.add_lines(b'r0', [], [b'a\n', b'b\n'])
f.add_lines(b'r1', [b'r0'], [b'c\n', b'b\n'])
origins = f.annotate(b'r1')
self.assertEqual(origins[0][0], b'r1')
self.assertEqual(origins[1][0], b'r0')
self.assertRaises(RevisionNotPresent,
def test_detection(self):
# Test weaves detect corruption.
vf = self.get_file()
# add a base to get included
vf.add_lines(b'base', [], [b'base\n'])
# add an ancestor to be included on one side
vf.add_lines(b'lancestor', [], [b'lancestor\n'])
# add an ancestor to be included on the other side
vf.add_lines(b'rancestor', [b'base'], [b'rancestor\n'])
# add a child of rancestor with no eofile-nl
vf.add_lines(b'child', [b'rancestor'], [b'base\n', b'child\n'])
# add a child of lancestor and base to join the two roots
vf.add_lines(b'otherchild',
[b'lancestor', b'base'],
[b'base\n', b'lancestor\n', b'otherchild\n'])
def iter_with_versions(versions, expected):
# now we need to see what lines are returned, and how often.
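# lines maps (text, origin-version) pairs to the number of times each
# was yielded, since iter_lines_added_or_present_in_versions returns
# (line, version) tuples.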
lines = {}
progress = InstrumentedProgress()
# iterate over the lines
for line in vf.iter_lines_added_or_present_in_versions(versions,
pb=progress):
lines.setdefault(line, 0)
lines[line] += 1
if [] != progress.updates:
self.assertEqual(expected, progress.updates)
return lines
lines = iter_with_versions([b'child', b'otherchild'],
[('Walking content', 0, 2),
('Walking content', 1, 2),
('Walking content', 2, 2)])
# we must see child and otherchild
self.assertTrue(lines[(b'child\n', b'child')] > 0)
self.assertTrue(lines[(b'otherchild\n', b'otherchild')] > 0)
# we don't care if we got more than that.
parent_id_unicode = u'b\xbfse'
parent_id_utf8 = parent_id_unicode.encode('utf8')
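# i.e. b'b\xc2\xbfse' - a revision id that is valid UTF-8 but not ASCII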
try:
vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
except NotImplementedError:
# check the other ghost apis are also not implemented
self.assertRaises(NotImplementedError,
vf.get_ancestry_with_ghosts, [b'foo'])
self.assertRaises(NotImplementedError,
vf.get_parents_with_ghosts, b'foo')
return
vf = self.reopen_file()
# test key graph related apis: get_ancestry, _graph, get_parents
# - these are ghost unaware and must not reflect ghosts
self.assertEqual(set([b'notbxbfse']), vf.get_ancestry(b'notbxbfse'))
self.assertFalse(vf.has_version(parent_id_utf8))
# we have _with_ghost apis to give us ghost information.
self.assertEqual(set([parent_id_utf8, b'notbxbfse']),
vf.get_ancestry_with_ghosts([b'notbxbfse']))
self.assertEqual([parent_id_utf8],
vf.get_parents_with_ghosts(b'notbxbfse'))
# if we add something that is a ghost of another, it should correct the
# results of the prior apis
vf.add_lines(parent_id_utf8, [], [])
self.assertEqual(set([parent_id_utf8, b'notbxbfse']),
vf.get_ancestry([b'notbxbfse']))
self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
vf.get_parent_map([b'notbxbfse']))
self.assertTrue(vf.has_version(parent_id_utf8))
# we have _with_ghost apis to give us ghost information.
self.assertEqual(set([parent_id_utf8, b'notbxbfse']),
vf.get_ancestry_with_ghosts([b'notbxbfse']))
self.assertEqual([parent_id_utf8],
vf.get_parents_with_ghosts(b'notbxbfse'))
def test_add_lines_with_ghosts_after_normal_revs(self):
# some versioned file formats allow lines to be added with parent
vf = self.get_file()
# probe for ghost support
try:
vf.add_lines_with_ghosts(b'base', [], [b'line\n', b'line_b\n'])
except NotImplementedError:
vf.add_lines_with_ghosts(b'references_ghost',
[b'base', b'a_ghost'],
[b'line\n', b'line_b\n', b'line_c\n'])
origins = vf.annotate(b'references_ghost')
self.assertEqual((b'base', b'line\n'), origins[0])
self.assertEqual((b'base', b'line_b\n'), origins[1])
self.assertEqual((b'references_ghost', b'line_c\n'), origins[2])
def test_readonly_mode(self):
t = self.get_transport()
factory = self.get_factory()
vf = factory('id', t, 0o777, create=True, access_mode='w')
vf = factory('id', t, access_mode='r')
self.assertRaises(errors.ReadOnlyError, vf.add_lines, b'base', [], [])
self.assertRaises(errors.ReadOnlyError,
vf.add_lines_with_ghosts,
self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])
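# PlanMergeVersionedFile keeps ':'-suffixed working keys itself and
# resolves everything else through these fallback files - see
# test_add_lines below, where only (b'root', b'a:') is accepted.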
def test_add_lines(self):
self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
(b'root', b'a'), [], [])
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
(b'root', b'a:'), None, [])
self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
(b'root', b'a:'), [], None)
def setup_abcde(self):
self.vf1.add_lines((b'root', b'A'), [], [b'a'])
self.vf1.add_lines((b'root', b'B'), [(b'root', b'A')], [b'b'])
self.vf2.add_lines((b'root', b'C'), [], [b'c'])
self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
self.plan_merge_vf.add_lines((b'root', b'E:'),
[(b'root', b'B'), (b'root', b'D')], [b'e'])
def test_get_parents(self):
self.setup_abcde()
self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
self.assertEqual({},
self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
self.assertEqual({
(b'root', b'B'): ((b'root', b'A'),),
(b'root', b'D'): ((b'root', b'C'),),
(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
},
self.plan_merge_vf.get_parent_map(
[(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'), (b'root', b'F')]))
def test_get_record_stream(self):
self.setup_abcde()

def get_record(suffix):
return next(self.plan_merge_vf.get_record_stream(
[(b'root', suffix)], 'unordered', True))
self.assertEqual(b'a', get_record(b'A').get_bytes_as('fulltext'))
self.assertEqual(b'a', b''.join(get_record(b'A').iter_bytes_as('chunked')))
self.assertEqual(b'c', get_record(b'C').get_bytes_as('fulltext'))
self.assertEqual(b'e', get_record(b'E:').get_bytes_as('fulltext'))
self.assertEqual('absent', get_record(b'F').storage_kind)
mp = list(map(addcrlf, mp))
self.assertEqual(mt.readlines(), mp)
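# doMerge(base, a, b, expected): a and b both derive from base, and the
# weave merge of the two must render exactly the expected lines,
# including any conflict markers.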
def testOneInsert(self):

def testSeparateInserts(self):
self.doMerge([b'aaa', b'bbb', b'ccc'],
[b'aaa', b'xxx', b'bbb', b'ccc'],
[b'aaa', b'bbb', b'yyy', b'ccc'],
[b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
def testSameInsert(self):
self.doMerge([b'aaa', b'bbb', b'ccc'],
[b'aaa', b'xxx', b'bbb', b'ccc'],
[b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
[b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']
def testOverlappedInsert(self):
self.doMerge([b'aaa', b'bbb'],
[b'aaa', b'xxx', b'yyy', b'bbb'],
[b'aaa', b'xxx', b'bbb'], self.overlappedInsertExpected)
# really it ought to reduce this to
# [b'aaa', b'xxx', b'yyy', b'bbb']
def testClashReplace(self):
self.doMerge([b'aaa'],
[b'<<<<<<< ', b'xxx', b'=======', b'yyy', b'zzz',
def testNonClashInsert1(self):
self.doMerge([b'aaa'],
[b'<<<<<<< ', b'xxx', b'aaa', b'=======', b'yyy', b'zzz',
def testNonClashInsert2(self):
self.doMerge([b'aaa'],
def testDeleteAndModify(self):
"""Clashing delete and modification.
# skipped, not working yet
self.doMerge([b'aaa', b'bbb', b'ccc'],
[b'aaa', b'ddd', b'ccc'],
[b'aaa', b'ccc'],
[b'<<<<<<<< ', b'aaa', b'=======', b'>>>>>>> ', b'ccc'])
def _test_merge_from_strings(self, base, a, b, expected):
w = self.get_file()
w.add_lines(b'text0', [], base.splitlines(True))
w.add_lines(b'text1', [b'text0'], a.splitlines(True))
w.add_lines(b'text2', [b'text0'], b.splitlines(True))
self.log('merge plan:')
p = list(w.plan_merge(b'text1', b'text2'))
for state, line in p:
self.log('%12s | %s' % (state, line[:-1]))
self.log('merge result:')
result_text = b''.join(w.weave_merge(p))
self.log(result_text)
self.assertEqualDiff(result_text, expected)
def test_weave_merge_conflicts(self):
# does weave merge properly handle plans that end with unchanged?
result = b''.join(self.get_file().weave_merge([('new-a', b'hello\n')]))
self.assertEqual(result, b'hello\n')

def test_deletion_extended(self):
"""One side deletes, the other deletes more.
def test_identity_mapper(self):
mapper = versionedfile.ConstantMapper("inventory")
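# ConstantMapper ignores the key entirely and always answers with the
# name it was constructed with.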
self.assertEqual("inventory", mapper.map((b'foo@ar',)))
self.assertEqual("inventory", mapper.map((b'quux',)))
def test_prefix_mapper(self):
# format5: plain
mapper = versionedfile.PrefixMapper()
self.assertEqual("file-id", mapper.map((b"file-id", b"revision-id")))
self.assertEqual("new-id", mapper.map((b"new-id", b"revision-id")))
self.assertEqual((b'file-id',), mapper.unmap("file-id"))
self.assertEqual((b'new-id',), mapper.unmap("new-id"))
def test_hash_prefix_mapper(self):
# format6: hash + plain
mapper = versionedfile.HashPrefixMapper()
self.assertEqual(
"9b/file-id", mapper.map((b"file-id", b"revision-id")))
self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))
def test_hash_escaped_mapper(self):
# knit1: hash + escaped
mapper = versionedfile.HashEscapedPrefixMapper()
self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
b"revision-id")))
self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
b"revision-id")))
self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
class TestVersionedFiles(TestCaseWithMemoryTransport):
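# Each scenario supplies a 'factory' callable (plus optional 'cleanup',
# 'graph' and 'support_partial_insertion' flags) so every test below
# runs against each VersionedFiles implementation in turn.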
# plain text knits in packs (texts)
len_one_scenarios = [
('weave-named', {
'factory': make_versioned_files_factory(WeaveFile,
ConstantMapper('inventory')),
'support_partial_insertion': False,
('named-knit', {
'factory': make_file_factory(False, ConstantMapper('revisions')),
'support_partial_insertion': False,
('named-nograph-nodelta-knit-pack', {
'cleanup': cleanup_pack_knit,
'factory': make_pack_factory(False, False, 1),
'support_partial_insertion': False,
('named-graph-knit-pack', {
'cleanup': cleanup_pack_knit,
'factory': make_pack_factory(True, True, 1),
'support_partial_insertion': True,
('named-graph-nodelta-knit-pack', {
'cleanup': cleanup_pack_knit,
'factory': make_pack_factory(True, False, 1),
'support_partial_insertion': False,
('groupcompress-nograph', {
'cleanup': groupcompress.cleanup_pack_group,
'factory': groupcompress.make_pack_factory(False, False, 1),
'graph': False,
'support_partial_insertion': False,
len_two_scenarios = [
('weave-prefix', {
'factory': make_versioned_files_factory(WeaveFile,
PrefixMapper()),
'support_partial_insertion': False,
('annotated-knit-escape', {
'factory': make_file_factory(True, HashEscapedPrefixMapper()),
'support_partial_insertion': False,
('plain-knit-pack', {
'cleanup': cleanup_pack_knit,
'factory': make_pack_factory(True, True, 2),
'support_partial_insertion': True,
('groupcompress', {
'cleanup': groupcompress.cleanup_pack_group,
'factory': groupcompress.make_pack_factory(True, False, 1),
'support_partial_insertion': False,
for record in f.get_record_stream([key0, key1], 'unordered', True):
records.append((record.key, record.get_bytes_as('fulltext')))
self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)

def test_add_chunks(self):
f = self.get_versionedfiles()
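# add_content consumes a ContentFactory; the helper below wraps raw
# byte chunks in a ChunkedContentFactory carrying the key, parents,
# sha1 and chunks so they can be fed straight in.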
key0 = self.get_simple_key(b'r0')
key1 = self.get_simple_key(b'r1')
key2 = self.get_simple_key(b'r2')
keyf = self.get_simple_key(b'foo')
def add_chunks(key, parents, chunks):
factory = ChunkedContentFactory(
key, parents, osutils.sha_strings(chunks), chunks)
return f.add_content(factory)
add_chunks(key0, [], [b'a', b'\nb\n'])
add_chunks(key1, [key0], [b'b', b'\n', b'c\n'])
add_chunks(key1, [], [b'b\n', b'c\n'])
keys = f.keys()
self.assertIn(key0, keys)
self.assertIn(key1, keys)
records = []
for record in f.get_record_stream([key0, key1], 'unordered', True):
records.append((record.key, record.get_bytes_as('fulltext')))
self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
def test_annotate(self):
files = self.get_versionedfiles()
self.get_diamond_files(files)
if self.key_length == 1:
prefix = ()
else:
prefix = (b'FileA',)
# introduced full text
origins = files.annotate(prefix + (b'origin',))
self.assertEqual([
(prefix + (b'origin',), b'origin\n')],
origins)
origins = files.annotate(prefix + (b'base',))
self.assertEqual([
(prefix + (b'base',), b'base\n')],
origins)
origins = files.annotate(prefix + (b'merged',))
if self.graph:
self.assertEqual([
(prefix + (b'base',), b'base\n'),
(prefix + (b'left',), b'left\n'),
(prefix + (b'right',), b'right\n'),
(prefix + (b'merged',), b'merged\n')
],
origins)
else:
# Without a graph everything is new.
self.assertEqual([
(prefix + (b'merged',), b'base\n'),
(prefix + (b'merged',), b'left\n'),
(prefix + (b'merged',), b'right\n'),
(prefix + (b'merged',), b'merged\n')
],
origins)
self.assertRaises(RevisionNotPresent,
files.annotate, prefix + ('missing-key',))
def test_check_no_parameters(self):
files = self.get_versionedfiles()
files = self.get_versionedfiles()

def get_diamond_files(self, files, trailing_eol=True, left_only=False,
nokeys=False):
return get_diamond_files(files, self.key_length,
trailing_eol=trailing_eol, nograph=not self.graph,
left_only=left_only, nokeys=nokeys)
def _add_content_nostoresha(self, add_lines):
"""When nostore_sha is supplied, adding old content raises."""
vf = self.get_versionedfiles()
empty_text = (b'a', [])
sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
shas = []
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
lines)
shas.append(sha)
# we now have a copy of all the lines in the vf.
for sha, (version, lines) in zip(
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
new_key = self.get_simple_key(version + b"2")
self.assertRaises(ExistingContent,
vf.add_lines, new_key, [], lines,
nostore_sha=sha)
self.assertRaises(ExistingContent,
vf.add_lines, new_key, [], lines,
nostore_sha=sha)
# and no new version should have been added.
record = next(vf.get_record_stream([new_key], 'unordered', True))
self.assertEqual('absent', record.storage_kind)
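# ('absent' is the storage_kind get_record_stream uses to report keys
# that are not present, rather than raising.)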
results.append(add[:2])
if self.key_length == 1:
self.assertEqual([
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
results)
elif self.key_length == 2:
self.assertEqual([
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
results)
def test_add_lines_no_key_generates_chk_key(self):
results.append(add[:2])
if self.key_length == 1:
self.assertEqual([
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
results)
# Check the added items got CHK keys.
self.assertEqual({
(b'sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
(b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
(b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
(b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
(b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
},
files.keys())
elif self.key_length == 2:
self.assertEqual([
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
results)
# Check the added items got CHK keys.
self.assertEqual({
(b'FileA', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
(b'FileA', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
(b'FileA', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
(b'FileA', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
(b'FileA', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
(b'FileB', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
(b'FileB', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
(b'FileB', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
(b'FileB', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
(b'FileB', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
},
files.keys())
def test_empty_lines(self):
"""Empty files can be stored."""
f = self.get_versionedfiles()
key_a = self.get_simple_key(b'a')
f.add_lines(key_a, [], [])
self.assertEqual(b'',
next(f.get_record_stream([key_a], 'unordered', True
)).get_bytes_as('fulltext'))
key_b = self.get_simple_key(b'b')
f.add_lines(key_b, self.get_parents([key_a]), [])
self.assertEqual(b'',
next(f.get_record_stream([key_b], 'unordered', True
)).get_bytes_as('fulltext'))
def test_newline_only(self):
f = self.get_versionedfiles()
key_a = self.get_simple_key(b'a')
f.add_lines(key_a, [], [b'\n'])
self.assertEqual(b'\n',
next(f.get_record_stream([key_a], 'unordered', True
)).get_bytes_as('fulltext'))
key_b = self.get_simple_key(b'b')
f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
self.assertEqual(b'\n',
next(f.get_record_stream([key_b], 'unordered', True
)).get_bytes_as('fulltext'))
def test_get_known_graph_ancestry(self):
f = self.get_versionedfiles()
if not self.graph:
raise TestNotApplicable('ancestry info only relevant with graph.')
key_a = self.get_simple_key(b'a')
key_b = self.get_simple_key(b'b')
key_c = self.get_simple_key(b'c')
f.add_lines(key_a, [], [b'\n'])
f.add_lines(key_b, [key_a], [b'\n'])
f.add_lines(key_c, [key_a, key_b], [b'\n'])
kg = f.get_known_graph_ancestry([key_c])
self.assertIsInstance(kg, _mod_graph.KnownGraph)
self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
def get_keys_and_sort_order(self):
"""Get diamond test keys list, and their sort ordering."""
if self.key_length == 1:
keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
sort_order = {(b'merged',): 2, (b'left',): 1,
(b'right',): 1, (b'base',): 0}
else:
keys = [
(b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
(b'FileA', b'base'),
(b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
(b'FileB', b'base'),
]
sort_order = {
(b'FileA', b'merged'): 2, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
(b'FileA', b'base'): 0,
(b'FileB', b'merged'): 2, (b'FileB', b'left'): 1, (b'FileB', b'right'): 1,
(b'FileB', b'base'): 0,
}
return keys, sort_order

def get_keys_and_groupcompress_sort_order(self):
"""Get diamond test keys list, and their groupcompress sort ordering."""
if self.key_length == 1:
keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
sort_order = {(b'merged',): 0, (b'left',): 1,
(b'right',): 1, (b'base',): 2}
else:
keys = [
(b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
(b'FileA', b'base'),
(b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
(b'FileB', b'base'),
]
sort_order = {
(b'FileA', b'merged'): 0, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
(b'FileA', b'base'): 2,
(b'FileB', b'merged'): 3, (b'FileB', b'left'): 4, (b'FileB', b'right'): 4,
(b'FileB', b'base'): 5,
}
return keys, sort_order
self.assertEqual(parent_map[factory.key], factory.parents)
# currently no stream emits mpdiff
self.assertRaises(UnavailableRepresentation,
factory.get_bytes_as, 'mpdiff')
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
self.assertEqual(set(keys), seen)
def test_get_record_stream_missing_records_are_absent(self):
files = self.get_versionedfiles()
self.get_diamond_files(files)
if self.key_length == 1:
keys = [(b'merged',), (b'left',), (b'right',),
(b'absent',), (b'base',)]
else:
keys = [
(b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
(b'FileA', b'absent'), (b'FileA', b'base'),
(b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
(b'FileB', b'absent'), (b'FileB', b'base'),
(b'absent', b'absent'),
]
parent_map = files.get_parent_map(keys)
entries = files.get_record_stream(keys, 'unordered', False)
self.assertRaises(RevisionNotPresent,
files.get_annotator().annotate, self.get_simple_key(b'missing-key'))
def test_get_parent_map(self):
files = self.get_versionedfiles()
if self.key_length == 1:
parent_details = [
((b'r0',), self.get_parents(())),
((b'r1',), self.get_parents(((b'r0',),))),
((b'r2',), self.get_parents(())),
((b'r3',), self.get_parents(())),
((b'm',), self.get_parents(((b'r0',), (b'r1',), (b'r2',), (b'r3',)))),
]
else:
parent_details = [
((b'FileA', b'r0'), self.get_parents(())),
((b'FileA', b'r1'), self.get_parents(((b'FileA', b'r0'),))),
((b'FileA', b'r2'), self.get_parents(())),
((b'FileA', b'r3'), self.get_parents(())),
((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
(b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
]
for key, parents in parent_details:
files.add_lines(key, parents, [])
# immediately after adding it should be queryable.
self.assertEqual({key: parents}, files.get_parent_map([key]))
2293
2277
# We can ask for an empty set
2294
2278
self.assertEqual({}, files.get_parent_map([]))
2295
2279
# We can ask for many keys
2296
2280
all_parents = dict(parent_details)
2297
2281
self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
2298
2282
# Absent keys are just not included in the result.
2299
keys = list(all_parents.keys())
2283
keys = all_parents.keys()
2300
2284
if self.key_length == 1:
2301
keys.insert(1, (b'missing',))
2285
keys.insert(1, ('missing',))
2303
keys.insert(1, (b'missing', b'missing'))
2287
keys.insert(1, ('missing', 'missing'))
2304
2288
# Absent keys are just ignored
2305
2289
self.assertEqual(all_parents, files.get_parent_map(keys))
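Because get_parent_map() returns a plain dict of key to parent tuples, its result can feed graph utilities directly. A hedged sketch: breezy.tsort.topo_sort is a real helper, but this exact pairing is illustrative and assumes the graph has no ghost parents, as holds for the diamond fixture.
from breezy import tsort

# Order every stored text so that parents always precede children.
parent_map = files.get_parent_map(files.keys())
for key in tsort.topo_sort(parent_map):
    print(key)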
files = self.get_versionedfiles()
self.get_diamond_files(files)
if self.key_length == 1:
keys = [(b'base',), (b'origin',), (b'left',),
(b'merged',), (b'right',)]
# ask for shas from different prefixes.
(b'FileA', b'base'), (b'FileB', b'origin'), (b'FileA', b'left'),
(b'FileA', b'merged'), (b'FileB', b'right'),
self.assertEqual({
keys[0]: b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
keys[1]: b'00e364d235126be43292ab09cb4686cf703ddc17',
keys[2]: b'a8478686da38e370e32e42e8a0c220e33ee9132f',
keys[3]: b'ed8bce375198ea62444dc71952b22cfc2b09226d',
keys[4]: b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
files.get_sha1s(keys))
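get_sha1s() pairs naturally with the record stream for content verification. A minimal sketch, assuming osutils.sha_strings (used elsewhere in this file) and a record that can yield the 'chunked' representation:
from breezy import osutils

sha1s = files.get_sha1s(keys)
for record in files.get_record_stream(keys, 'unordered', False):
    # sha_strings hashes an iterable of byte chunks, so the 'chunked'
    # representation avoids joining one big string first.
    chunks = record.get_bytes_as('chunked')
    assert sha1s[record.key] == osutils.sha_strings(chunks)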
source = self.get_versionedfiles('source')
self.get_diamond_files(source)
if self.key_length == 1:
origin_keys = [(b'origin',)]
end_keys = [(b'merged',), (b'left',)]
start_keys = [(b'right',), (b'base',)]
origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
(b'FileB', b'merged',), (b'FileB', b'left',)]
start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
(b'FileB', b'right',), (b'FileB', b'base',)]
origin_entries = source.get_record_stream(
origin_keys, 'unordered', False)
end_entries = source.get_record_stream(end_keys, 'topological', False)
start_entries = source.get_record_stream(
start_keys, 'topological', False)
entries = itertools.chain(origin_entries, end_entries, start_entries)
files.insert_record_stream(entries)
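This chained-stream dance is essentially how texts get copied between stores: read records out of one VersionedFiles and insert them into another in a single pass. A hedged sketch of the simplest form; the function name and the ordering choice are illustrative, not the fetch code's own.
def copy_texts(source, target):
    # Topological ordering keeps parents ahead of children, which suits
    # delta-compressing targets; 'unordered' would also be accepted.
    stream = source.get_record_stream(source.keys(), 'topological', False)
    target.insert_record_stream(stream)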
files = self.get_versionedfiles()
# add a base to get included
files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
# add an ancestor to be included on one side
files.add_lines(self.get_simple_key(
b'lancestor'), (), [b'lancestor\n'])
# add an ancestor to be included on the other side
files.add_lines(self.get_simple_key(b'rancestor'),
self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
# add a child of rancestor with no eofile-nl
files.add_lines(self.get_simple_key(b'child'),
self.get_parents([self.get_simple_key(b'rancestor')]),
[b'base\n', b'child\n'])
# add a child of lancestor and base to join the two roots
files.add_lines(self.get_simple_key(b'otherchild'),
self.get_parents([self.get_simple_key(b'lancestor'),
self.get_simple_key(b'base')]),
[b'base\n', b'lancestor\n', b'otherchild\n'])
def iter_with_keys(keys, expected):
# now we need to see what lines are returned, and how often.
progress = InstrumentedProgress()
# iterate over the lines
for line in files.iter_lines_added_or_present_in_keys(keys,
lines.setdefault(line, 0)
lines[line] += 1
if [] != progress.updates:
self.assertEqual(expected, progress.updates)
lines = iter_with_keys(
[self.get_simple_key(b'child'),
self.get_simple_key(b'otherchild')],
[('Walking content', 0, 2),
('Walking content', 1, 2),
('Walking content', 2, 2)])
# we must see child and otherchild
self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
self.assertTrue(
lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
# we don't care if we got more than that.
# test all lines
lines = iter_with_keys(files.keys(),
[('Walking content', 0, 5),
('Walking content', 1, 5),
('Walking content', 2, 5),
('Walking content', 3, 5),
('Walking content', 4, 5),
('Walking content', 5, 5)])
# all lines must be seen at least once
self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
lines[(b'lancestor\n', self.get_simple_key(b'lancestor'))] > 0)
lines[(b'rancestor\n', self.get_simple_key(b'rancestor'))] > 0)
self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)
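Beyond the counting done in iter_with_keys above, the (line, key) pairs yielded by iter_lines_added_or_present_in_keys() are the raw material for building text indices. An illustrative helper with the same shape:
def index_lines(files, keys):
    """Illustrative: record which keys introduce or carry each line."""
    index = {}
    for line, key in files.iter_lines_added_or_present_in_keys(keys):
        index.setdefault(line, set()).add(key)
    return index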
def test_make_mpdiffs(self):
from breezy import multiparent
# add texts that should trip the knit maximum delta chain threshold
# as well as doing parallel chains of data in knits.
# this is done by two chains of 25 insertions
files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
files.add_lines(self.get_simple_key(b'noeol'),
self.get_parents([self.get_simple_key(b'base')]), [b'line'])
# detailed eol tests:
# shared last line with parent no-eol
files.add_lines(self.get_simple_key(b'noeolsecond'),
self.get_parents([self.get_simple_key(b'noeol')]),
[b'line\n', b'line'])
# differing last line with parent, both no-eol
files.add_lines(self.get_simple_key(b'noeolnotshared'),
self.get_parents([self.get_simple_key(b'noeolsecond')]),
[b'line\n', b'phone'])
# add eol following a noneol parent, change content
files.add_lines(self.get_simple_key(b'eol'),
self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
# add eol following a noneol parent, no change content
files.add_lines(self.get_simple_key(b'eolline'),
self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
# noeol with no parents:
files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
# noeol preceding its leftmost parent in the output:
# this is done by making it a merge of two parents with no common
# ancestry: noeolbase and noeol with the
# later-inserted parent the leftmost.
files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
self.get_parents([self.get_simple_key(b'noeolbase'),
self.get_simple_key(b'noeol')]),
# two identical eol texts
files.add_lines(self.get_simple_key(b'noeoldup'),
self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
next_parent = self.get_simple_key(b'base')
text_name = b'chain1-'
sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
19: b'1ebed371807ba5935958ad0884595126e8c4e823',
20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
for depth in range(26):
new_version = self.get_simple_key(text_name + b'%d' % depth)
text = text + [b'line\n']
files.add_lines(new_version, self.get_parents([next_parent]), text)
next_parent = new_version
next_parent = self.get_simple_key(b'base')
text_name = b'chain2-'
for depth in range(26):
new_version = self.get_simple_key(text_name + b'%d' % depth)
text = text + [b'line\n']
files.add_lines(new_version, self.get_parents([next_parent]), text)
next_parent = new_version
target = self.get_versionedfiles('target')
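The remainder of this test round-trips the two chains through multiparent diffs into the fresh target. As a hedged sketch of that shape: make_mpdiffs() produces one diff per requested key, and add_mpdiffs() consumes (key, parents, expected_sha1, mpdiff) tuples per the VersionedFiles docstring; treat the tuple layout and the parents-first ordering via tsort as assumptions here.
from breezy import tsort

parent_map = files.get_parent_map(files.keys())
keys = tsort.topo_sort(parent_map)  # parents first, so diffs can apply
sha1s = files.get_sha1s(keys)
mpdiffs = files.make_mpdiffs(keys)
# Re-create every text in the empty target from its multiparent diff.
target.add_mpdiffs(
    [(key, parent_map[key], sha1s[key], diff)
     for key, diff in zip(keys, mpdiffs)])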
def test_get_sha1s_nonexistent(self):
self.assertEqual({}, self.texts.get_sha1s([(b"NONEXISTENT",)]))
def test_get_sha1s(self):
self._lines[b"key"] = [b"dataline1", b"dataline2"]
self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
self.texts.get_sha1s([(b"key",)]))
def test_get_parent_map(self):
self._parent_map = {b"G": (b"A", b"B")}
self.assertEqual({(b"G",): ((b"A",), (b"B",))},
self.texts.get_parent_map([(b"G",), (b"L",)]))
def test_get_record_stream(self):
self._lines[b"A"] = [b"FOO", b"BAR"]
it = self.texts.get_record_stream([(b"A",)], "unordered", True)
record = next(it)
self.assertEqual("chunked", record.storage_kind)
self.assertEqual(b"FOOBAR", record.get_bytes_as("fulltext"))
self.assertEqual([b"FOO", b"BAR"], record.get_bytes_as("chunked"))
def test_get_record_stream_absent(self):
it = self.texts.get_record_stream([(b"A",)], "unordered", True)
record = next(it)
self.assertEqual("absent", record.storage_kind)
def test_iter_lines_added_or_present_in_keys(self):
self._lines[b"A"] = [b"FOO", b"BAR"]
self._lines[b"B"] = [b"HEY"]
self._lines[b"C"] = [b"Alberta"]
it = self.texts.iter_lines_added_or_present_in_keys([(b"A",), (b"B",)])
self.assertEqual(sorted([(b"FOO", b"A"), (b"BAR", b"A"), (b"HEY", b"B")]),
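These dict-backed tests drive a VirtualVersionedFiles, which adapts two callables into the read side of the VersionedFiles API. A hedged sketch of the wiring: the constructor takes a get_parent_map callable and a get_lines callable, both working on plain revision ids rather than key tuples; the import path and sample data below are assumptions for illustration.
from breezy.versionedfile import VirtualVersionedFiles

lines = {b"A": [b"FOO", b"BAR"]}
parents = {b"A": ()}

# The adapter wraps raw revision ids into 1-tuple keys for callers.
texts = VirtualVersionedFiles(
    lambda revids: {r: parents[r] for r in revids if r in parents},
    lines.get)

record = next(texts.get_record_stream([(b"A",)], "unordered", True))
assert record.get_bytes_as("fulltext") == b"FOOBAR"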
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):
self.assertEqual([], vf.calls)
def test_get_record_stream_topological(self):
vf = self.get_ordering_vf(
{(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
keys = [r.key for r in vf.get_record_stream(request_keys,
'topological', False)]
# We should have gotten the keys in topological order
self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
# And recorded that the request was made
self.assertEqual([('get_record_stream', request_keys, 'topological',
False)], vf.calls)
def test_get_record_stream_ordered(self):
vf = self.get_ordering_vf(
{(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
keys = [r.key for r in vf.get_record_stream(request_keys,
'unordered', False)]
# They should be returned based on their priority
self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
# And the request recorded
self.assertEqual([('get_record_stream', request_keys, 'unordered',
False)], vf.calls)
def test_get_record_stream_implicit_order(self):
vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
keys = [r.key for r in vf.get_record_stream(request_keys,
'unordered', False)]
# A and C are not in the map, so they get sorted to the front. A comes
# before C alphabetically, so it comes back first
self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
# And the request recorded
self.assertEqual([('get_record_stream', request_keys, 'unordered',
False)], vf.calls)
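The three tests above pin the decorator's 'unordered' behaviour down to a simple rule. One way to reproduce the observed order, as a hedged sketch rather than the decorator's own code; it assumes priorities are non-negative, as they are in these tests:
def expected_unordered(request_keys, priority):
    # Keys missing from the priority map sort to the front (and then
    # alphabetically); known keys follow in ascending priority.
    return sorted(request_keys, key=lambda k: (priority.get(k, -1), k))

assert expected_unordered(
    [(b'B',), (b'C',), (b'D',), (b'A',)],
    {(b'B',): 2, (b'D',): 1}) == [(b'A',), (b'C',), (b'D',), (b'B',)]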