# Copyright (C) 2005 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO

from bzrlib import (
    errors,
    osutils,
    progress,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib import knit as _mod_knit
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.symbol_versioning import one_four, one_five
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestSkipped,
    condition_isinstance,
    multiply_tests,
    split_suite_by_condition,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
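
# For orientation: each scenario is a (name, attributes) tuple, and
# multiply_tests makes one copy of every TestVersionedFiles test per
# scenario, with the attributes set on the test instance. A sketch of the
# shape (not an additional scenario):
#
#     ('named-knit', {
#         'cleanup': None,
#         'factory': make_file_factory(False, ConstantMapper('revisions')),
#         'graph': True,
#         'key_length': 1,
#         'support_partial_insertion': False,
#         })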


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
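
# Both diamond helpers build this shape (one graph per prefix when
# 2-length keys are used):
#
#        origin
#          |
#         base
#         /  \
#      left  right
#         \  /
#        merged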


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :return: The results of the add_lines calls.
    """
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('origin',), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('base',),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('left',),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + ('right',),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + ('merged',),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations;
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_text('r0'), 'a\nb\n')
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(2, len(f))
        self.assertEqual(2, f.num_versions())
        self.assertRaises(RevisionNotPresent,
            f.add_lines, 'r2', ['foo'], [])
        self.assertRaises(RevisionAlreadyPresent,
            f.add_lines, 'r1', [], [])
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r0')
        self.assertEquals(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r2')
        f = self.reopen_file()

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
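
    # Note: the left_matching_blocks hint above uses the
    # SequenceMatcher.get_matching_blocks() convention of
    # (parent_index, new_index, length) triples terminated by a zero-length
    # block, so (0, 2, 1) claims parent line 0 matches new line 2 for one
    # line.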

    def test_inline_newline_throws(self):
        # inline newline characters are not permitted in lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines

        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
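
    # For context: a multi-parent diff ("mpdiff") expresses a text as new
    # lines plus references to line ranges in any of its parents, which is
    # why add_mpdiffs above is given the parents and the expected sha1 so it
    # can reconstruct and verify each full text.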

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical no-eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)
        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])
        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
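
    # The two fixtures below differ in what they corrupt:
    # get_file_corrupted_text changes a stored line while leaving the
    # recorded sha1s alone, and get_file_corrupted_checksum changes a
    # recorded sha1 while leaving the text alone; both must trip the
    # checks exercised above.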

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content.', 0, 2),
                                    ('Walking content.', 1, 2),
                                    ('Walking content.', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content.', 0, 5),
                                          ('Walking content.', 1, 5),
                                          ('Walking content.', 2, 5),
                                          ('Walking content.', 3, 5),
                                          ('Walking content.', 4, 5),
                                          ('Walking content.', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                          , 'hello\n'
                          , ('}', None)
                          , ('{', 1)
                          , 'there\n'
                          , ('}', None)
                          ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                          , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                          ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        #Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
            ('root', 'B'):(('root', 'A'),),
            ('root', 'D'):(('root', 'C'),),
            ('root', 'E:'):(('root', 'B'),('root', 'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)
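
# A note on the class under test: _PlanMergeVersionedFile overlays
# uncommitted merge texts (whose version ids end in ':') on its
# fallback_versionedfiles, which is why lookups above fall through to vf1
# and vf2, and why unknown keys come back as 'absent' records.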


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx',],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        #######################################
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<< 
            int a() {}
=======
            int c() {}
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        #format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
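
# The mappers above correspond to the storage layouts of the historical
# formats: a single constant-named file, one file per file-id, and the
# hash-prefixed forms that shard files into directories named after two hex
# digits of a hash of the first key element, optionally escaping unsafe
# characters as the %25xx sequences exercised above.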


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order
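
    # A design note rather than an interface guarantee: 'groupcompress'
    # order emits newest texts first (so older texts can compress against
    # them) and, for 2-length keys, keeps each prefix's texts together,
    # which is what the sort orders above encode.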

    def test_get_record_stream_interface_ordered(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the given bytes as its content."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use, not
            # wire serialisation: check they are able to be used directly.
            # To send such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))
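        # In outline, the wire path exercised here is:
        #   bytes = [record.get_bytes_as(record.storage_kind)]
        #   versionedfile.NetworkRecordStream(bytes).read()
        # Native storage kinds are already wire-ready, so each record that
        # comes back must match the source in kind, parents and bytes.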

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
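        # chunked and fulltext records have no native wire form, so they
        # are verified in place and tallied in skipped_records rather than
        # yielded; callers then balance len(records) + skipped_records[0]
        # against the number of records sent.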

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object
        # so we can check the content was carried across correctly without
        # doing delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire, thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object
        # so we can check the content was carried across correctly without
        # doing delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire, thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # check the content carried across the network can yield its full
        # text, without inserting into another store first.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire, thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
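        # include_delta_closure=True obliges the stream to carry enough data
        # for get_bytes_as('fulltext') to work on the receiving side, even
        # for records whose native storage kind is a delta.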

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(
                    factory.get_bytes_as(factory.storage_kind), str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered out trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys, to ensure that content before and after
        # the absent keys is still delivered.)
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
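        # get_sha1s maps each key to the sha1 of its full text; the digests
        # are fixed by the diamond fixture's contents, which is why the same
        # values serve both the length-1 and length-2 key branches.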

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that expected and actual have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts with no final EOL."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
2072
def test_insert_record_stream_annotated_knits(self):
2073
"""Any file should accept a stream from plain knits."""
2074
files = self.get_versionedfiles()
2075
mapper = self.get_mapper()
2076
source_transport = self.get_transport('source')
2077
source_transport.mkdir('.')
2078
source = make_file_factory(True, mapper)(source_transport)
2079
self.get_diamond_files(source)
2080
stream = source.get_record_stream(source.keys(), 'topological',
2082
files.insert_record_stream(stream)
2083
self.assertIdenticalVersionedFile(source, files)
2085
def test_insert_record_stream_annotated_knits_noeol(self):
2086
"""Any file should accept a stream from plain knits."""
2087
files = self.get_versionedfiles()
2088
mapper = self.get_mapper()
2089
source_transport = self.get_transport('source')
2090
source_transport.mkdir('.')
2091
source = make_file_factory(True, mapper)(source_transport)
2092
self.get_diamond_files(source, trailing_eol=False)
2093
stream = source.get_record_stream(source.keys(), 'topological',
2095
files.insert_record_stream(stream)
2096
self.assertIdenticalVersionedFile(source, files)
2098
def test_insert_record_stream_plain_knits(self):
2099
"""Any file should accept a stream from plain knits."""
2100
files = self.get_versionedfiles()
2101
mapper = self.get_mapper()
2102
source_transport = self.get_transport('source')
2103
source_transport.mkdir('.')
2104
source = make_file_factory(False, mapper)(source_transport)
2105
self.get_diamond_files(source)
2106
stream = source.get_record_stream(source.keys(), 'topological',
2108
files.insert_record_stream(stream)
2109
self.assertIdenticalVersionedFile(source, files)
2111
def test_insert_record_stream_plain_knits_noeol(self):
2112
"""Any file should accept a stream from plain knits."""
2113
files = self.get_versionedfiles()
2114
mapper = self.get_mapper()
2115
source_transport = self.get_transport('source')
2116
source_transport.mkdir('.')
2117
source = make_file_factory(False, mapper)(source_transport)
2118
self.get_diamond_files(source, trailing_eol=False)
2119
stream = source.get_record_stream(source.keys(), 'topological',
2121
files.insert_record_stream(stream)
2122
self.assertIdenticalVersionedFile(source, files)
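
    # The six insert_record_stream tests above share one shape: build a
    # source in a specific storage format (weave fulltexts, annotated or
    # plain knits), stream it in topological order, insert the stream into
    # this scenario's files, and require an identical resulting store.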

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered',
            False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological',
            False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)
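        # The interface deliberately permits either outcome for an
        # out-of-order stream: buffer and succeed, or raise
        # RevisionNotPresent; in both cases the store must remain usable.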

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source
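
    # With this source, 'merged' is stored as a knit delta whose compression
    # parent is 'left'; streaming 'merged' without 'left' therefore yields a
    # record with a missing basis, which the two tests below depend on.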

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream. The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()
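        # The two insert_record_stream calls above amount to a two-phase
        # fetch: the first leaves 'merged' buffered against the missing
        # 'left' basis, and the second supplies just the delta closure of
        # the missing bases, completing 'merged' without resending it.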

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content.', 0, 2),
             ('Walking content.', 1, 2),
             ('Walking content.', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content.', 0, 5),
             ('Walking content.', 1, 5),
             ('Walking content.', 2, 5),
             ('Walking content.', 3, 5),
             ('Walking content.', 4, 5),
             ('Walking content.', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
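        # iter_lines_added_or_present_in_keys yields (line, key) pairs and
        # may emit a line more than once, which is why the assertions above
        # only require each expected pair to be seen at least once.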

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext')
                )
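
    # add_mpdiffs is the inverse of make_mpdiffs: given the multi-parent
    # diff, the parent keys and the expected sha1, the target store must
    # reproduce a byte-identical fulltext, as checked key by key above.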
    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError,
            self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
            self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
            self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
            sorted(list(it)))
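
    # VirtualVersionedFiles adapts a plain parent-map callable and a
    # key->lines mapping to the read-only VersionedFiles API; mutating
    # operations such as add_lines, add_mpdiffs and insert_record_stream
    # are expected to raise NotImplementedError, as asserted above.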


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)
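
    # The decorator is pass-through plus bookkeeping: each request is
    # recorded in vf.calls, and 'unordered' streams come back sorted by
    # ascending key_priority, with keys absent from the map sorting ahead
    # of mapped ones (see test_get_record_stream_implicit_order below).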

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
            False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
            False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
            False)], vf.calls)