# Copyright (C) 2005, 2009 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA


# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO
from bzrlib import (
    errors,
    osutils,
    progress,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib import knit as _mod_knit
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestSkipped,
    condition_isinstance,
    split_suite_by_condition,
    multiply_tests,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    # as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
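    # A sketch of the graph being built, for orientation:
    #
    #        origin
    #          |
    #         base
    #        /    \
    #     left    right
    #        \    /
    #        merged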
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys, two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :return: The results of the add_lines calls.
    """
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('origin',), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('base',),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('left',),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + ('right',),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + ('merged',),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations;
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_text('r0'), 'a\nb\n')
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(2, len(f))
        self.assertEqual(2, f.num_versions())

        self.assertRaises(RevisionNotPresent,
            f.add_lines, 'r2', ['foo'], [])
        self.assertRaises(RevisionAlreadyPresent,
            f.add_lines, 'r1', [], [])
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r0')
        self.assertEquals(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r2')

        f = self.reopen_file()

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
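        # Note (an assumption documented here for orientation): each matching
        # block below is an (old_start, new_start, length) triple in
        # difflib.SequenceMatcher style, with a zero-length block as the
        # terminating sentinel.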
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))

    def test_inline_newline_throws(self):
        # embedded newline characters are not permitted in lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Adding a text with an unchanged last line and no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Adding a text with an unchanged last line and no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
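        # A sketch of the resulting graph ('r3' is a child of 'r2' but
        # deliberately not an ancestor of 'rM'):
        #
        #        r0
        #       /  \
        #     r1    r2
        #       \  /  \
        #        rM    r3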
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # etc
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction,
            f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.
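        # A sketch of the tree built below (two roots, joined by otherchild):
        #
        #   lancestor    base
        #        \      /    \
        #         \    /   rancestor
        #          \  /        |
        #      otherchild    child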
        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that references versions not yet present in the file.
        # Formats that do not support this need to raise NotImplementedError
        # on the add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError,
                vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
            vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
            vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that references versions not yet present in the file.
        # Formats that do not support this need to raise NotImplementedError
        # on the add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base', [], [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
            WeaveFile,
            'foo',
            get_transport(self.get_url('.')),
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'):(('root', 'A'),),
                ('root', 'D'):(('root', 'C'),),
                ('root', 'E:'):(('root', 'B'),('root', 'D')),
                },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interesting adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        # format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
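        # For orientation: judging by the expected values above, the escaped
        # form quotes awkward characters first ('E' -> '%45') and then quotes
        # the '%' itself ('%' -> '%25'), which is why 'E' appears as '%2545'.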


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only)

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order

    def test_get_record_stream_interface_ordered(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try a particular
        # kind, have it fail, then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the given bytes as its content."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
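
    # 'fulltext' yields a single byte string; 'chunked' yields an iterable of
    # byte strings whose concatenation is that same fulltext, so both
    # representations are checked against the same expected bytes.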

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))
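
    # The round trip exercised above: a record serialises itself via
    # record.get_bytes_as(record.storage_kind), and NetworkRecordStream
    # rebuilds equivalent record factories from those bytes on the far side,
    # preserving key, parents, storage kind and content.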

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
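
    # Typical use, as in the tests below: skipped_records = [0] acts as a
    # mutable counter (a plain int could not be rebound from inside the
    # generator), and the resulting byte iterator is fed straight into
    # versionedfile.NetworkRecordStream.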

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
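
    # Passing include_delta_closure=True to get_record_stream asks the source
    # to make every record's full text recoverable from the stream alone,
    # which is why no basis text has to be copied across beforehand here.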

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(
                    factory.get_bytes_as(factory.storage_kind), str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after the
        # absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()
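
    # ConstantMapper stores every key in a single named file ('source' here),
    # while HashEscapedPrefixMapper spreads keys across directories derived
    # from a hash of the escaped key prefix, matching the prefixed layouts the
    # test scenarios use.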

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
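
    # The expected values are the hex sha1s of each key's fulltext as created
    # by get_diamond_files (keys[0] covers the 'base' text, and so on), so any
    # change to the fixture contents would invalidate these constants.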

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a no-eol stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a no-eol stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a no-eol stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files first.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source
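
    # Plain knits store each text after the first as a line delta against its
    # leftmost parent, so streaming 'merged' from this source without its
    # compression parent ('left') produces exactly the dangling-delta case the
    # two tests below need.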

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream. The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))
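
    # Round trip: make_mpdiffs produces a multi-parent diff per key, and
    # add_mpdiffs on the target rebuilds each text from that diff plus the
    # parent texts inserted earlier in topological order; the resulting
    # fulltexts must then compare equal.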

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
            [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
            self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
            self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
            sorted(list(it)))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)
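
    # key_priority maps keys to sort weights for 'unordered' streams: lower
    # values are streamed first, and keys absent from the map sort to the
    # front, as the implicit-order test below demonstrates.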

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)