# Copyright (C) 2005, 2009 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA


# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO

from bzrlib import (
    errors,
    osutils,
    progress,
    )
from bzrlib.errors import (
    RevisionAlreadyPresent,
    RevisionNotPresent,
    )
from bzrlib import knit as _mod_knit
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.symbol_versioning import one_four, one_five
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestSkipped,
    condition_isinstance,
    multiply_tests,
    split_suite_by_condition,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
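

# Added notes (not part of the original module): multiply_tests copies each
# TestVersionedFiles test once per scenario above and sets the scenario dict
# entries as attributes on the copy, so a parameterised test body can write,
# for example:
#
#   files = self.factory(transport)    # the VersionedFiles implementation
#   if self.key_length == 2:
#       key = ('FileA', 'origin')      # prefixed keys for 2-element layouts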


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
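

# For orientation (added comment, not in the original file): the ancestry
# built by get_diamond_vf and get_diamond_files is
#
#        origin
#          |
#         base
#         /  \
#      left  right
#         \  /
#        merged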


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :return: The results of the add_lines calls.
    """
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('origin',), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('base',),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + ('left',),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + ('right',),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + ('merged',),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
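

# A note added for clarity (not in the original file): with key_length == 2
# the diamond is built once per prefix, and inserts for each node are spread
# across the prefixes, e.g.:
#
#   files.add_lines(('FileA', 'origin'), (), ['origin\n'])
#   files.add_lines(('FileB', 'origin'), (), ['origin\n'])
#   files.add_lines(('FileA', 'base'), (('FileA', 'origin'),), ['base\n'])
#   ...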


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations,
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_text('r0'), 'a\nb\n')
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())

            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])

        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r0')
        self.assertEquals(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r2')

        f = self.reopen_file()

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes.

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))

    def test_inline_newline_throws(self):
        # \n characters are not permitted within lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, reusing existing content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, reusing existing content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
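
    # An illustrative aside (added; not part of the original test): for
    # lines = ['foo\n', 'bar\n'] the expected first two elements are
    #
    #   (osutils.sha_strings(['foo\n', 'bar\n']), 8)
    #
    # i.e. the sha1 of the concatenated text 'foo\nbar\n' and its length
    # in bytes.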

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Adding a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Adding a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
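
    # A minimal usage sketch of the mpdiff round-trip exercised above (added
    # for illustration; 'source' and 'target' are hypothetical VersionedFile
    # objects):
    #
    #   diffs = source.make_mpdiffs(['rev-1'])
    #   target.add_mpdiffs([('rev-1',
    #       source.get_parent_map(['rev-1'])['rev-1'],
    #       source.get_sha1s(['rev-1'])['rev-1'],
    #       diffs[0])])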

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))
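
    # Shape of the graph used above (added comment for orientation):
    #
    #   r0 ---> r1 ---> rM
    #     \            /
    #      +--> r2 ---+
    #             \
    #              +--> r3     (r3 is not part of rM's ancestry)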

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))
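
    # Cross-check for the constants above (added aside): the sha1s are of the
    # full stored text, so they can be reproduced with, e.g.:
    #
    #   >>> import hashlib
    #   >>> hashlib.sha1('a\n').hexdigest()
    #   '3f786850e387550fdab836ed7e6dc881de23001b'
    #   >>> hashlib.sha1('a').hexdigest()
    #   '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8'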


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
            ('root', 'B'):(('root', 'A'),),
            ('root', 'D'):(('root', 'C'),),
            ('root', 'E:'):(('root', 'B'),('root', 'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge result:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)
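
    # For reference (added note): doMerge takes the base text, the two
    # derived texts a and b, and the expected merge output mp, each as a
    # list of lines without terminators, e.g.:
    #
    #   self.doMerge(['aaa'], ['aaa', 'bbb'], ['aaa', 'ccc'],
    #                ['aaa', '<<<<<<< ', 'bbb', '=======', 'ccc', '>>>>>>> '])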

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<< 
            int a() {}
=======
            int c() {}
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the adapted texts of interest for the tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        # format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
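
    # A property the mappers above share (added aside): unmap inverts map on
    # the prefix part of a key, e.g.:
    #
    #   mapper = versionedfile.HashPrefixMapper()
    #   mapper.unmap(mapper.map(('file-id', 'rev'))) == ('file-id',)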


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only)

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, reusing existing content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order
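
    # How to read the sort_order maps above (added note): the value is the
    # topological depth of each key, so a 'topological' stream must yield
    # 'base' (0) before 'left'/'right' (1) before 'merged' (2), while a
    # 'groupcompress' stream yields newest-first within each prefix group;
    # assertStreamOrder below checks this monotonicity per prefix.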
1659
def test_get_record_stream_interface_ordered(self):
1660
"""each item in a stream has to provide a regular interface."""
1661
files = self.get_versionedfiles()
1662
self.get_diamond_files(files)
1663
keys, sort_order = self.get_keys_and_sort_order()
1664
parent_map = files.get_parent_map(keys)
1665
entries = files.get_record_stream(keys, 'topological', False)
1667
self.capture_stream(files, entries, seen.append, parent_map)
1668
self.assertStreamOrder(sort_order, seen, keys)
1670
def test_get_record_stream_interface_ordered_with_delta_closure(self):
1671
"""each item must be accessible as a fulltext."""
1672
files = self.get_versionedfiles()
1673
self.get_diamond_files(files)
1674
keys, sort_order = self.get_keys_and_sort_order()
1675
parent_map = files.get_parent_map(keys)
1676
entries = files.get_record_stream(keys, 'topological', True)
1678
for factory in entries:
1679
seen.append(factory.key)
1680
self.assertValidStorageKind(factory.storage_kind)
1681
self.assertSubset([factory.sha1],
1682
[None, files.get_sha1s([factory.key])[factory.key]])
1683
self.assertEqual(parent_map[factory.key], factory.parents)
1684
# self.assertEqual(files.get_text(factory.key),
1685
ft_bytes = factory.get_bytes_as('fulltext')
1686
self.assertIsInstance(ft_bytes, str)
1687
chunked_bytes = factory.get_bytes_as('chunked')
1688
self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))
1690
self.assertStreamOrder(sort_order, seen, keys)
1692
def test_get_record_stream_interface_groupcompress(self):
1693
"""each item in a stream has to provide a regular interface."""
1694
files = self.get_versionedfiles()
1695
self.get_diamond_files(files)
1696
keys, sort_order = self.get_keys_and_groupcompress_sort_order()
1697
parent_map = files.get_parent_map(keys)
1698
entries = files.get_record_stream(keys, 'groupcompress', False)
1700
self.capture_stream(files, entries, seen.append, parent_map)
1701
self.assertStreamOrder(sort_order, seen, keys)
1703
def assertStreamOrder(self, sort_order, seen, keys):
1704
self.assertEqual(len(set(seen)), len(keys))
1705
if self.key_length == 1:
1708
lows = {('FileA',):0, ('FileB',):0}
1710
self.assertEqual(set(keys), set(seen))
1713
sort_pos = sort_order[key]
1714
self.assertTrue(sort_pos >= lows[key[:-1]],
1715
"Out of order in sorted stream: %r, %r" % (key, seen))
1716
lows[key[:-1]] = sort_pos
1718
def test_get_record_stream_unknown_storage_kind_raises(self):
1719
"""Asking for a storage kind that the stream cannot supply raises."""
1720
files = self.get_versionedfiles()
1721
self.get_diamond_files(files)
1722
if self.key_length == 1:
1723
keys = [('merged',), ('left',), ('right',), ('base',)]
1726
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1728
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1731
parent_map = files.get_parent_map(keys)
1732
entries = files.get_record_stream(keys, 'unordered', False)
1733
# We track the contents because we should be able to try, fail a
1734
# particular kind and then ask for one that works and continue.
1736
for factory in entries:
1737
seen.add(factory.key)
1738
self.assertValidStorageKind(factory.storage_kind)
1739
self.assertEqual(files.get_sha1s([factory.key])[factory.key],
1741
self.assertEqual(parent_map[factory.key], factory.parents)
1742
# currently no stream emits mpdiff
1743
self.assertRaises(errors.UnavailableRepresentation,
1744
factory.get_bytes_as, 'mpdiff')
1745
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1747
self.assertEqual(set(keys), seen)
1749
def test_get_record_stream_missing_records_are_absent(self):
1750
files = self.get_versionedfiles()
1751
self.get_diamond_files(files)
1752
if self.key_length == 1:
1753
keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
1756
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1757
('FileA', 'absent'), ('FileA', 'base'),
1758
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1759
('FileB', 'absent'), ('FileB', 'base'),
1760
('absent', 'absent'),
1762
parent_map = files.get_parent_map(keys)
1763
entries = files.get_record_stream(keys, 'unordered', False)
1764
self.assertAbsentRecord(files, keys, parent_map, entries)
1765
entries = files.get_record_stream(keys, 'topological', False)
1766
self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record's content is exactly the given bytes."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
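
    # The wire-ready tests below serialise a record with
    # record.get_bytes_as(record.storage_kind) and round-trip it through
    # versionedfile.NetworkRecordStream, checking that native formats can be
    # sent over the wire unchanged.  'chunked' and 'fulltext' records are
    # local-use representations, so those are checked directly instead (and
    # counted via skipped_records where a counter is supplied).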

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use, not
            # wire serialisation: check they can be used directly. To send
            # such records over the wire, translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object
        # so we can check the content was carried across correctly without
        # doing delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object
        # so we can check the content was carried across correctly without
        # doing delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # read the stream from the network and check the content was carried
        # across correctly without doing delta inspection.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
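
    # Missing keys surface as records with storage_kind 'absent' and None
    # for sha1 and parents, rather than raising, so callers can still
    # stream whatever content is present.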

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after
        # the absent keys is still delivered.)
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that expected and actual have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a noeol stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a noeol stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a noeol stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)
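
    # The delta-basis tests below need a stream that really contains knit
    # delta records regardless of the scenario under test, so
    # get_knit_delta_source builds a plain-knit source directly rather than
    # using the scenario's factory.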

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equalset of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            # Progress reporting is optional: only check the updates if any
            # were made.
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        # sha1s of the fulltexts at each depth of the first chain.
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))
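
# VirtualVersionedFiles adapts a get_parent_map callable and a get-lines
# callable into the VersionedFiles API.  It is read-only, so the mutating
# methods are expected to raise NotImplementedError, as the tests below
# assert.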

class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementation."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
            [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
            self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",),("B",))},
            self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
            sorted(list(it)))

class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
            False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
            False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
            False)], vf.calls)