# Copyright (C) 2006-2012, 2016 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from gzip import GzipFile

from .. import (
    errors,
    graph as _mod_graph,
    osutils,
    progress,
    transport,
    ui,
    )
from ..errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from ..bzr import (
    groupcompress,
    knit as _mod_knit,
    )
from ..bzr.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from ..sixish import (
    BytesIO,
    zip,
    )
from . import (
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    TestSkipped,
    )
from .http_utils import TestCaseWithWebserver
from ..transport.memory import MemoryTransport
from ..bzr import versionedfile
from ..bzr.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from ..bzr.weave import (
    WeaveFile,
    WeaveInvalidChecksum,
    )
from ..bzr.weavefile import write_weave
from .scenarios import load_tests_apply_scenarios


load_tests = load_tests_apply_scenarios
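
# A sketch of the diamond graph that get_diamond_vf and get_diamond_files
# below construct ('merged' joins the 'left' and 'right' branches):
#
#       origin
#         |
#        base
#        /  \
#     left  right
#        \  /
#       merged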


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
                    ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
                    ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
                      nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length
        1 and 2.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None in as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        nograph = True
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []

    def get_parents(suffix_list):
        if nograph:
            return ()
        result = [prefix + suffix for suffix in suffix_list]
        return result

    def get_key(suffix):
        if nokeys:
            return (None,)
        return (suffix,)

    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
                                      ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
                                      get_parents([('origin',)]),
                                      ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
                                      get_parents([('base',)]),
                                      ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                                          get_parents([('base',)]),
                                          ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                                          get_parents([('left',), ('right',)]),
                                          ['base\n', 'left\n', 'right\n',
                                           'merged' + last_char]))
    return result


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations;
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])

        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEqual(f.get_text('r0'), 'a\nb\n')
            self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())

            self.assertRaises(RevisionNotPresent,
                              f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                              f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])

        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEqual(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEqual(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEqual(origins[0][0], 'r0')
        self.assertEqual(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEqual(origins[0][0], 'r1')
        self.assertEqual(origins[1][0], 'r2')

        f = self.reopen_file()

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
                          vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes.

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
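
    # A note on the triples above (a reading of the calls, not asserted by
    # the test): left_matching_blocks appears to use difflib-style matching
    # blocks of (parent_index, new_index, length), so (0, 0, 1) keeps parent
    # line 0 as new line 0 while (0, 2, 1) keeps it as new line 2; the final
    # zero-length block is the conventional terminator.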

    def test_inline_newline_throws(self):
        # inline \n characters are not permitted in lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
                          vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CRs are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
                          vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, version + "2", [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines_with_ghosts, version + "2",
                              [], lines, nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines),
                              sum(map(len, lines))),
                             result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                         vf.add_lines('d', ['b', 'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                         left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from breezy import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuses the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
                     left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
                     left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
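
    # The hand-verified digest above should just be the sha1 of the final
    # text; under that assumption it could be recomputed with something like
    # osutils.sha_string('newline\nline') rather than re-verified by hand.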

    def test_make_mpdiffs(self):
        from breezy import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version,
                                 vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent,
                          vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical no-eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s
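
    # A note on the sha1s table above: the tests that use it (e.g.
    # test_make_mpdiffs via _setup_for_deltas) only treat it as an opaque
    # depth -> digest mapping; presumably each entry is the sha1 of the
    # accumulated chain1 text at that depth.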

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
                          f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
                         set(f.get_ancestry('rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0': ()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1': ('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0': (),
             'r1': ('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m': ('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0': (),
             'r1': ('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEqual(origins[0][0], 'r1')
        self.assertEqual(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
                          f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.ProgressTask):

            def __init__(self):
                progress.ProgressTask.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(
                    versions, pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that references revisions not present in the file.
        # Formats that do not support this need to raise NotImplementedError
        # on the add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                              vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError,
                              vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
                         vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct
        # the results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
                         vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse': (parent_id_utf8,)},
                         vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
                         vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that references revisions not present in the file.
        # Formats that do not support this need to raise NotImplementedError
        # on the add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEqual(('base', 'line\n'), origins[0])
        self.assertEqual(('base', 'line_b\n'), origins[1])
        self.assertEqual(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        t = self.get_transport()
        factory = self.get_factory()
        vf = factory('id', t, 0o777, create=True, access_mode='w')
        vf = factory('id', t, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base', [], [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, self.get_transport(),
                         create=True,
                         get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', self.get_transport(),
                      create=True,
                      get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0),
                          'hello\n',
                          ('}', None),
                          ('{', 1),
                          'there\n',
                          ('}', None),
                          ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f',
                          '90f265c6e75f1c8f9ab76dcf85528352c5f215ef',
                          ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, self.get_transport(),
                         create=create,
                         get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          self.get_transport(),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        super(TestPlanMergeVersionedFile, self).setUp()
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1,
                                                           self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), [], None)
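
    # The three ValueError cases above appear to cover, in order: a key
    # without the ':' suffix that marks plan-merge texts, parents passed as
    # None rather than a sequence, and lines passed as None rather than a
    # sequence.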

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
                                     [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'): (('root', 'A'),)},
                         self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'): (('root', 'C'),)},
                         self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'): (('root', 'B'), ('root', 'D'))},
                         self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
                         self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
            ('root', 'B'): (('root', 'A'),),
            ('root', 'D'): (('root', 'C'),),
            ('root', 'E:'): (('root', 'B'), ('root', 'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'),
                 ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()

        def get_record(suffix):
            return next(self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True))
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo',
            transport.get_transport_from_url(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())

    def test_readonly_http_works_with_feeling(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo',
            transport.get_transport_from_url(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        self.assertEqual(['1', '2'], readonly_vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', self.get_transport(),
                         create=True,
                         get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], list(map(addcrlf, base)))
        w.add_lines('text1', ['text0'], list(map(addcrlf, a)))
        w.add_lines('text2', ['text0'], list(map(addcrlf, b)))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = BytesIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = list(map(addcrlf, mp))
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa', 'xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'aaa', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        #######################################
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more."""
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, self.get_transport(),
                         create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = BytesIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
             _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
             _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
             _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
             _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the adapted texts the tests are interested in."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = next(entries)
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = next(entries)
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())
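
    # As far as these assertions show, the decompressed knit hunks follow
    # the layout: a 'version <name> <line-count> <sha1>' header, then the
    # content (for deltas, range lines such as '1,2,3' followed by
    # replacement lines), then an 'end <name>' trailer.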

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
                           True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
                           True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
                           True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
                           True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
                                                           "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
                                                          "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
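
    # Reading the expected values above: the escaped mapper seems to quote
    # the file id twice (' ' -> '%20' -> '%2520') and to escape uppercase
    # characters too ('E' -> '%45' -> '%2545'), so ids differing only in
    # case stay distinct on case-insensitive filesystems; the two-character
    # directory ('88', 'ed') comes from a hash of the file id.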


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #     as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                                                    ConstantMapper('inventory')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup': None,
            'factory': make_file_factory(False, ConstantMapper('revisions')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                                                    PrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup': None,
            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 2),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]

    scenarios = len_one_scenarios + len_two_scenarios
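
    # How the scenario dicts are consumed below: 'factory' builds the
    # VersionedFiles under test, 'cleanup' (when not None) is registered as
    # a cleanup for it, 'graph' says whether parent graphs are recorded,
    # 'key_length' selects 1- or 2-tuple keys, and
    # 'support_partial_insertion' flags formats that can buffer partial
    # record streams (a gloss of the flag name, not asserted here).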

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def test_add_fallback_implies_without_fallbacks(self):
        f = self.get_versionedfiles('files')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        g = self.get_versionedfiles('fallback')
        key_a = self.get_simple_key('a')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        self.assertTrue(key_a in f.get_parent_map([key_a]))
        self.assertFalse(
            key_a in f.without_fallbacks().get_parent_map([key_a]))

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
                          files.annotate, prefix + ('missing-key',))

    def test_check_no_parameters(self):
        files = self.get_versionedfiles()
        files.check()

    def test_check_progressbar_parameter(self):
        """A progress bar can be supplied because check can be a generator."""
        pb = ui.ui_factory.nested_progress_bar()
        self.addCleanup(pb.finished)
        files = self.get_versionedfiles()
        files.check(progress_bar=pb)

    def test_check_with_keys_becomes_generator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys = files.keys()
        entries = files.check(keys=keys)
        seen = set()
        # Texts output should be fulltexts.
        self.capture_stream(files, entries, seen.add,
                            files.get_parent_map(keys), require_fulltext=True)
        # All texts should be output.
        self.assertEqual(set(keys), seen)

    def test_clear_cache(self):
        files = self.get_versionedfiles()
        files.clear_cache()

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
                          nokeys=False):
        return get_diamond_files(files, self.key_length,
                                 trailing_eol=trailing_eol,
                                 nograph=not self.graph,
                                 left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, new_key, [], lines,
                              nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                              vf._add_text, new_key, [], ''.join(lines),
                              nostore_sha=sha)
            # and no new version should have been added.
            record = next(vf.get_record_stream([new_key], 'unordered', True))
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual({
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                },
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual({
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                },
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
                         next(f.get_record_stream([key_a], 'unordered', True)
                              ).get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
                         next(f.get_record_stream([key_b], 'unordered', True)
                              ).get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
                         next(f.get_record_stream([key_a], 'unordered', True)
                              ).get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
                         next(f.get_record_stream([key_b], 'unordered', True)
                              ).get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A
        # |\
        # | B
        # |/
        # C
        f.add_lines(key_a, [], ['\n'])
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def test_known_graph_with_fallbacks(self):
        f = self.get_versionedfiles('files')
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A     only in fallback
        # |\
        # | B
        # |/
        # C
        g = self.get_versionedfiles('fallback')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1808
def test_get_record_stream_empty(self):
1809
"""An empty stream can be requested without error."""
1810
f = self.get_versionedfiles()
1811
entries = f.get_record_stream([], 'unordered', False)
1812
self.assertEqual([], list(entries))
1814
def assertValidStorageKind(self, storage_kind):
1815
"""Assert that storage_kind is a valid storage_kind."""
1816
self.assertSubset([storage_kind],
1817
['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
1818
'knit-ft', 'knit-delta', 'chunked', 'fulltext',
1819
'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
1821
'knit-delta-closure', 'knit-delta-closure-ref',
1822
'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents,
                       require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
            if require_fulltext:
                factory.get_bytes_as('fulltext')

    def test_get_record_stream_interface(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',): 2, ('left',): 1, ('right',): 1,
                          ('base',): 0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'): 2, ('FileA', 'left'): 1,
                ('FileA', 'right'): 1,
                ('FileA', 'base'): 0,
                ('FileB', 'merged'): 2, ('FileB', 'left'): 1,
                ('FileB', 'right'): 1,
                ('FileB', 'base'): 0,
                }
        return keys, sort_order
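
    # Note that the 'groupcompress' ordering below is roughly the reverse of
    # the topological ordering above: 'merged' sorts first and 'base' last,
    # since groupcompress streams are expected to emit newer texts before
    # the older texts they compress against.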

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',): 0, ('left',): 1, ('right',): 1,
                          ('base',): 2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'): 0, ('FileA', 'left'): 1,
                ('FileA', 'right'): 1,
                ('FileA', 'base'): 2,
                ('FileB', 'merged'): 3, ('FileB', 'left'): 4,
                ('FileB', 'right'): 4,
                ('FileB', 'base'): 5,
                }
        return keys, sort_order

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {(): 0}
        else:
            lows = {('FileA',): 0, ('FileB',): 0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
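
    # A failed get_bytes_as() call must not invalidate a record: the test
    # below asks every record for an unsupported 'mpdiff' representation,
    # expects UnavailableRepresentation, and then still extracts the native
    # representation from the same record.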

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the given bytes as its content."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
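
    # The wire-readiness tests that follow check that a record's native
    # storage_kind bytes can be sent verbatim over a network stream: the
    # bytes are fed into versionedfile.NetworkRecordStream and the decoded
    # records must match the originals in key, storage kind, parents and
    # content. 'chunked' and 'fulltext' records are exempt, as they are
    # direct-use representations rather than wire formats.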

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = next(stream)
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for easier
        # debugging. This must use the iterating zip(), not a list-building
        # one, so that the stream is consumed lazily.
        for record, ref_record in zip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
                                        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
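
    # In the tests below, skipped_records counts records that could not be
    # wire-serialised (direct-use representations); asserting on
    # len(records) + skipped_records[0] therefore checks that every record
    # was seen exactly once, whichever path it took.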

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # If any content was copied, it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # If any content was copied, it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(
                    factory.get_bytes_as(factory.storage_kind), str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after
        # the absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return ()

    def test_get_annotator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        origin_key = self.get_simple_key('origin')
        base_key = self.get_simple_key('base')
        left_key = self.get_simple_key('left')
        right_key = self.get_simple_key('right')
        merged_key = self.get_simple_key('merged')
        # annotator = files.get_annotator()
        # introduced full text
        origins, lines = files.get_annotator().annotate(origin_key)
        self.assertEqual([(origin_key,)], origins)
        self.assertEqual(['origin\n'], lines)
        # a delta
        origins, lines = files.get_annotator().annotate(base_key)
        self.assertEqual([(base_key,)], origins)
        self.assertEqual(['base\n'], lines)
        origins, lines = files.get_annotator().annotate(merged_key)
        if self.graph:
            self.assertEqual([
                (base_key,),
                (left_key,),
                (right_key,),
                (merged_key,),
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (merged_key,),
                (merged_key,),
                (merged_key,),
                (merged_key,),
                ], origins)
        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key('missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',), ('r1',), ('r2',), ('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = list(all_parents.keys())
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that expected and actual have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = next(actual.get_record_stream(
                [key], 'unordered', True)).get_bytes_as('fulltext')
            expected_text = next(expected.get_record_stream(
                [key], 'unordered', True)).get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts without trailing EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits without EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits without EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered',
            False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological',
            False)
        entries = itertools.chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)
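
    # The chain test below builds 26 texts ('key-a' .. 'key-z'), each using
    # the previous one as its parent, so most records end up stored as
    # deltas; it then inserts everything except the first record, newest
    # first, and checks how missing compression parents are reported.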

    def test_insert_record_stream_long_parent_chain_out_of_order(self):
        """An out of order stream can either error or work."""
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        # Create a reasonably long chain of records based on each other, where
        # most will be deltas.
        source = self.get_versionedfiles('source')
        parents = ()
        keys = []
        content = [('same same %d\n' % n) for n in range(500)]
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            key = ('key-' + letter,)
            if self.key_length == 2:
                key = ('prefix',) + key
            content.append('content for ' + letter + '\n')
            source.add_lines(key, parents, content)
            keys.append(key)
            parents = (key,)
        # Create a stream of these records, excluding the first record that
        # the rest ultimately depend upon, and insert it into a new vf.
        streams = []
        for key in reversed(keys):
            streams.append(source.get_record_stream([key], 'unordered', False))
        deltas = itertools.chain.from_iterable(streams[:-1])
        files = self.get_versionedfiles()
        try:
            files.insert_record_stream(deltas)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            # Must report either just the first key as a missing parent,
            # or no key as missing (for nodelta scenarios).
            missing = set(files.get_missing_compression_parent_keys())
            missing.discard(keys[0])
            self.assertEqual(set(), missing)
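
    # Partial insertion is exercised against a plain knit source below,
    # because knits reliably produce delta-compressed records, independent
    # of which versionedfiles implementation this test scenario is running
    # against.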

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual({self.get_simple_key('left')},
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual({self.get_simple_key('left')},
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], list(files.get_parent_map([merged_key])))
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], list(files.get_parent_map([merged_key])))

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equalset of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.ProgressTask):

            def __init__(self):
                progress.ProgressTask.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no end-of-file newline
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
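
    # test_make_mpdiffs round-trips every text through the multi-parent
    # diff API: each text is exported with make_mpdiffs(), re-imported into
    # a fresh store with add_mpdiffs(), and the reconstructed fulltext must
    # be byte-identical to the original.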

    def test_make_mpdiffs(self):
        from breezy import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                next(files.get_record_stream([key], 'unordered',
                    True)).get_bytes_as('fulltext'),
                next(target.get_record_stream([key], 'unordered',
                    True)).get_bytes_as('fulltext'))

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # the codebase.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual({key}, set(files.keys()))
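
# VirtualVersionedFiles adapts a get_parent_map callback and a get_lines
# callback into the read-only side of the VersionedFiles API; the tests
# below check that the read methods are wired through and that the
# mutating methods raise NotImplementedError.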


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        super(VirtualVersionedFilesTests, self).setUp()
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError,
            self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEqual({("key",): osutils.sha_strings(self._lines["key"])},
                         self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEqual({("G",): (("A",), ("B",))},
                         self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = next(it)
        self.assertEqual("chunked", record.storage_kind)
        self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = next(it)
        self.assertEqual("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                         sorted(list(it)))
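
# OrderingVersionedFilesDecorator wraps another VersionedFiles, records the
# calls made to it in .calls, and reorders 'unordered' record streams
# according to the key_priority map it was constructed with: lowest value
# first, with keys missing from the map sorting to the front by key.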


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)