# Copyright (C) 2005 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
24
from StringIO import StringIO
32
from bzrlib.errors import (
34
RevisionAlreadyPresent,
37
from bzrlib.knit import (
42
from bzrlib.symbol_versioning import one_four
43
from bzrlib.tests import TestCaseWithMemoryTransport, TestSkipped
44
from bzrlib.tests.http_utils import TestCaseWithWebserver
45
from bzrlib.trace import mutter
46
from bzrlib.transport import get_transport
47
from bzrlib.transport.memory import MemoryTransport
48
from bzrlib.tsort import topo_sort
49
import bzrlib.versionedfile as versionedfile
50
from bzrlib.weave import WeaveFile
51
from bzrlib.weavefile import read_weave, write_weave
54
class VersionedFileTestMixIn(object):
55
"""A mixin test class for testing VersionedFiles.
57
This is not an adaptor-style test at this point because
58
theres no dynamic substitution of versioned file implementations,
59
they are strictly controlled by their owning repositories.
64
f.add_lines('r0', [], ['a\n', 'b\n'])
65
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
67
versions = f.versions()
68
self.assertTrue('r0' in versions)
69
self.assertTrue('r1' in versions)
70
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
71
self.assertEquals(f.get_text('r0'), 'a\nb\n')
72
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
73
self.assertEqual(2, len(f))
74
self.assertEqual(2, f.num_versions())
76
self.assertRaises(RevisionNotPresent,
77
f.add_lines, 'r2', ['foo'], [])
78
self.assertRaises(RevisionAlreadyPresent,
79
f.add_lines, 'r1', [], [])
81
# this checks that reopen with create=True does not break anything.
82
f = self.reopen_file(create=True)
85
def test_adds_with_parent_texts(self):
88
_, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
90
_, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
91
['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
92
except NotImplementedError:
93
# if the format doesn't support ghosts, just add normally.
94
_, _, parent_texts['r1'] = f.add_lines('r1',
95
['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
96
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
97
self.assertNotEqual(None, parent_texts['r0'])
98
self.assertNotEqual(None, parent_texts['r1'])
100
versions = f.versions()
101
self.assertTrue('r0' in versions)
102
self.assertTrue('r1' in versions)
103
self.assertTrue('r2' in versions)
104
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
105
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
106
self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
107
self.assertEqual(3, f.num_versions())
108
origins = f.annotate('r1')
109
self.assertEquals(origins[0][0], 'r0')
110
self.assertEquals(origins[1][0], 'r1')
111
origins = f.annotate('r2')
112
self.assertEquals(origins[0][0], 'r1')
113
self.assertEquals(origins[1][0], 'r2')
116
f = self.reopen_file()
119
def test_add_unicode_content(self):
120
# unicode content is not permitted in versioned files.
121
# versioned files version sequences of bytes only.
123
self.assertRaises(errors.BzrBadParameterUnicode,
124
vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
126
(errors.BzrBadParameterUnicode, NotImplementedError),
127
vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
129
def test_add_follows_left_matching_blocks(self):
130
"""If we change left_matching_blocks, delta changes
132
Note: There are multiple correct deltas in this case, because
133
we start with 1 "a" and we get 3.
136
if isinstance(vf, WeaveFile):
137
raise TestSkipped("WeaveFile ignores left_matching_blocks")
138
vf.add_lines('1', [], ['a\n'])
139
vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
140
left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
141
self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
142
vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
143
left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
144
self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
146
def test_inline_newline_throws(self):
147
# \r characters are not permitted in lines being added
149
self.assertRaises(errors.BzrBadParameterContainsNewline,
150
vf.add_lines, 'a', [], ['a\n\n'])
152
(errors.BzrBadParameterContainsNewline, NotImplementedError),
153
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
154
# but inline CR's are allowed
155
vf.add_lines('a', [], ['a\r\n'])
157
vf.add_lines_with_ghosts('b', [], ['a\r\n'])
158
except NotImplementedError:
161
def test_add_reserved(self):
163
self.assertRaises(errors.ReservedId,
164
vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
166
def test_add_lines_nostoresha(self):
167
"""When nostore_sha is supplied using old content raises."""
169
empty_text = ('a', [])
170
sample_text_nl = ('b', ["foo\n", "bar\n"])
171
sample_text_no_nl = ('c', ["foo\n", "bar"])
173
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
174
sha, _, _ = vf.add_lines(version, [], lines)
176
# we now have a copy of all the lines in the vf.
177
for sha, (version, lines) in zip(
178
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
179
self.assertRaises(errors.ExistingContent,
180
vf.add_lines, version + "2", [], lines,
182
# and no new version should have been added.
183
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
186
def test_add_lines_with_ghosts_nostoresha(self):
187
"""When nostore_sha is supplied using old content raises."""
189
empty_text = ('a', [])
190
sample_text_nl = ('b', ["foo\n", "bar\n"])
191
sample_text_no_nl = ('c', ["foo\n", "bar"])
193
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
194
sha, _, _ = vf.add_lines(version, [], lines)
196
# we now have a copy of all the lines in the vf.
197
# is the test applicable to this vf implementation?
199
vf.add_lines_with_ghosts('d', [], [])
200
except NotImplementedError:
201
raise TestSkipped("add_lines_with_ghosts is optional")
202
for sha, (version, lines) in zip(
203
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
204
self.assertRaises(errors.ExistingContent,
205
vf.add_lines_with_ghosts, version + "2", [], lines,
207
# and no new version should have been added.
208
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
211
def test_add_lines_return_value(self):
212
# add_lines should return the sha1 and the text size.
214
empty_text = ('a', [])
215
sample_text_nl = ('b', ["foo\n", "bar\n"])
216
sample_text_no_nl = ('c', ["foo\n", "bar"])
217
# check results for the three cases:
218
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
219
# the first two elements are the same for all versioned files:
220
# - the digest and the size of the text. For some versioned files
221
# additional data is returned in additional tuple elements.
222
result = vf.add_lines(version, [], lines)
223
self.assertEqual(3, len(result))
224
self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
226
# parents should not affect the result:
227
lines = sample_text_nl[1]
228
self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
229
vf.add_lines('d', ['b', 'c'], lines)[0:2])
231
def test_get_reserved(self):
233
self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
234
self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
235
self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
237
def test_make_mpdiffs(self):
238
from bzrlib import multiparent
239
vf = self.get_file('foo')
240
sha1s = self._setup_for_deltas(vf)
241
new_vf = self.get_file('bar')
242
for version in multiparent.topo_iter(vf):
243
mpdiff = vf.make_mpdiffs([version])[0]
244
new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
245
vf.get_sha1(version), mpdiff)])
246
self.assertEqualDiff(vf.get_text(version),
247
new_vf.get_text(version))
249
def _setup_for_deltas(self, f):
250
self.assertFalse(f.has_version('base'))
251
# add texts that should trip the knit maximum delta chain threshold
252
# as well as doing parallel chains of data in knits.
253
# this is done by two chains of 25 insertions
254
f.add_lines('base', [], ['line\n'])
255
f.add_lines('noeol', ['base'], ['line'])
256
# detailed eol tests:
257
# shared last line with parent no-eol
258
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
259
# differing last line with parent, both no-eol
260
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
261
# add eol following a noneol parent, change content
262
f.add_lines('eol', ['noeol'], ['phone\n'])
263
# add eol following a noneol parent, no change content
264
f.add_lines('eolline', ['noeol'], ['line\n'])
265
# noeol with no parents:
266
f.add_lines('noeolbase', [], ['line'])
267
# noeol preceeding its leftmost parent in the output:
268
# this is done by making it a merge of two parents with no common
269
# anestry: noeolbase and noeol with the
270
# later-inserted parent the leftmost.
271
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
272
# two identical eol texts
273
f.add_lines('noeoldup', ['noeol'], ['line'])
275
text_name = 'chain1-'
277
sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
278
1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
279
2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
280
3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
281
4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
282
5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
283
6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
284
7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
285
8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
286
9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
287
10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
288
11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
289
12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
290
13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
291
14:'2c4b1736566b8ca6051e668de68650686a3922f2',
292
15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
293
16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
294
17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
295
18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
296
19:'1ebed371807ba5935958ad0884595126e8c4e823',
297
20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
298
21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
299
22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
300
23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
301
24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
302
25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
304
for depth in range(26):
305
new_version = text_name + '%s' % depth
306
text = text + ['line\n']
307
f.add_lines(new_version, [next_parent], text)
308
next_parent = new_version
310
text_name = 'chain2-'
312
for depth in range(26):
313
new_version = text_name + '%s' % depth
314
text = text + ['line\n']
315
f.add_lines(new_version, [next_parent], text)
316
next_parent = new_version
319
def test_ancestry(self):
321
self.assertEqual([], f.get_ancestry([]))
322
f.add_lines('r0', [], ['a\n', 'b\n'])
323
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
324
f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
325
f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
326
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
327
self.assertEqual([], f.get_ancestry([]))
328
versions = f.get_ancestry(['rM'])
329
# there are some possibilities:
333
# so we check indexes
334
r0 = versions.index('r0')
335
r1 = versions.index('r1')
336
r2 = versions.index('r2')
337
self.assertFalse('r3' in versions)
338
rM = versions.index('rM')
339
self.assertTrue(r0 < r1)
340
self.assertTrue(r0 < r2)
341
self.assertTrue(r1 < rM)
342
self.assertTrue(r2 < rM)
344
self.assertRaises(RevisionNotPresent,
345
f.get_ancestry, ['rM', 'rX'])
347
self.assertEqual(set(f.get_ancestry('rM')),
348
set(f.get_ancestry('rM', topo_sorted=False)))
350
def test_mutate_after_finish(self):
352
f.transaction_finished()
353
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
354
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
355
self.assertRaises(errors.OutSideTransaction, f.join, '')
356
self.assertRaises(errors.OutSideTransaction, f.clone_text, 'base', 'bar', ['foo'])
358
def test_clear_cache(self):
360
# on a new file it should not error
362
# and after adding content, doing a clear_cache and a get should work.
363
f.add_lines('0', [], ['a'])
365
self.assertEqual(['a'], f.get_lines('0'))
367
def test_clone_text(self):
369
f.add_lines('r0', [], ['a\n', 'b\n'])
370
f.clone_text('r1', 'r0', ['r0'])
372
self.assertEquals(f.get_lines('r1'), f.get_lines('r0'))
373
self.assertEquals(f.get_lines('r1'), ['a\n', 'b\n'])
374
self.assertEqual({'r1':('r0',)}, f.get_parent_map(['r1']))
375
self.assertRaises(RevisionNotPresent,
376
f.clone_text, 'r2', 'rX', [])
377
self.assertRaises(RevisionAlreadyPresent,
378
f.clone_text, 'r1', 'r0', [])
380
verify_file(self.reopen_file())
382
def test_create_empty(self):
384
f.add_lines('0', [], ['a\n'])
385
new_f = f.create_empty('t', MemoryTransport())
386
# smoke test, specific types should check it is honoured correctly for
387
# non type attributes
388
self.assertEqual([], new_f.versions())
389
self.assertTrue(isinstance(new_f, f.__class__))
391
def test_copy_to(self):
393
f.add_lines('0', [], ['a\n'])
394
t = MemoryTransport()
396
for suffix in f.__class__.get_suffixes():
397
self.assertTrue(t.has('foo' + suffix))
399
def test_get_suffixes(self):
402
self.assertEqual(f.__class__.get_suffixes(), f.__class__.get_suffixes())
403
# and should be a list
404
self.assertTrue(isinstance(f.__class__.get_suffixes(), list))
406
def build_graph(self, file, graph):
407
for node in topo_sort(graph.items()):
408
file.add_lines(node, graph[node], [])
410
def test_get_graph(self):
416
self.build_graph(f, graph)
417
self.assertEqual(graph, f.get_graph())
419
def test_get_graph_partial(self):
427
complex_graph.update(simple_a)
432
complex_graph.update(simple_b)
439
complex_graph.update(simple_gam)
441
simple_b_gam.update(simple_gam)
442
simple_b_gam.update(simple_b)
443
self.build_graph(f, complex_graph)
444
self.assertEqual(simple_a, f.get_graph(['a']))
445
self.assertEqual(simple_b, f.get_graph(['b']))
446
self.assertEqual(simple_gam, f.get_graph(['gam']))
447
self.assertEqual(simple_b_gam, f.get_graph(['b', 'gam']))
449
def test_get_parents(self):
451
f.add_lines('r0', [], ['a\n', 'b\n'])
452
f.add_lines('r1', [], ['a\n', 'b\n'])
453
f.add_lines('r2', [], ['a\n', 'b\n'])
454
f.add_lines('r3', [], ['a\n', 'b\n'])
455
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
456
self.assertEqual(['r0', 'r1', 'r2', 'r3'],
457
self.applyDeprecated(one_four, f.get_parents, 'm'))
458
self.assertRaises(RevisionNotPresent,
459
self.applyDeprecated, one_four, f.get_parents, 'y')
461
def test_get_parent_map(self):
463
f.add_lines('r0', [], ['a\n', 'b\n'])
465
{'r0':()}, f.get_parent_map(['r0']))
466
f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
468
{'r1':('r0',)}, f.get_parent_map(['r1']))
472
f.get_parent_map(['r0', 'r1']))
473
f.add_lines('r2', [], ['a\n', 'b\n'])
474
f.add_lines('r3', [], ['a\n', 'b\n'])
475
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
477
{'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
478
self.assertEqual({}, f.get_parent_map('y'))
482
f.get_parent_map(['r0', 'y', 'r1']))
484
def test_annotate(self):
486
f.add_lines('r0', [], ['a\n', 'b\n'])
487
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
488
origins = f.annotate('r1')
489
self.assertEquals(origins[0][0], 'r1')
490
self.assertEquals(origins[1][0], 'r0')
492
self.assertRaises(RevisionNotPresent,
495
def test_detection(self):
496
# Test weaves detect corruption.
498
# Weaves contain a checksum of their texts.
499
# When a text is extracted, this checksum should be
502
w = self.get_file_corrupted_text()
504
self.assertEqual('hello\n', w.get_text('v1'))
505
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
506
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
507
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
509
w = self.get_file_corrupted_checksum()
511
self.assertEqual('hello\n', w.get_text('v1'))
512
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
513
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
514
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
516
def get_file_corrupted_text(self):
517
"""Return a versioned file with corrupt text but valid metadata."""
518
raise NotImplementedError(self.get_file_corrupted_text)
520
def reopen_file(self, name='foo'):
521
"""Open the versioned file from disk again."""
522
raise NotImplementedError(self.reopen_file)
524
def test_iter_parents(self):
525
"""iter_parents returns the parents for many nodes."""
529
f.add_lines('r0', [], ['a\n', 'b\n'])
531
f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
533
f.add_lines('r2', ['r1', 'r0'], ['a\n', 'b\n'])
535
# cases: each sample data individually:
536
self.assertEqual(set([('r0', ())]),
537
set(f.iter_parents(['r0'])))
538
self.assertEqual(set([('r1', ('r0', ))]),
539
set(f.iter_parents(['r1'])))
540
self.assertEqual(set([('r2', ('r1', 'r0'))]),
541
set(f.iter_parents(['r2'])))
542
# no nodes returned for a missing node
543
self.assertEqual(set(),
544
set(f.iter_parents(['missing'])))
545
# 1 node returned with missing nodes skipped
546
self.assertEqual(set([('r1', ('r0', ))]),
547
set(f.iter_parents(['ghost1', 'r1', 'ghost'])))
549
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
550
set(f.iter_parents(['r0', 'r1'])))
551
# 2 nodes returned, missing skipped
552
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
553
set(f.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
555
def test_iter_lines_added_or_present_in_versions(self):
556
# test that we get at least an equalset of the lines added by
557
# versions in the weave
558
# the ordering here is to make a tree so that dumb searches have
559
# more changes to muck up.
561
class InstrumentedProgress(progress.DummyProgress):
565
progress.DummyProgress.__init__(self)
568
def update(self, msg=None, current=None, total=None):
569
self.updates.append((msg, current, total))
572
# add a base to get included
573
vf.add_lines('base', [], ['base\n'])
574
# add a ancestor to be included on one side
575
vf.add_lines('lancestor', [], ['lancestor\n'])
576
# add a ancestor to be included on the other side
577
vf.add_lines('rancestor', ['base'], ['rancestor\n'])
578
# add a child of rancestor with no eofile-nl
579
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
580
# add a child of lancestor and base to join the two roots
581
vf.add_lines('otherchild',
582
['lancestor', 'base'],
583
['base\n', 'lancestor\n', 'otherchild\n'])
584
def iter_with_versions(versions, expected):
585
# now we need to see what lines are returned, and how often.
587
progress = InstrumentedProgress()
588
# iterate over the lines
589
for line in vf.iter_lines_added_or_present_in_versions(versions,
591
lines.setdefault(line, 0)
593
if []!= progress.updates:
594
self.assertEqual(expected, progress.updates)
596
lines = iter_with_versions(['child', 'otherchild'],
597
[('Walking content.', 0, 2),
598
('Walking content.', 1, 2),
599
('Walking content.', 2, 2)])
600
# we must see child and otherchild
601
self.assertTrue(lines[('child\n', 'child')] > 0)
602
self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
603
# we dont care if we got more than that.
606
lines = iter_with_versions(None, [('Walking content.', 0, 5),
607
('Walking content.', 1, 5),
608
('Walking content.', 2, 5),
609
('Walking content.', 3, 5),
610
('Walking content.', 4, 5),
611
('Walking content.', 5, 5)])
612
# all lines must be seen at least once
613
self.assertTrue(lines[('base\n', 'base')] > 0)
614
self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
615
self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
616
self.assertTrue(lines[('child\n', 'child')] > 0)
617
self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
619
def test_add_lines_with_ghosts(self):
620
# some versioned file formats allow lines to be added with parent
621
# information that is > than that in the format. Formats that do
622
# not support this need to raise NotImplementedError on the
623
# add_lines_with_ghosts api.
625
# add a revision with ghost parents
626
# The preferred form is utf8, but we should translate when needed
627
parent_id_unicode = u'b\xbfse'
628
parent_id_utf8 = parent_id_unicode.encode('utf8')
630
vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
631
except NotImplementedError:
632
# check the other ghost apis are also not implemented
633
self.assertRaises(NotImplementedError, vf.has_ghost, 'foo')
634
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
635
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
636
self.assertRaises(NotImplementedError, vf.get_graph_with_ghosts)
638
vf = self.reopen_file()
639
# test key graph related apis: getncestry, _graph, get_parents
641
# - these are ghost unaware and must not be reflect ghosts
642
self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
644
self.applyDeprecated(one_four, vf.get_parents, 'notbxbfse'))
645
self.assertEqual({'notbxbfse':()}, vf.get_graph())
646
self.assertFalse(vf.has_version(parent_id_utf8))
647
# we have _with_ghost apis to give us ghost information.
648
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
649
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
650
self.assertEqual({'notbxbfse':(parent_id_utf8,)}, vf.get_graph_with_ghosts())
651
self.assertTrue(vf.has_ghost(parent_id_utf8))
652
# if we add something that is a ghost of another, it should correct the
653
# results of the prior apis
654
vf.add_lines(parent_id_utf8, [], [])
655
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
656
self.assertEqual({'notbxbfse':(parent_id_utf8,)},
657
vf.get_parent_map(['notbxbfse']))
658
self.assertEqual({parent_id_utf8:(),
659
'notbxbfse':(parent_id_utf8, ),
662
self.assertTrue(vf.has_version(parent_id_utf8))
663
# we have _with_ghost apis to give us ghost information.
664
self.assertEqual([parent_id_utf8, 'notbxbfse'],
665
vf.get_ancestry_with_ghosts(['notbxbfse']))
666
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
667
self.assertEqual({parent_id_utf8:(),
668
'notbxbfse':(parent_id_utf8,),
670
vf.get_graph_with_ghosts())
671
self.assertFalse(vf.has_ghost(parent_id_utf8))
673
def test_add_lines_with_ghosts_after_normal_revs(self):
674
# some versioned file formats allow lines to be added with parent
675
# information that is > than that in the format. Formats that do
676
# not support this need to raise NotImplementedError on the
677
# add_lines_with_ghosts api.
679
# probe for ghost support
682
except NotImplementedError:
684
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
685
vf.add_lines_with_ghosts('references_ghost',
687
['line\n', 'line_b\n', 'line_c\n'])
688
origins = vf.annotate('references_ghost')
689
self.assertEquals(('base', 'line\n'), origins[0])
690
self.assertEquals(('base', 'line_b\n'), origins[1])
691
self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
693
def test_readonly_mode(self):
694
transport = get_transport(self.get_url('.'))
695
factory = self.get_factory()
696
vf = factory('id', transport, 0777, create=True, access_mode='w')
697
vf = factory('id', transport, access_mode='r')
698
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
699
self.assertRaises(errors.ReadOnlyError,
700
vf.add_lines_with_ghosts,
704
self.assertRaises(errors.ReadOnlyError, vf.join, 'base')
705
self.assertRaises(errors.ReadOnlyError, vf.clone_text, 'base', 'bar', ['foo'])
707
def test_get_sha1(self):
708
# check the sha1 data is available
711
vf.add_lines('a', [], ['a\n'])
712
# the same file, different metadata
713
vf.add_lines('b', ['a'], ['a\n'])
714
# a file differing only in last newline.
715
vf.add_lines('c', [], ['a'])
717
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('a'))
719
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('b'))
721
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', vf.get_sha1('c'))
723
self.assertEqual(['3f786850e387550fdab836ed7e6dc881de23001b',
724
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
725
'3f786850e387550fdab836ed7e6dc881de23001b'],
726
vf.get_sha1s(['a', 'c', 'b']))
729
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
731
def get_file(self, name='foo'):
732
return WeaveFile(name, get_transport(self.get_url('.')), create=True)
734
def get_file_corrupted_text(self):
735
w = WeaveFile('foo', get_transport(self.get_url('.')), create=True)
736
w.add_lines('v1', [], ['hello\n'])
737
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
739
# We are going to invasively corrupt the text
740
# Make sure the internals of weave are the same
741
self.assertEqual([('{', 0)
749
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
750
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
755
w._weave[4] = 'There\n'
758
def get_file_corrupted_checksum(self):
759
w = self.get_file_corrupted_text()
761
w._weave[4] = 'there\n'
762
self.assertEqual('hello\nthere\n', w.get_text('v2'))
764
#Invalid checksum, first digit changed
765
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
768
def reopen_file(self, name='foo', create=False):
769
return WeaveFile(name, get_transport(self.get_url('.')), create=create)
771
def test_no_implicit_create(self):
772
self.assertRaises(errors.NoSuchFile,
775
get_transport(self.get_url('.')))
777
def get_factory(self):
781
class TestKnit(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
783
def get_file(self, name='foo'):
784
return self.get_factory()(name, get_transport(self.get_url('.')),
785
delta=True, create=True)
787
def get_factory(self):
788
return KnitVersionedFile
790
def get_file_corrupted_text(self):
791
knit = self.get_file()
792
knit.add_lines('v1', [], ['hello\n'])
793
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
796
def reopen_file(self, name='foo', create=False):
797
return self.get_factory()(name, get_transport(self.get_url('.')),
801
def test_detection(self):
802
knit = self.get_file()
805
def test_no_implicit_create(self):
806
self.assertRaises(errors.NoSuchFile,
809
get_transport(self.get_url('.')))
812
class TestPlaintextKnit(TestKnit):
813
"""Test a knit with no cached annotations"""
815
def _factory(self, name, transport, file_mode=None, access_mode=None,
816
delta=True, create=False):
817
return KnitVersionedFile(name, transport, file_mode, access_mode,
818
KnitPlainFactory(), delta=delta,
821
def get_factory(self):
825
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
828
TestCaseWithMemoryTransport.setUp(self)
829
self.vf1 = KnitVersionedFile('root', self.get_transport(), create=True)
830
self.vf2 = KnitVersionedFile('root', self.get_transport(), create=True)
831
self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root',
832
[self.vf1, self.vf2])
834
def test_add_lines(self):
835
self.plan_merge_vf.add_lines('a:', [], [])
836
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a', [],
838
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a:', None,
840
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a:', [],
843
def test_ancestry(self):
844
self.vf1.add_lines('A', [], [])
845
self.vf1.add_lines('B', ['A'], [])
846
self.plan_merge_vf.add_lines('C:', ['B'], [])
847
self.plan_merge_vf.add_lines('D:', ['C:'], [])
848
self.assertEqual(set(['A', 'B', 'C:', 'D:']),
849
self.plan_merge_vf.get_ancestry('D:', topo_sorted=False))
851
def setup_abcde(self):
852
self.vf1.add_lines('A', [], ['a'])
853
self.vf1.add_lines('B', ['A'], ['b'])
854
self.vf2.add_lines('C', [], ['c'])
855
self.vf2.add_lines('D', ['C'], ['d'])
856
self.plan_merge_vf.add_lines('E:', ['B', 'D'], ['e'])
858
def test_ancestry_uses_all_versionedfiles(self):
860
self.assertEqual(set(['A', 'B', 'C', 'D', 'E:']),
861
self.plan_merge_vf.get_ancestry('E:', topo_sorted=False))
863
def test_ancestry_raises_revision_not_present(self):
864
error = self.assertRaises(errors.RevisionNotPresent,
865
self.plan_merge_vf.get_ancestry, 'E:', False)
866
self.assertContainsRe(str(error), '{E:} not present in "root"')
868
def test_get_parents(self):
870
self.assertEqual({'B':('A',)}, self.plan_merge_vf.get_parent_map(['B']))
871
self.assertEqual({'D':('C',)}, self.plan_merge_vf.get_parent_map(['D']))
872
self.assertEqual({'E:':('B', 'D')},
873
self.plan_merge_vf.get_parent_map(['E:']))
874
self.assertEqual({}, self.plan_merge_vf.get_parent_map(['F']))
879
}, self.plan_merge_vf.get_parent_map(['B', 'D', 'E:', 'F']))
881
def test_get_lines(self):
883
self.assertEqual(['a'], self.plan_merge_vf.get_lines('A'))
884
self.assertEqual(['c'], self.plan_merge_vf.get_lines('C'))
885
self.assertEqual(['e'], self.plan_merge_vf.get_lines('E:'))
886
error = self.assertRaises(errors.RevisionNotPresent,
887
self.plan_merge_vf.get_lines, 'F')
888
self.assertContainsRe(str(error), '{F} not present in "root"')
891
class InterString(versionedfile.InterVersionedFile):
892
"""An inter-versionedfile optimised code path for strings.
894
This is for use during testing where we use strings as versionedfiles
895
so that none of the default regsitered interversionedfile classes will
896
match - which lets us test the match logic.
900
def is_compatible(source, target):
901
"""InterString is compatible with strings-as-versionedfiles."""
902
return isinstance(source, str) and isinstance(target, str)
905
# TODO this and the InterRepository core logic should be consolidatable
906
# if we make the registry a separate class though we still need to
907
# test the behaviour in the active registry to catch failure-to-handle-
909
class TestInterVersionedFile(TestCaseWithMemoryTransport):
911
def test_get_default_inter_versionedfile(self):
912
# test that the InterVersionedFile.get(a, b) probes
913
# for a class where is_compatible(a, b) returns
914
# true and returns a default interversionedfile otherwise.
915
# This also tests that the default registered optimised interversionedfile
916
# classes do not barf inappropriately when a surprising versionedfile type
918
dummy_a = "VersionedFile 1."
919
dummy_b = "VersionedFile 2."
920
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
922
def assertGetsDefaultInterVersionedFile(self, a, b):
923
"""Asserts that InterVersionedFile.get(a, b) -> the default."""
924
inter = versionedfile.InterVersionedFile.get(a, b)
925
self.assertEqual(versionedfile.InterVersionedFile,
927
self.assertEqual(a, inter.source)
928
self.assertEqual(b, inter.target)
930
def test_register_inter_versionedfile_class(self):
931
# test that a optimised code path provider - a
932
# InterVersionedFile subclass can be registered and unregistered
933
# and that it is correctly selected when given a versionedfile
934
# pair that it returns true on for the is_compatible static method
936
dummy_a = "VersionedFile 1."
937
dummy_b = "VersionedFile 2."
938
versionedfile.InterVersionedFile.register_optimiser(InterString)
940
# we should get the default for something InterString returns False
942
self.assertFalse(InterString.is_compatible(dummy_a, None))
943
self.assertGetsDefaultInterVersionedFile(dummy_a, None)
944
# and we should get an InterString for a pair it 'likes'
945
self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
946
inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
947
self.assertEqual(InterString, inter.__class__)
948
self.assertEqual(dummy_a, inter.source)
949
self.assertEqual(dummy_b, inter.target)
951
versionedfile.InterVersionedFile.unregister_optimiser(InterString)
952
# now we should get the default InterVersionedFile object again.
953
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
956
class TestReadonlyHttpMixin(object):
    """Mixin: exercise a versioned file over a readonly http transport.

    Concrete subclasses supply get_file() (a writable versioned file on
    the test's own transport) and get_factory() (the versioned file
    class) -- see TestWeaveHTTP / TestKnitHTTP below.
    """

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        # NOTE(review): the local writable file creation line was lost in
        # this copy; vf was used without being bound, so it is recreated
        # via the mixin hook.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # add some content locally, then read it back over http.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
973
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-http mixin tests against a WeaveFile."""

    def get_file(self):
        # the def header for this method was lost in this copy; restored
        # to match the parallel TestKnitHTTP.get_file below.
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True)

    def get_factory(self):
        return WeaveFile
982
class TestKnitHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-http mixin tests against a delta-compressed knit."""

    def get_file(self):
        # def header restored; its two-line body was present in this copy.
        return KnitVersionedFile('foo', get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def get_factory(self):
        return KnitVersionedFile
992
class MergeCasesMixin(object):
994
def doMerge(self, base, a, b, mp):
    """Build a three-version file and check weave_merge's output.

    :param base: list of lines (without newlines) for the base text.
    :param a: lines for the first derived text.
    :param b: lines for the second derived text.
    :param mp: expected merged lines.
    """
    from cStringIO import StringIO
    from textwrap import dedent

    # every fixture line gets a trailing newline added; restored from the
    # visible map(addcrlf, ...) call sites.
    def addcrlf(x):
        return x + '\n'

    # NOTE(review): w, mt and the seek were lost in this copy; each is
    # forced by a visible use (w.add_lines, mt.writelines, mt.readlines).
    w = self.get_file()
    w.add_lines('text0', [], map(addcrlf, base))
    w.add_lines('text1', ['text0'], map(addcrlf, a))
    w.add_lines('text2', ['text0'], map(addcrlf, b))

    self.log_contents(w)

    self.log('merge plan:')
    p = list(w.plan_merge('text1', 'text2'))
    for state, line in p:
        if line:
            self.log('%12s | %s' % (state, line[:-1]))

    mt = StringIO()
    mt.writelines(w.weave_merge(p))
    # rewind before readlines, otherwise the comparison would see [].
    mt.seek(0)
    self.log(mt.getvalue())

    mp = map(addcrlf, mp)
    self.assertEqual(mt.readlines(), mp)
1024
def testOneInsert(self):
1030
def testSeparateInserts(self):
    """Insertions at different points on each side both survive the merge."""
    base = ['aaa', 'bbb', 'ccc']
    this = ['aaa', 'xxx', 'bbb', 'ccc']
    other = ['aaa', 'bbb', 'yyy', 'ccc']
    expected = ['aaa', 'xxx', 'bbb', 'yyy', 'ccc']
    self.doMerge(base, this, other, expected)
1036
def testSameInsert(self):
    """An insertion made identically on one side merges without conflict."""
    base = ['aaa', 'bbb', 'ccc']
    this = ['aaa', 'xxx', 'bbb', 'ccc']
    other = ['aaa', 'xxx', 'bbb', 'yyy', 'ccc']
    expected = ['aaa', 'xxx', 'bbb', 'yyy', 'ccc']
    self.doMerge(base, this, other, expected)
1041
# Expected merge output for testOverlappedInsert; TestWeaveMerge overrides
# this with a conflict-marked variant.
overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']
1042
def testOverlappedInsert(self):
    """Both sides insert at the same point, partially overlapping.

    The expected output comes from the class attribute so that subclasses
    (e.g. the weave variant) can substitute a conflicted result.
    """
    # really it ought to reduce this to ['aaa', 'xxx', 'yyy', 'bbb']
    base = ['aaa', 'bbb']
    this = ['aaa', 'xxx', 'yyy', 'bbb']
    other = ['aaa', 'xxx', 'bbb']
    self.doMerge(base, this, other, self.overlappedInsertExpected)
1051
def testClashReplace(self):
1052
self.doMerge(['aaa'],
1055
['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
1058
def testNonClashInsert1(self):
1059
self.doMerge(['aaa'],
1062
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
1065
def testNonClashInsert2(self):
1066
self.doMerge(['aaa'],
1072
def testDeleteAndModify(self):
    """Clashing delete and modification.

    If one side modifies a region and the other deletes it then
    there should be a conflict with one side blank.
    """
    #######################################
    # skipped, not working yet
    return

    self.doMerge(['aaa', 'bbb', 'ccc'],
                 ['aaa', 'ddd', 'ccc'],
                 ['aaa', 'ccc'],
                 ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])
1088
def _test_merge_from_strings(self, base, a, b, expected):
    """Merge two derived texts given as whole strings and compare.

    :param base: base text as a single string.
    :param a: first derived text as a single string.
    :param b: second derived text as a single string.
    :param expected: expected merged text as a single string.
    """
    # NOTE(review): w was used without being bound in this copy; restored
    # via the mixin's get_file hook, matching doMerge above.
    w = self.get_file()
    w.add_lines('text0', [], base.splitlines(True))
    w.add_lines('text1', ['text0'], a.splitlines(True))
    w.add_lines('text2', ['text0'], b.splitlines(True))
    self.log('merge plan:')
    p = list(w.plan_merge('text1', 'text2'))
    for state, line in p:
        # skip states carrying no line so line[:-1] cannot fail.
        if line:
            self.log('%12s | %s' % (state, line[:-1]))
    self.log('merge result:')
    result_text = ''.join(w.weave_merge(p))
    self.log(result_text)
    self.assertEqualDiff(result_text, expected)
1103
def test_weave_merge_conflicts(self):
    # does weave merge properly handle plans that end with unchanged?
    plan = [('new-a', 'hello\n')]
    merged = self.get_file().weave_merge(plan)
    self.assertEqual(''.join(merged), 'hello\n')
1108
def test_deletion_extended(self):
1109
"""One side deletes, the other deletes more.
1126
self._test_merge_from_strings(base, a, b, result)
1128
def test_deletion_overlap(self):
1129
"""Delete overlapping regions with no other conflict.
1131
Arguably it'd be better to treat these as agreement, rather than
1132
conflict, but for now conflict is safer.
1160
self._test_merge_from_strings(base, a, b, result)
1162
def test_agreement_deletion(self):
1163
"""Agree to delete some lines, without conflicts."""
1185
self._test_merge_from_strings(base, a, b, result)
1187
def test_sync_on_deletion(self):
1188
"""Specific case of merge where we can synchronize incorrectly.
1190
A previous version of the weave merge concluded that the two versions
1191
agreed on deleting line 2, and this could be a synchronization point.
1192
Line 1 was then considered in isolation, and thought to be deleted on
1195
It's better to consider the whole thing as a disagreement region.
1206
a's replacement line 2
1219
a's replacement line 2
1226
self._test_merge_from_strings(base, a, b, result)
1229
class TestKnitMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """Run the merge-case mixin tests against a delta-compressed knit."""

    def get_file(self, name='foo'):
        return KnitVersionedFile(name, get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def log_contents(self, w):
        # knits have no weave-style text dump worth logging; the lost body
        # line is restored as the only behavior-neutral statement.
        pass
1239
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """Run the merge-case mixin tests against a WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        # tmpf creation was lost in this copy; it is forced by the visible
        # write_weave(w, tmpf) / tmpf.getvalue() uses.  StringIO comes from
        # the module-level import.
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    # weave merge leaves this overlapped insertion as an explicit conflict,
    # overriding the clean default in MergeCasesMixin.
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']
1254
class TestFormatSignatures(TestCaseWithMemoryTransport):
    """Knit format signature strings distinguish annotated from plain knits."""

    def get_knit_file(self, name, annotated):
        """Create a knit named *name*.

        :param annotated: if true, build with KnitAnnotateFactory,
            otherwise KnitPlainFactory.  The if/else dispatch and the
            factory=factory call terminator were lost in this copy and are
            restored from the two visible factory assignments.
        """
        if annotated:
            factory = KnitAnnotateFactory()
        else:
            factory = KnitPlainFactory()
        return KnitVersionedFile(
            name, get_transport(self.get_url('.')), create=True,
            factory=factory)

    def test_knit_format_signatures(self):
        """Different formats of knit have different signature strings."""
        knit = self.get_knit_file('a', True)
        self.assertEqual('knit-annotated', knit.get_format_signature())
        knit = self.get_knit_file('p', False)
        self.assertEqual('knit-plain', knit.get_format_signature())