# Copyright (C) 2005 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
from StringIO import StringIO
32
from bzrlib.errors import (
34
RevisionAlreadyPresent,
37
from bzrlib.knit import KnitVersionedFile, \
39
from bzrlib.tests import TestCaseWithTransport
40
from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
41
from bzrlib.trace import mutter
42
from bzrlib.transport import get_transport
43
from bzrlib.transport.memory import MemoryTransport
44
from bzrlib.tsort import topo_sort
45
import bzrlib.versionedfile as versionedfile
46
from bzrlib.weave import WeaveFile
47
from bzrlib.weavefile import read_weave, write_weave
50
class VersionedFileTestMixIn(object):
51
"""A mixin test class for testing VersionedFiles.
53
This is not an adaptor-style test at this point because
54
theres no dynamic substitution of versioned file implementations,
55
they are strictly controlled by their owning repositories.
60
f.add_lines('r0', [], ['a\n', 'b\n'])
61
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
63
versions = f.versions()
64
self.assertTrue('r0' in versions)
65
self.assertTrue('r1' in versions)
66
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
67
self.assertEquals(f.get_text('r0'), 'a\nb\n')
68
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
69
self.assertEqual(2, len(f))
70
self.assertEqual(2, f.num_versions())
72
self.assertRaises(RevisionNotPresent,
73
f.add_lines, 'r2', ['foo'], [])
74
self.assertRaises(RevisionAlreadyPresent,
75
f.add_lines, 'r1', [], [])
77
# this checks that reopen with create=True does not break anything.
78
f = self.reopen_file(create=True)
81
def test_adds_with_parent_texts(self):
84
parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
86
parent_texts['r1'] = f.add_lines_with_ghosts('r1',
89
parent_texts=parent_texts)
90
except NotImplementedError:
91
# if the format doesn't support ghosts, just add normally.
92
parent_texts['r1'] = f.add_lines('r1',
95
parent_texts=parent_texts)
96
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
97
self.assertNotEqual(None, parent_texts['r0'])
98
self.assertNotEqual(None, parent_texts['r1'])
100
versions = f.versions()
101
self.assertTrue('r0' in versions)
102
self.assertTrue('r1' in versions)
103
self.assertTrue('r2' in versions)
104
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
105
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
106
self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
107
self.assertEqual(3, f.num_versions())
108
origins = f.annotate('r1')
109
self.assertEquals(origins[0][0], 'r0')
110
self.assertEquals(origins[1][0], 'r1')
111
origins = f.annotate('r2')
112
self.assertEquals(origins[0][0], 'r1')
113
self.assertEquals(origins[1][0], 'r2')
116
f = self.reopen_file()
119
def test_add_unicode_content(self):
120
# unicode content is not permitted in versioned files.
121
# versioned files version sequences of bytes only.
123
self.assertRaises(errors.BzrBadParameterUnicode,
124
vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
126
(errors.BzrBadParameterUnicode, NotImplementedError),
127
vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
129
def test_inline_newline_throws(self):
130
# \r characters are not permitted in lines being added
132
self.assertRaises(errors.BzrBadParameterContainsNewline,
133
vf.add_lines, 'a', [], ['a\n\n'])
135
(errors.BzrBadParameterContainsNewline, NotImplementedError),
136
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
137
# but inline CR's are allowed
138
vf.add_lines('a', [], ['a\r\n'])
140
vf.add_lines_with_ghosts('b', [], ['a\r\n'])
141
except NotImplementedError:
144
def test_add_reserved(self):
146
self.assertRaises(errors.ReservedId,
147
vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
149
self.assertRaises(errors.ReservedId,
150
vf.add_delta, 'a:', [], None, 'sha1', False, ((0, 0, 0, []),))
152
def test_get_reserved(self):
154
self.assertRaises(errors.ReservedId, vf.get_delta, 'b:')
155
self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
156
self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
157
self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
159
def test_get_delta(self):
161
sha1s = self._setup_for_deltas(f)
162
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
163
[(0, 0, 1, [('base', 'line\n')])])
164
self.assertEqual(expected_delta, f.get_delta('base'))
166
text_name = 'chain1-'
167
for depth in range(26):
168
new_version = text_name + '%s' % depth
169
expected_delta = (next_parent, sha1s[depth],
171
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
172
self.assertEqual(expected_delta, f.get_delta(new_version))
173
next_parent = new_version
175
text_name = 'chain2-'
176
for depth in range(26):
177
new_version = text_name + '%s' % depth
178
expected_delta = (next_parent, sha1s[depth], False,
179
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
180
self.assertEqual(expected_delta, f.get_delta(new_version))
181
next_parent = new_version
182
# smoke test for eol support
183
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
184
self.assertEqual(['line'], f.get_lines('noeol'))
185
self.assertEqual(expected_delta, f.get_delta('noeol'))
187
def test_get_deltas(self):
189
sha1s = self._setup_for_deltas(f)
190
deltas = f.get_deltas(f.versions())
191
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
192
[(0, 0, 1, [('base', 'line\n')])])
193
self.assertEqual(expected_delta, deltas['base'])
195
text_name = 'chain1-'
196
for depth in range(26):
197
new_version = text_name + '%s' % depth
198
expected_delta = (next_parent, sha1s[depth],
200
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
201
self.assertEqual(expected_delta, deltas[new_version])
202
next_parent = new_version
204
text_name = 'chain2-'
205
for depth in range(26):
206
new_version = text_name + '%s' % depth
207
expected_delta = (next_parent, sha1s[depth], False,
208
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
209
self.assertEqual(expected_delta, deltas[new_version])
210
next_parent = new_version
211
# smoke tests for eol support
212
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
213
self.assertEqual(['line'], f.get_lines('noeol'))
214
self.assertEqual(expected_delta, deltas['noeol'])
215
# smoke tests for eol support - two noeol in a row same content
216
expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
217
[(0, 1, 2, [('noeolsecond', 'line\n'), ('noeolsecond', 'line\n')])]),
218
('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
219
[(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
220
self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
221
self.assertTrue(deltas['noeolsecond'] in expected_deltas)
222
# two no-eol in a row, different content
223
expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
224
[(1, 2, 1, [('noeolnotshared', 'phone\n')])])
225
self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
226
self.assertEqual(expected_delta, deltas['noeolnotshared'])
227
# eol folling a no-eol with content change
228
expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
229
[(0, 1, 1, [('eol', 'phone\n')])])
230
self.assertEqual(['phone\n'], f.get_lines('eol'))
231
self.assertEqual(expected_delta, deltas['eol'])
232
# eol folling a no-eol with content change
233
expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
234
[(0, 1, 1, [('eolline', 'line\n')])])
235
self.assertEqual(['line\n'], f.get_lines('eolline'))
236
self.assertEqual(expected_delta, deltas['eolline'])
237
# eol with no parents
238
expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
239
[(0, 0, 1, [('noeolbase', 'line\n')])])
240
self.assertEqual(['line'], f.get_lines('noeolbase'))
241
self.assertEqual(expected_delta, deltas['noeolbase'])
242
# eol with two parents, in inverse insertion order
243
expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
244
[(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]),
245
('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
246
[(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]))
247
self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
248
#self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)
250
def test_make_mpdiffs(self):
251
from bzrlib import multiparent
252
vf = self.get_file('foo')
253
sha1s = self._setup_for_deltas(vf)
254
new_vf = self.get_file('bar')
255
for version in multiparent.topo_iter(vf):
256
mpdiff = vf.make_mpdiffs([version])[0]
257
new_vf.add_mpdiffs([(version, vf.get_parents(version),
258
vf.get_sha1(version), mpdiff)])
259
self.assertEqualDiff(vf.get_text(version),
260
new_vf.get_text(version))
262
def _setup_for_deltas(self, f):
263
self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
264
# add texts that should trip the knit maximum delta chain threshold
265
# as well as doing parallel chains of data in knits.
266
# this is done by two chains of 25 insertions
267
f.add_lines('base', [], ['line\n'])
268
f.add_lines('noeol', ['base'], ['line'])
269
# detailed eol tests:
270
# shared last line with parent no-eol
271
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
272
# differing last line with parent, both no-eol
273
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
274
# add eol following a noneol parent, change content
275
f.add_lines('eol', ['noeol'], ['phone\n'])
276
# add eol following a noneol parent, no change content
277
f.add_lines('eolline', ['noeol'], ['line\n'])
278
# noeol with no parents:
279
f.add_lines('noeolbase', [], ['line'])
280
# noeol preceeding its leftmost parent in the output:
281
# this is done by making it a merge of two parents with no common
282
# anestry: noeolbase and noeol with the
283
# later-inserted parent the leftmost.
284
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
285
# two identical eol texts
286
f.add_lines('noeoldup', ['noeol'], ['line'])
288
text_name = 'chain1-'
290
sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
291
1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
292
2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
293
3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
294
4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
295
5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
296
6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
297
7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
298
8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
299
9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
300
10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
301
11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
302
12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
303
13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
304
14:'2c4b1736566b8ca6051e668de68650686a3922f2',
305
15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
306
16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
307
17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
308
18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
309
19:'1ebed371807ba5935958ad0884595126e8c4e823',
310
20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
311
21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
312
22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
313
23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
314
24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
315
25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
317
for depth in range(26):
318
new_version = text_name + '%s' % depth
319
text = text + ['line\n']
320
f.add_lines(new_version, [next_parent], text)
321
next_parent = new_version
323
text_name = 'chain2-'
325
for depth in range(26):
326
new_version = text_name + '%s' % depth
327
text = text + ['line\n']
328
f.add_lines(new_version, [next_parent], text)
329
next_parent = new_version
332
def test_add_delta(self):
333
# tests for the add-delta facility.
334
# at this point, optimising for speed, we assume no checks when deltas are inserted.
335
# this may need to be revisited.
336
source = self.get_file('source')
337
source.add_lines('base', [], ['line\n'])
339
text_name = 'chain1-'
341
for depth in range(26):
342
new_version = text_name + '%s' % depth
343
text = text + ['line\n']
344
source.add_lines(new_version, [next_parent], text)
345
next_parent = new_version
347
text_name = 'chain2-'
349
for depth in range(26):
350
new_version = text_name + '%s' % depth
351
text = text + ['line\n']
352
source.add_lines(new_version, [next_parent], text)
353
next_parent = new_version
354
source.add_lines('noeol', ['base'], ['line'])
356
target = self.get_file('target')
357
for version in source.versions():
358
parent, sha1, noeol, delta = source.get_delta(version)
359
target.add_delta(version,
360
source.get_parents(version),
365
self.assertRaises(RevisionAlreadyPresent,
366
target.add_delta, 'base', [], None, '', False, [])
367
for version in source.versions():
368
self.assertEqual(source.get_lines(version),
369
target.get_lines(version))
371
def test_ancestry(self):
373
self.assertEqual([], f.get_ancestry([]))
374
f.add_lines('r0', [], ['a\n', 'b\n'])
375
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
376
f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
377
f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
378
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
379
self.assertEqual([], f.get_ancestry([]))
380
versions = f.get_ancestry(['rM'])
381
# there are some possibilities:
385
# so we check indexes
386
r0 = versions.index('r0')
387
r1 = versions.index('r1')
388
r2 = versions.index('r2')
389
self.assertFalse('r3' in versions)
390
rM = versions.index('rM')
391
self.assertTrue(r0 < r1)
392
self.assertTrue(r0 < r2)
393
self.assertTrue(r1 < rM)
394
self.assertTrue(r2 < rM)
396
self.assertRaises(RevisionNotPresent,
397
f.get_ancestry, ['rM', 'rX'])
399
self.assertEqual(set(f.get_ancestry('rM')),
400
set(f.get_ancestry('rM', topo_sorted=False)))
402
def test_mutate_after_finish(self):
404
f.transaction_finished()
405
self.assertRaises(errors.OutSideTransaction, f.add_delta, '', [], '', '', False, [])
406
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
407
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
408
self.assertRaises(errors.OutSideTransaction, f.fix_parents, '', [])
409
self.assertRaises(errors.OutSideTransaction, f.join, '')
410
self.assertRaises(errors.OutSideTransaction, f.clone_text, 'base', 'bar', ['foo'])
412
def test_clear_cache(self):
414
# on a new file it should not error
416
# and after adding content, doing a clear_cache and a get should work.
417
f.add_lines('0', [], ['a'])
419
self.assertEqual(['a'], f.get_lines('0'))
421
def test_clone_text(self):
423
f.add_lines('r0', [], ['a\n', 'b\n'])
424
f.clone_text('r1', 'r0', ['r0'])
426
self.assertEquals(f.get_lines('r1'), f.get_lines('r0'))
427
self.assertEquals(f.get_lines('r1'), ['a\n', 'b\n'])
428
self.assertEquals(f.get_parents('r1'), ['r0'])
430
self.assertRaises(RevisionNotPresent,
431
f.clone_text, 'r2', 'rX', [])
432
self.assertRaises(RevisionAlreadyPresent,
433
f.clone_text, 'r1', 'r0', [])
435
verify_file(self.reopen_file())
437
def test_create_empty(self):
439
f.add_lines('0', [], ['a\n'])
440
new_f = f.create_empty('t', MemoryTransport())
441
# smoke test, specific types should check it is honoured correctly for
442
# non type attributes
443
self.assertEqual([], new_f.versions())
444
self.assertTrue(isinstance(new_f, f.__class__))
446
def test_copy_to(self):
448
f.add_lines('0', [], ['a\n'])
449
t = MemoryTransport()
451
for suffix in f.__class__.get_suffixes():
452
self.assertTrue(t.has('foo' + suffix))
454
def test_get_suffixes(self):
457
self.assertEqual(f.__class__.get_suffixes(), f.__class__.get_suffixes())
458
# and should be a list
459
self.assertTrue(isinstance(f.__class__.get_suffixes(), list))
461
def build_graph(self, file, graph):
    """Populate *file* with one empty text per node of *graph*.

    Nodes are added in topological order so every parent exists
    before any child that references it.
    """
    for version in topo_sort(graph.items()):
        file.add_lines(version, graph[version], [])
465
def test_get_graph(self):
471
self.build_graph(f, graph)
472
self.assertEqual(graph, f.get_graph())
474
def test_get_graph_partial(self):
482
complex_graph.update(simple_a)
487
complex_graph.update(simple_b)
494
complex_graph.update(simple_gam)
496
simple_b_gam.update(simple_gam)
497
simple_b_gam.update(simple_b)
498
self.build_graph(f, complex_graph)
499
self.assertEqual(simple_a, f.get_graph(['a']))
500
self.assertEqual(simple_b, f.get_graph(['b']))
501
self.assertEqual(simple_gam, f.get_graph(['gam']))
502
self.assertEqual(simple_b_gam, f.get_graph(['b', 'gam']))
504
def test_get_parents(self):
506
f.add_lines('r0', [], ['a\n', 'b\n'])
507
f.add_lines('r1', [], ['a\n', 'b\n'])
508
f.add_lines('r2', [], ['a\n', 'b\n'])
509
f.add_lines('r3', [], ['a\n', 'b\n'])
510
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
511
self.assertEquals(f.get_parents('m'), ['r0', 'r1', 'r2', 'r3'])
513
self.assertRaises(RevisionNotPresent,
516
def test_annotate(self):
518
f.add_lines('r0', [], ['a\n', 'b\n'])
519
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
520
origins = f.annotate('r1')
521
self.assertEquals(origins[0][0], 'r1')
522
self.assertEquals(origins[1][0], 'r0')
524
self.assertRaises(RevisionNotPresent,
528
# tests that walk returns all the inclusions for the requested
529
# revisions as well as the revisions changes themselves.
530
f = self.get_file('1')
531
f.add_lines('r0', [], ['a\n', 'b\n'])
532
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
533
f.add_lines('rX', ['r1'], ['d\n', 'b\n'])
534
f.add_lines('rY', ['r1'], ['c\n', 'e\n'])
537
for lineno, insert, dset, text in f.walk(['rX', 'rY']):
538
lines[text] = (insert, dset)
540
self.assertTrue(lines['a\n'], ('r0', set(['r1'])))
541
self.assertTrue(lines['b\n'], ('r0', set(['rY'])))
542
self.assertTrue(lines['c\n'], ('r1', set(['rX'])))
543
self.assertTrue(lines['d\n'], ('rX', set([])))
544
self.assertTrue(lines['e\n'], ('rY', set([])))
546
def test_detection(self):
547
# Test weaves detect corruption.
549
# Weaves contain a checksum of their texts.
550
# When a text is extracted, this checksum should be
553
w = self.get_file_corrupted_text()
555
self.assertEqual('hello\n', w.get_text('v1'))
556
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
557
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
558
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
560
w = self.get_file_corrupted_checksum()
562
self.assertEqual('hello\n', w.get_text('v1'))
563
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
564
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
565
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
567
def get_file_corrupted_text(self):
    """Return a versioned file with corrupt text but valid metadata.

    Abstract hook: concrete test classes must override this.
    """
    raise NotImplementedError(self.get_file_corrupted_text)
571
def reopen_file(self, name='foo'):
    """Open the versioned file from disk again.

    Abstract hook: concrete test classes must override this.
    """
    raise NotImplementedError(self.reopen_file)
575
def test_iter_lines_added_or_present_in_versions(self):
576
# test that we get at least an equalset of the lines added by
577
# versions in the weave
578
# the ordering here is to make a tree so that dumb searches have
579
# more changes to muck up.
581
class InstrumentedProgress(progress.DummyProgress):
585
progress.DummyProgress.__init__(self)
588
def update(self, msg=None, current=None, total=None):
589
self.updates.append((msg, current, total))
592
# add a base to get included
593
vf.add_lines('base', [], ['base\n'])
594
# add a ancestor to be included on one side
595
vf.add_lines('lancestor', [], ['lancestor\n'])
596
# add a ancestor to be included on the other side
597
vf.add_lines('rancestor', ['base'], ['rancestor\n'])
598
# add a child of rancestor with no eofile-nl
599
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
600
# add a child of lancestor and base to join the two roots
601
vf.add_lines('otherchild',
602
['lancestor', 'base'],
603
['base\n', 'lancestor\n', 'otherchild\n'])
604
def iter_with_versions(versions, expected):
605
# now we need to see what lines are returned, and how often.
612
progress = InstrumentedProgress()
613
# iterate over the lines
614
for line in vf.iter_lines_added_or_present_in_versions(versions,
617
if []!= progress.updates:
618
self.assertEqual(expected, progress.updates)
620
lines = iter_with_versions(['child', 'otherchild'],
621
[('Walking content.', 0, 2),
622
('Walking content.', 1, 2),
623
('Walking content.', 2, 2)])
624
# we must see child and otherchild
625
self.assertTrue(lines['child\n'] > 0)
626
self.assertTrue(lines['otherchild\n'] > 0)
627
# we dont care if we got more than that.
630
lines = iter_with_versions(None, [('Walking content.', 0, 5),
631
('Walking content.', 1, 5),
632
('Walking content.', 2, 5),
633
('Walking content.', 3, 5),
634
('Walking content.', 4, 5),
635
('Walking content.', 5, 5)])
636
# all lines must be seen at least once
637
self.assertTrue(lines['base\n'] > 0)
638
self.assertTrue(lines['lancestor\n'] > 0)
639
self.assertTrue(lines['rancestor\n'] > 0)
640
self.assertTrue(lines['child\n'] > 0)
641
self.assertTrue(lines['otherchild\n'] > 0)
643
def test_fix_parents(self):
644
# some versioned files allow incorrect parents to be corrected after
645
# insertion - this may not fix ancestry..
646
# if they do not supported, they just do not implement it.
647
# we test this as an interface test to ensure that those that *do*
648
# implementent it get it right.
650
vf.add_lines('notbase', [], [])
651
vf.add_lines('base', [], [])
653
vf.fix_parents('notbase', ['base'])
654
except NotImplementedError:
656
self.assertEqual(['base'], vf.get_parents('notbase'))
657
# open again, check it stuck.
659
self.assertEqual(['base'], vf.get_parents('notbase'))
661
def test_fix_parents_with_ghosts(self):
662
# when fixing parents, ghosts that are listed should not be ghosts
667
vf.add_lines_with_ghosts('notbase', ['base', 'stillghost'], [])
668
except NotImplementedError:
670
vf.add_lines('base', [], [])
671
vf.fix_parents('notbase', ['base', 'stillghost'])
672
self.assertEqual(['base'], vf.get_parents('notbase'))
673
# open again, check it stuck.
675
self.assertEqual(['base'], vf.get_parents('notbase'))
676
# and check the ghosts
677
self.assertEqual(['base', 'stillghost'],
678
vf.get_parents_with_ghosts('notbase'))
680
def test_add_lines_with_ghosts(self):
681
# some versioned file formats allow lines to be added with parent
682
# information that is > than that in the format. Formats that do
683
# not support this need to raise NotImplementedError on the
684
# add_lines_with_ghosts api.
686
# add a revision with ghost parents
687
# The preferred form is utf8, but we should translate when needed
688
parent_id_unicode = u'b\xbfse'
689
parent_id_utf8 = parent_id_unicode.encode('utf8')
691
vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
692
except NotImplementedError:
693
# check the other ghost apis are also not implemented
694
self.assertRaises(NotImplementedError, vf.has_ghost, 'foo')
695
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
696
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
697
self.assertRaises(NotImplementedError, vf.get_graph_with_ghosts)
699
vf = self.reopen_file()
700
# test key graph related apis: getncestry, _graph, get_parents
702
# - these are ghost unaware and must not be reflect ghosts
703
self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
704
self.assertEqual([], vf.get_parents('notbxbfse'))
705
self.assertEqual({'notbxbfse':[]}, vf.get_graph())
706
self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
707
vf.has_version, parent_id_unicode))
708
self.assertFalse(vf.has_version(parent_id_utf8))
709
# we have _with_ghost apis to give us ghost information.
710
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
711
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
712
self.assertEqual({'notbxbfse':[parent_id_utf8]}, vf.get_graph_with_ghosts())
713
self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
714
vf.has_ghost, parent_id_unicode))
715
self.assertTrue(vf.has_ghost(parent_id_utf8))
716
# if we add something that is a ghost of another, it should correct the
717
# results of the prior apis
718
self.callDeprecated([osutils._revision_id_warning],
719
vf.add_lines, parent_id_unicode, [], [])
720
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
721
self.assertEqual([parent_id_utf8], vf.get_parents('notbxbfse'))
722
self.assertEqual({parent_id_utf8:[],
723
'notbxbfse':[parent_id_utf8],
726
self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
727
vf.has_version, parent_id_unicode))
728
self.assertTrue(vf.has_version(parent_id_utf8))
729
# we have _with_ghost apis to give us ghost information.
730
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
731
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
732
self.assertEqual({parent_id_utf8:[],
733
'notbxbfse':[parent_id_utf8],
735
vf.get_graph_with_ghosts())
736
self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
737
vf.has_ghost, parent_id_unicode))
738
self.assertFalse(vf.has_ghost(parent_id_utf8))
740
def test_add_lines_with_ghosts_after_normal_revs(self):
741
# some versioned file formats allow lines to be added with parent
742
# information that is > than that in the format. Formats that do
743
# not support this need to raise NotImplementedError on the
744
# add_lines_with_ghosts api.
746
# probe for ghost support
749
except NotImplementedError:
751
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
752
vf.add_lines_with_ghosts('references_ghost',
754
['line\n', 'line_b\n', 'line_c\n'])
755
origins = vf.annotate('references_ghost')
756
self.assertEquals(('base', 'line\n'), origins[0])
757
self.assertEquals(('base', 'line_b\n'), origins[1])
758
self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
760
def test_readonly_mode(self):
761
transport = get_transport(self.get_url('.'))
762
factory = self.get_factory()
763
vf = factory('id', transport, 0777, create=True, access_mode='w')
764
vf = factory('id', transport, access_mode='r')
765
self.assertRaises(errors.ReadOnlyError, vf.add_delta, '', [], '', '', False, [])
766
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
767
self.assertRaises(errors.ReadOnlyError,
768
vf.add_lines_with_ghosts,
772
self.assertRaises(errors.ReadOnlyError, vf.fix_parents, 'base', [])
773
self.assertRaises(errors.ReadOnlyError, vf.join, 'base')
774
self.assertRaises(errors.ReadOnlyError, vf.clone_text, 'base', 'bar', ['foo'])
776
def test_get_sha1(self):
777
# check the sha1 data is available
780
vf.add_lines('a', [], ['a\n'])
781
# the same file, different metadata
782
vf.add_lines('b', ['a'], ['a\n'])
783
# a file differing only in last newline.
784
vf.add_lines('c', [], ['a'])
786
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('a'))
788
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('b'))
790
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', vf.get_sha1('c'))
792
self.assertEqual(['3f786850e387550fdab836ed7e6dc881de23001b',
793
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
794
'3f786850e387550fdab836ed7e6dc881de23001b'],
795
vf.get_sha1s(['a', 'c', 'b']))
798
class TestWeave(TestCaseWithTransport, VersionedFileTestMixIn):
800
def get_file(self, name='foo'):
    """Create and return a new WeaveFile on this test's transport."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=True)
803
def get_file_corrupted_text(self):
804
w = WeaveFile('foo', get_transport(self.get_url('.')), create=True)
805
w.add_lines('v1', [], ['hello\n'])
806
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
808
# We are going to invasively corrupt the text
809
# Make sure the internals of weave are the same
810
self.assertEqual([('{', 0)
818
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
819
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
824
w._weave[4] = 'There\n'
827
def get_file_corrupted_checksum(self):
828
w = self.get_file_corrupted_text()
830
w._weave[4] = 'there\n'
831
self.assertEqual('hello\nthere\n', w.get_text('v2'))
833
#Invalid checksum, first digit changed
834
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
837
def reopen_file(self, name='foo', create=False):
    """Reopen the named weave from disk, optionally creating it."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=create)
840
def test_no_implicit_create(self):
841
self.assertRaises(errors.NoSuchFile,
844
get_transport(self.get_url('.')))
846
def get_factory(self):
850
class TestKnit(TestCaseWithTransport, VersionedFileTestMixIn):
852
def get_file(self, name='foo'):
    """Create and return a new delta-compressed knit on this test's transport."""
    transport = get_transport(self.get_url('.'))
    return KnitVersionedFile(name, transport, delta=True, create=True)
856
def get_factory(self):
    """Return the versioned-file class under test."""
    return KnitVersionedFile
859
def get_file_corrupted_text(self):
860
knit = self.get_file()
861
knit.add_lines('v1', [], ['hello\n'])
862
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
865
def reopen_file(self, name='foo', create=False):
866
return KnitVersionedFile(name, get_transport(self.get_url('.')),
870
def test_detection(self):
871
knit = self.get_file()
874
def test_no_implicit_create(self):
875
self.assertRaises(errors.NoSuchFile,
878
get_transport(self.get_url('.')))
881
class InterString(versionedfile.InterVersionedFile):
882
"""An inter-versionedfile optimised code path for strings.
884
This is for use during testing where we use strings as versionedfiles
885
so that none of the default regsitered interversionedfile classes will
886
match - which lets us test the match logic.
890
def is_compatible(source, target):
    """Report compatibility: both endpoints must be plain strings,
    matching the strings-as-versionedfiles convention used in tests.
    """
    return all(isinstance(end, str) for end in (source, target))
895
# TODO this and the InterRepository core logic should be consolidatable
896
# if we make the registry a separate class though we still need to
897
# test the behaviour in the active registry to catch failure-to-handle-
899
class TestInterVersionedFile(TestCaseWithTransport):
901
def test_get_default_inter_versionedfile(self):
    # InterVersionedFile.get(a, b) probes registered classes for one
    # whose is_compatible(a, b) returns true, and falls back to the
    # default InterVersionedFile otherwise.  As a side effect this
    # checks the registered optimised classes tolerate surprising
    # versionedfile types being handed to them.
    left = "VersionedFile 1."
    right = "VersionedFile 2."
    self.assertGetsDefaultInterVersionedFile(left, right)
912
def assertGetsDefaultInterVersionedFile(self, a, b):
913
"""Asserts that InterVersionedFile.get(a, b) -> the default."""
914
inter = versionedfile.InterVersionedFile.get(a, b)
915
self.assertEqual(versionedfile.InterVersionedFile,
917
self.assertEqual(a, inter.source)
918
self.assertEqual(b, inter.target)
920
def test_register_inter_versionedfile_class(self):
921
# test that a optimised code path provider - a
922
# InterVersionedFile subclass can be registered and unregistered
923
# and that it is correctly selected when given a versionedfile
924
# pair that it returns true on for the is_compatible static method
926
dummy_a = "VersionedFile 1."
927
dummy_b = "VersionedFile 2."
928
versionedfile.InterVersionedFile.register_optimiser(InterString)
930
# we should get the default for something InterString returns False
932
self.assertFalse(InterString.is_compatible(dummy_a, None))
933
self.assertGetsDefaultInterVersionedFile(dummy_a, None)
934
# and we should get an InterString for a pair it 'likes'
935
self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
936
inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
937
self.assertEqual(InterString, inter.__class__)
938
self.assertEqual(dummy_a, inter.source)
939
self.assertEqual(dummy_b, inter.target)
941
versionedfile.InterVersionedFile.unregister_optimiser(InterString)
942
# now we should get the default InterVersionedFile object again.
943
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
946
class TestReadonlyHttpMixin(object):
948
def test_readonly_http_works(self):
949
# we should be able to read from http with a versioned file.
951
# try an empty file access
952
readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
953
self.assertEqual([], readonly_vf.versions())
955
vf.add_lines('1', [], ['a\n'])
956
vf.add_lines('2', ['1'], ['b\n', 'a\n'])
957
readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
958
self.assertEqual(['1', '2'], vf.versions())
959
for version in readonly_vf.versions():
960
readonly_vf.get_lines(version)
963
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
966
return WeaveFile('foo', get_transport(self.get_url('.')), create=True)
968
def get_factory(self):
972
class TestKnitHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
975
return KnitVersionedFile('foo', get_transport(self.get_url('.')),
976
delta=True, create=True)
978
def get_factory(self):
    """Return the versioned-file class exercised over HTTP (knits)."""
    return KnitVersionedFile
982
class MergeCasesMixin(object):
984
def doMerge(self, base, a, b, mp):
985
from cStringIO import StringIO
986
from textwrap import dedent
992
w.add_lines('text0', [], map(addcrlf, base))
993
w.add_lines('text1', ['text0'], map(addcrlf, a))
994
w.add_lines('text2', ['text0'], map(addcrlf, b))
998
self.log('merge plan:')
999
p = list(w.plan_merge('text1', 'text2'))
1000
for state, line in p:
1002
self.log('%12s | %s' % (state, line[:-1]))
1006
mt.writelines(w.weave_merge(p))
1008
self.log(mt.getvalue())
1010
mp = map(addcrlf, mp)
1011
self.assertEqual(mt.readlines(), mp)
1014
def testOneInsert(self):
1020
def testSeparateInserts(self):
    """Inserts at two distinct points merge cleanly with no conflict."""
    base = ['aaa', 'bbb', 'ccc']
    side_a = ['aaa', 'xxx', 'bbb', 'ccc']
    side_b = ['aaa', 'bbb', 'yyy', 'ccc']
    expected = ['aaa', 'xxx', 'bbb', 'yyy', 'ccc']
    self.doMerge(base, side_a, side_b, expected)
1026
def testSameInsert(self):
    """One side repeats the other's insert and adds more; no conflict."""
    base = ['aaa', 'bbb', 'ccc']
    side_a = ['aaa', 'xxx', 'bbb', 'ccc']
    side_b = ['aaa', 'xxx', 'bbb', 'yyy', 'ccc']
    expected = ['aaa', 'xxx', 'bbb', 'yyy', 'ccc']
    self.doMerge(base, side_a, side_b, expected)
1031
# Expected merge output when both sides insert at the same point; weave
# subclasses override this to expect an explicit conflict region.
overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

def testOverlappedInsert(self):
    """Overlapping inserts at one spot, where one extends the other."""
    base = ['aaa', 'bbb']
    side_a = ['aaa', 'xxx', 'yyy', 'bbb']
    side_b = ['aaa', 'xxx', 'bbb']
    self.doMerge(base, side_a, side_b, self.overlappedInsertExpected)

    # really it ought to reduce this to
    # ['aaa', 'xxx', 'yyy', 'bbb']
1041
def testClashReplace(self):
1042
self.doMerge(['aaa'],
1045
['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
1048
def testNonClashInsert1(self):
1049
self.doMerge(['aaa'],
1052
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
1055
def testNonClashInsert2(self):
1056
self.doMerge(['aaa'],
1062
def testDeleteAndModify(self):
1063
"""Clashing delete and modification.
1065
If one side modifies a region and the other deletes it then
1066
there should be a conflict with one side blank.
1069
#######################################
1070
# skipped, not working yet
1073
self.doMerge(['aaa', 'bbb', 'ccc'],
1074
['aaa', 'ddd', 'ccc'],
1076
['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])
1078
def _test_merge_from_strings(self, base, a, b, expected):
1080
w.add_lines('text0', [], base.splitlines(True))
1081
w.add_lines('text1', ['text0'], a.splitlines(True))
1082
w.add_lines('text2', ['text0'], b.splitlines(True))
1083
self.log('merge plan:')
1084
p = list(w.plan_merge('text1', 'text2'))
1085
for state, line in p:
1087
self.log('%12s | %s' % (state, line[:-1]))
1088
self.log('merge result:')
1089
result_text = ''.join(w.weave_merge(p))
1090
self.log(result_text)
1091
self.assertEqualDiff(result_text, expected)
1093
def test_weave_merge_conflicts(self):
    # does weave merge properly handle plans that end with unchanged?
    plan = [('new-a', 'hello\n')]
    merged = self.get_file().weave_merge(plan)
    self.assertEqual(''.join(merged), 'hello\n')
1098
def test_deletion_extended(self):
1099
"""One side deletes, the other deletes more.
1116
self._test_merge_from_strings(base, a, b, result)
1118
def test_deletion_overlap(self):
1119
"""Delete overlapping regions with no other conflict.
1121
Arguably it'd be better to treat these as agreement, rather than
1122
conflict, but for now conflict is safer.
1150
self._test_merge_from_strings(base, a, b, result)
1152
def test_agreement_deletion(self):
1153
"""Agree to delete some lines, without conflicts."""
1175
self._test_merge_from_strings(base, a, b, result)
1177
def test_sync_on_deletion(self):
1178
"""Specific case of merge where we can synchronize incorrectly.
1180
A previous version of the weave merge concluded that the two versions
1181
agreed on deleting line 2, and this could be a synchronization point.
1182
Line 1 was then considered in isolation, and thought to be deleted on
1185
It's better to consider the whole thing as a disagreement region.
1196
a's replacement line 2
1209
a's replacement line 2
1216
self._test_merge_from_strings(base, a, b, result)
1219
class TestKnitMerge(TestCaseWithTransport, MergeCasesMixin):
    """Run the shared merge cases against KnitVersionedFile."""

    def get_file(self, name='foo'):
        """Create a fresh delta-compressed knit on local transport."""
        transport = get_transport(self.get_url('.'))
        return KnitVersionedFile(name, transport, delta=True, create=True)
1225
def log_contents(self, w):
1229
class TestWeaveMerge(TestCaseWithTransport, MergeCasesMixin):
1231
def get_file(self, name='foo'):
    """Create a fresh WeaveFile on local transport."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=True)
1234
def log_contents(self, w):
1235
self.log('weave is:')
1237
write_weave(w, tmpf)
1238
self.log(tmpf.getvalue())
1240
# Weave merge reports the overlapping insert as an explicit conflict
# region rather than collapsing it, so override the mixin's expectation.
overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                            'xxx', '>>>>>>> ', 'bbb']