# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from gzip import GzipFile
from itertools import chain

from .. import (
    errors,
    graph as _mod_graph,
    )
from ..errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from ..bzr import groupcompress
from ..bzr.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from ..sixish import (
    BytesIO,
    zip,
    )
from . import (
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    TestSkipped,
    )
from .http_utils import TestCaseWithWebserver
from ..transport.memory import MemoryTransport
from ..bzr import versionedfile as versionedfile
from ..bzr.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from ..bzr.weave import (
    WeaveFile,
    )
from ..bzr.weavefile import write_weave
from .scenarios import load_tests_apply_scenarios


load_tests = load_tests_apply_scenarios
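
# Each test in TestVersionedFiles below is multiplied by the class's
# 'scenarios' attribute; load_tests_apply_scenarios performs the
# parameterisation that the old hand-written load_tests hook used to do.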


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        b'base': ((b'origin',),),
        b'left': ((b'base',),),
        b'right': ((b'base',),),
        b'merged': ((b'left',), (b'right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = b'\n'
    else:
        last_char = b''
    f.add_lines(b'origin', [], [b'origin' + last_char])
    f.add_lines(b'base', [b'origin'], [b'base' + last_char])
    f.add_lines(b'left', [b'base'], [b'base\n', b'left' + last_char])
    if not left_only:
        f.add_lines(b'right', [b'base'],
                    [b'base\n', b'right' + last_char])
        f.add_lines(b'merged', [b'left', b'right'],
                    [b'base\n', b'left\n', b'right\n', b'merged' + last_char])
    return f, parents
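
# The diamond graph the helpers here construct:
#
#        origin
#          |
#         base
#         /  \
#     left    right
#         \  /
#        merged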


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
                      nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges in many files.

    This is a sketch of the original helper: the prefixes/last_char/result
    setup and the small get_parents/get_key closures are reconstructed from
    how the loops below use them.
    """
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [(b'FileA',), (b'FileB',)]
    if trailing_eol:
        last_char = b'\n'
    else:
        last_char = b''
    result = []

    def get_parents(suffix_list):
        if nograph:
            return ()
        return [prefix + suffix for suffix in suffix_list]

    def get_key(suffix):
        if nokeys:
            return (None,)
        return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'origin'), (),
                                      [b'origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'base'),
                                      get_parents([(b'origin',)]),
                                      [b'base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'left'),
                                      get_parents([(b'base',)]),
                                      [b'base\n', b'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key(b'right'),
                                          get_parents([(b'base',)]),
                                          [b'base\n', b'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key(b'merged'),
                                          get_parents([(b'left',), (b'right',)]),
                                          [b'base\n', b'left\n', b'right\n',
                                           b'merged' + last_char]))
    return result


class VersionedFileTestMixIn(object):
    """A mixin test class for testing any VersionedFile."""

    def test_add(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])

        def verify_file(f):
            versions = f.versions()
            self.assertTrue(b'r0' in versions)
            self.assertTrue(b'r1' in versions)
            self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
            self.assertEqual(f.get_text(b'r0'), b'a\nb\n')
            self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())

            self.assertRaises(RevisionNotPresent,
                              f.add_lines, b'r2', [b'foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                              f.add_lines, b'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        try:
            _, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
                [b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts[b'r1'] = f.add_lines(b'r1',
                [b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts[b'r0'])
        self.assertNotEqual(None, parent_texts[b'r1'])

        def verify_file(f):
            versions = f.versions()
            self.assertTrue(b'r0' in versions)
            self.assertTrue(b'r1' in versions)
            self.assertTrue(b'r2' in versions)
            self.assertEqual(f.get_lines(b'r0'), [b'a\n', b'b\n'])
            self.assertEqual(f.get_lines(b'r1'), [b'b\n', b'c\n'])
            self.assertEqual(f.get_lines(b'r2'), [b'c\n', b'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate(b'r1')
            self.assertEqual(origins[0][0], b'r0')
            self.assertEqual(origins[1][0], b'r1')
            origins = f.annotate(b'r2')
            self.assertEqual(origins[0][0], b'r1')
            self.assertEqual(origins[1][0], b'r2')

        verify_file(f)
        f = self.reopen_file()
        verify_file(f)

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, the delta changes."""
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines(b'1', [], [b'a\n'])
        vf.add_lines(b'2', [b'1'], [b'a\n', b'a\n', b'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'2'))
        vf.add_lines(b'3', [b'1'], [b'a\n', b'a\n', b'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual([b'a\n', b'a\n', b'a\n'], vf.get_lines(b'3'))

    def test_inline_newline_throws(self):
        # \n characters are not permitted inside lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
                          vf.add_lines, b'a', [], [b'a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])
        # but inline CR's are allowed
        vf.add_lines(b'a', [], [b'a\r\n'])
        try:
            vf.add_lines_with_ghosts(b'b', [], [b'a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
                          vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts(b'd', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines_with_ghosts, version + b"2", [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + b"2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # the sha1 digest and the byte size of the full text.
            sha1, size = vf.add_lines(version, [], lines)[:2]
            self.assertEqual(size, sum(map(len, lines)))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines(b'noeol', [], [b'line'])
        vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
                     left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines(b'base', [], [b'line'])
        vf.add_lines(b'noeol', [b'base'], [b'prelude\n', b'line'])
        vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
                     left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))

    def test_make_mpdiffs(self):
        from breezy import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts(b'text', [b'ghost'], [b'line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent,
                          vf.make_mpdiffs, [b'ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version(b'base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines(b'base', [], [b'line\n'])
        f.add_lines(b'noeol', [b'base'], [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines(b'noeolsecond', [b'noeol'], [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        f.add_lines(b'noeolnotshared', [b'noeolsecond'], [b'line\n', b'phone'])
        # add eol following a noneol parent, change content
        f.add_lines(b'eol', [b'noeol'], [b'phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines(b'eolline', [b'noeol'], [b'line\n'])
        # noeol with no parents:
        f.add_lines(b'noeolbase', [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines(b'eolbeforefirstparent',
                    [b'noeolbase', b'noeol'], [b'line'])
        # two identical eol texts
        f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
        next_parent = b'base'
        text_name = b'chain1-'
        text = [b'line\n']
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + b'%d' % depth
            text = text + [b'line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = b'base'
        text_name = b'chain2-'
        text = [b'line\n']
        for depth in range(26):
            new_version = text_name + b'%d' % depth
            text = text + [b'line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
        f.add_lines(b'r2', [b'r0'], [b'b\n', b'c\n'])
        f.add_lines(b'r3', [b'r2'], [b'b\n', b'c\n'])
        f.add_lines(b'rM', [b'r1', b'r2'], [b'b\n', b'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry([b'rM'])
        # there are some possibilities:
        # the ordering is only constrained topologically,
        # so we check indexes
        r0 = versions.index(b'r0')
        r1 = versions.index(b'r1')
        r2 = versions.index(b'r2')
        self.assertFalse(b'r3' in versions)
        rM = versions.index(b'rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
                          f.get_ancestry, [b'rM', b'rX'])
        self.assertEqual(set(f.get_ancestry(b'rM')),
                         set(f.get_ancestry(b'rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines, b'', [], [])
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines_with_ghosts, b'', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines(b'0', [], [b'a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r0': ()}, f.get_parent_map([b'r0']))
        f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
        self.assertEqual(
            {b'r0': (),
             b'r1': (b'r0',)},
            f.get_parent_map([b'r0', b'r1']))
        f.add_lines(b'r2', [], [b'a\n', b'b\n'])
        f.add_lines(b'r3', [], [b'a\n', b'b\n'])
        f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
        self.assertEqual({}, f.get_parent_map(b'y'))
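        # absent keys are simply omitted from the result rather than raising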
        self.assertEqual(
            {b'r0': (),
             b'r1': (b'r0',)},
            f.get_parent_map([b'r0', b'y', b'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        f.add_lines(b'r1', [b'r0'], [b'c\n', b'b\n'])
        origins = f.annotate(b'r1')
        self.assertEqual(origins[0][0], b'r1')
        self.assertEqual(origins[1][0], b'r0')

        self.assertRaises(RevisionNotPresent,
                          f.annotate, b'foo')

    def test_detection(self):
        # Test weaves detect corruption.

    def test_add_lines_with_ghosts(self):
        vf = self.get_file()
        # add a revision with ghost parents
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
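        # u'b\xbfse' is u'b¿se': an id that is only representable as utf8
        # bytes, so ghost handling is exercised with a non-ASCII revision id.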
        try:
            vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                              vf.get_ancestry_with_ghosts, [b'foo'])
            self.assertRaises(NotImplementedError,
                              vf.get_parents_with_ghosts, b'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual([b'notbxbfse'], vf.get_ancestry(b'notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))
        # if we add something that is a ghost of another, it should correct
        # the results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry([b'notbxbfse']))
        self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
                         vf.get_parent_map([b'notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))
742
def test_add_lines_with_ghosts_after_normal_revs(self):
833
743
# some versioned file formats allow lines to be added with parent
837
747
vf = self.get_file()
838
748
# probe for ghost support
840
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
750
vf.add_lines_with_ghosts(b'base', [], [b'line\n', b'line_b\n'])
841
751
except NotImplementedError:
843
vf.add_lines_with_ghosts('references_ghost',
845
['line\n', 'line_b\n', 'line_c\n'])
846
origins = vf.annotate('references_ghost')
847
self.assertEquals(('base', 'line\n'), origins[0])
848
self.assertEquals(('base', 'line_b\n'), origins[1])
849
self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
753
vf.add_lines_with_ghosts(b'references_ghost',
754
[b'base', b'a_ghost'],
755
[b'line\n', b'line_b\n', b'line_c\n'])
756
origins = vf.annotate(b'references_ghost')
757
self.assertEqual((b'base', b'line\n'), origins[0])
758
self.assertEqual((b'base', b'line_b\n'), origins[1])
759
self.assertEqual((b'references_ghost', b'line_c\n'), origins[2])

    def test_readonly_mode(self):
        t = self.get_transport()
        factory = self.get_factory()
        vf = factory('id', t, 0o777, create=True, access_mode='w')
        vf = factory('id', t, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, b'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          b'base',
                          [],
                          [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        vf.add_lines(b'a', [], [b'a\n'])
        # the same file, different metadata
        vf.add_lines(b'b', [b'a'], [b'a\n'])
        # a file differing only in last newline.
        vf.add_lines(b'c', [], [b'a'])
782
self.assertEqual({
873
'a': '3f786850e387550fdab836ed7e6dc881de23001b',
874
'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
875
'b': '3f786850e387550fdab836ed7e6dc881de23001b',
783
b'a': b'3f786850e387550fdab836ed7e6dc881de23001b',
784
b'c': b'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
785
b'b': b'3f786850e387550fdab836ed7e6dc881de23001b',
877
vf.get_sha1s(['a', 'c', 'b']))
787
vf.get_sha1s([b'a', b'c', b'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, self.get_transport(),
                         create=True,
                         get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', self.get_transport(),
                      create=True,
                      get_scope=self.get_transaction)
        w.add_lines(b'v1', [], [b'hello\n'])
        w.add_lines(b'v2', [b'v1'], [b'hello\n', b'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([(b'{', 0),
                          b'hello\n',
                          (b'}', None),
                          (b'{', 1),
                          b'there\n',
                          (b'}', None),
                          ], w._weave)
        self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f',
                          b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef',
                          ], w._sha1s)

        # Corrupted
        w._weave[4] = b'There\n'
        return w
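
    # get_file_corrupted_text leaves the stored sha1s intact while altering
    # the text, so v2 no longer matches its recorded sha1;
    # get_file_corrupted_checksum below instead corrupts a stored sha1.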
    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = b'there\n'
        self.assertEqual(b'hello\nthere\n', w.get_text(b'v2'))
        # Invalid checksum, first digit changed
        w._sha1s[1] = b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w
833
def reopen_file(self, name='foo', create=False):
922
return WeaveFile(name, get_transport(self.get_url('.')), create=create,
923
get_scope=self.get_transaction)
834
return WeaveFile(name, self.get_transport(),
836
get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          self.get_transport(),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        # Minimal setUp sketch: two backing knit files and the plan-merge
        # wrapper that the tests below expect.
        super(TestPlanMergeVersionedFile, self).setUp()
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile(b'root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
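        # Only keys whose revision part ends in ':' may be added directly,
        # and parents/lines must be real sequences; the cases below are all
        # rejected with ValueError.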
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines((b'root', b'A'), [], [b'a'])
        self.vf1.add_lines((b'root', b'B'), [(b'root', b'A')], [b'b'])
        self.vf2.add_lines((b'root', b'C'), [], [b'c'])
        self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
        self.plan_merge_vf.add_lines((b'root', b'E:'),
                                     [(b'root', b'B'), (b'root', b'D')], [b'e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
        self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
        self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
                         self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
        self.assertEqual({},
                         self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
        self.assertEqual({
            (b'root', b'B'): ((b'root', b'A'),),
            (b'root', b'D'): ((b'root', b'C'),),
            (b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'),
                 (b'root', b'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()

        def get_record(suffix):
            return next(self.plan_merge_vf.get_record_stream(
                [(b'root', suffix)], 'unordered', True))
        self.assertEqual(b'a', get_record(b'A').get_bytes_as('fulltext'))
        self.assertEqual(b'c', get_record(b'C').get_bytes_as('fulltext'))
        self.assertEqual(b'e', get_record(b'E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record(b'F').storage_kind)


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from textwrap import dedent
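
        # Build a tiny history: text0 is the common base, text1 and text2
        # are sibling edits of it; plan the merge of the siblings and check
        # weave_merge's output against the expected lines mp.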

        def addcrlf(x):
            return x + b'\n'

        w = self.get_file()
        w.add_lines(b'text0', [], list(map(addcrlf, base)))
        w.add_lines(b'text1', [b'text0'], list(map(addcrlf, a)))
        w.add_lines(b'text2', [b'text0'], list(map(addcrlf, b)))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge(b'text1', b'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')

        mt = BytesIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = list(map(addcrlf, mp))
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     [b'aa'],
                     [],
                     [b'aa'])

    def testSeparateInserts(self):
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'ccc'],
                     [b'aaa', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])

    def testSameInsert(self):
        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])

    overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']

    def testOverlappedInsert(self):
        self.doMerge([b'aaa', b'bbb'],
                     [b'aaa', b'xxx', b'yyy', b'bbb'],
                     [b'aaa', b'xxx', b'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # [b'aaa', b'xxx', b'yyy', b'bbb']
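
    # The clash tests below expect weave_merge's conflict markers: a
    # conflicted region is opened with '<<<<<<< ', the alternatives are
    # separated by '=======', and the region is closed with '>>>>>>> '.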

    def testClashReplace(self):
        self.doMerge([b'aaa'],
                     [b'xxx'],
                     [b'yyy', b'zzz'],
                     [b'<<<<<<< ', b'xxx', b'=======', b'yyy', b'zzz',
                      b'>>>>>>> '])
1008
def testNonClashInsert1(self):
1090
self.doMerge(['aaa'],
1093
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
1009
self.doMerge([b'aaa'],
1012
[b'<<<<<<< ', b'xxx', b'aaa', b'=======', b'yyy', b'zzz',

    def testNonClashInsert2(self):
        self.doMerge([b'aaa'],
                     [b'aaa'],
                     [b'yyy', b'zzz'],
                     [b'yyy', b'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.
        """
        # skipped, not working yet
        return

        self.doMerge([b'aaa', b'bbb', b'ccc'],
                     [b'aaa', b'ddd', b'ccc'],
                     [b'aaa', b'ccc'],
                     [b'<<<<<<<< ', b'aaa', b'=======', b'>>>>>>> ', b'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines(b'text0', [], base.splitlines(True))
        w.add_lines(b'text1', [b'text0'], a.splitlines(True))
        w.add_lines(b'text2', [b'text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge(b'text1', b'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = b''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = b''.join(self.get_file().weave_merge([('new-a', b'hello\n')]))
        self.assertEqual(result, b'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map((b'foo@ar',)))
        self.assertEqual("inventory", mapper.map((b'quux',)))

    def test_prefix_mapper(self):
        # format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
                                                           b"revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
                                                          b"revision-id")))
        self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    # as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    # Note: the scenario names and the 'graph'/'key_length' entries below are
    # restored to match how the tests use self.graph and self.key_length.
    len_one_scenarios = [
        ('weave-named', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                                                    ConstantMapper('inventory')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup': None,
            'factory': make_file_factory(False, ConstantMapper('revisions')),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup': None,
            'factory': make_versioned_files_factory(WeaveFile,
                                                    PrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup': None,
            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 2),
            'graph': True,
            'key_length': 2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(True, False, 1),
            'graph': True,
            'key_length': 1,
            'support_partial_insertion': False,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
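
    # len_one_scenarios exercise unprefixed 1-tuple keys; len_two_scenarios
    # mostly exercise prefixed 2-tuple keys such as ('FileA', 'merged').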

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return (b'FileA',) + (suffix,)

    def test_add_fallback_implies_without_fallbacks(self):
        f = self.get_versionedfiles('files')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        g = self.get_versionedfiles('fallback')
        key_a = self.get_simple_key(b'a')
        g.add_lines(key_a, [], [b'\n'])
        f.add_fallback_versioned_files(g)
        self.assertTrue(key_a in f.get_parent_map([key_a]))
        self.assertFalse(
            key_a in f.without_fallbacks().get_parent_map([key_a]))
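        # Records reachable only via the fallback are visible through f, but
        # the without_fallbacks() view must expose only f's own records.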

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key(b'r0')
        key1 = self.get_simple_key(b'r1')
        key2 = self.get_simple_key(b'r2')
        keyf = self.get_simple_key(b'foo')
        f.add_lines(key0, [], [b'a\n', b'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], [b'b\n', b'c\n'])
        else:
            f.add_lines(key1, [], [b'b\n', b'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = (b'FileA',)
        # introduced full text
        origins = files.annotate(prefix + (b'origin',))
        self.assertEqual([
            (prefix + (b'origin',), b'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + (b'base',))
        self.assertEqual([
            (prefix + (b'base',), b'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + (b'merged',))
        if self.graph:
            self.assertEqual([
                (prefix + (b'base',), b'base\n'),
                (prefix + (b'left',), b'left\n'),
                (prefix + (b'right',), b'right\n'),
                (prefix + (b'merged',), b'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + (b'merged',), b'base\n'),
                (prefix + (b'merged',), b'left\n'),
                (prefix + (b'merged',), b'right\n'),
                (prefix + (b'merged',), b'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
                          files.annotate, prefix + (b'missing-key',))

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_versionedfiles()
        empty_text = (b'a', [])
        sample_text_nl = (b'b', [b"foo\n", b"bar\n"])
        sample_text_no_nl = (b'c', [b"foo\n", b"bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                     lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + b"2")
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, new_key, [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            record = next(vf.get_record_stream([new_key], 'unordered', True))
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)
1629
def test_add_lines_return(self):
1633
1630
files = self.get_versionedfiles()
1634
1631
# save code by using the stock data insertion helper.
1640
1637
results.append(add[:2])
1641
1638
if self.key_length == 1:
1642
1639
self.assertEqual([
1643
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1644
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1645
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1646
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1647
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1640
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1641
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1642
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1643
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1644
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1649
1646
elif self.key_length == 2:
1650
1647
self.assertEqual([
1651
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1652
('00e364d235126be43292ab09cb4686cf703ddc17', 7),
1653
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1654
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1655
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1656
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1657
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1658
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1659
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1660
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
1648
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1649
(b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
1650
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1651
(b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
1652
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1653
(b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
1654
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1655
(b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
1656
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
1657
(b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                (b'sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                (b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                (b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                (b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                (b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'00e364d235126be43292ab09cb4686cf703ddc17', 7),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                (b'ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                (b'FileA', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                (b'FileA', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                (b'FileA', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                (b'FileA', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                (b'FileA', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                (b'FileB', b'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                (b'FileB', b'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                (b'FileB', b'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                (b'FileB', b'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                (b'FileB', b'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
1731
1728
def test_newline_only(self):
1732
1729
f = self.get_versionedfiles()
1733
key_a = self.get_simple_key('a')
1734
f.add_lines(key_a, [], ['\n'])
1735
self.assertEqual('\n',
1736
f.get_record_stream([key_a], 'unordered', True
1737
).next().get_bytes_as('fulltext'))
1738
key_b = self.get_simple_key('b')
1739
f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
1740
self.assertEqual('\n',
1741
f.get_record_stream([key_b], 'unordered', True
1742
).next().get_bytes_as('fulltext'))
1730
key_a = self.get_simple_key(b'a')
1731
f.add_lines(key_a, [], [b'\n'])
1732
self.assertEqual(b'\n',
1733
next(f.get_record_stream([key_a], 'unordered', True
1734
)).get_bytes_as('fulltext'))
1735
key_b = self.get_simple_key(b'b')
1736
f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
1737
self.assertEqual(b'\n',
1738
next(f.get_record_stream([key_b], 'unordered', True
1739
)).get_bytes_as('fulltext'))
1744
1741
def test_get_known_graph_ancestry(self):
1745
1742
f = self.get_versionedfiles()
1746
1743
if not self.graph:
1747
1744
raise TestNotApplicable('ancestry info only relevant with graph.')
1748
key_a = self.get_simple_key('a')
1749
key_b = self.get_simple_key('b')
1750
key_c = self.get_simple_key('c')
1745
key_a = self.get_simple_key(b'a')
1746
key_b = self.get_simple_key(b'b')
1747
key_c = self.get_simple_key(b'c')
1756
f.add_lines(key_a, [], ['\n'])
1757
f.add_lines(key_b, [key_a], ['\n'])
1758
f.add_lines(key_c, [key_a, key_b], ['\n'])
1753
f.add_lines(key_a, [], [b'\n'])
1754
f.add_lines(key_b, [key_a], [b'\n'])
1755
f.add_lines(key_c, [key_a, key_b], [b'\n'])
1759
1756
kg = f.get_known_graph_ancestry([key_c])
1760
1757
self.assertIsInstance(kg, _mod_graph.KnownGraph)
1761
1758
self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
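        # Editorial sketch, not part of the original test: the adds above
        # build the triangle a <- b <- c, so a KnownGraph over it can also
        # answer head queries consistently, for example:
        #   kg.heads([key_a, key_c])   # -> {key_c}, since c descends from a
        # heads() is part of the KnownGraph API; the assertion above sticks
        # to topo_sort(), which is the behaviour under test here.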

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 2, (b'left',): 1, (b'right',): 1, (b'base',): 0}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'base'),
                ]
            sort_order = {
                (b'FileA', b'merged'): 2, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
                (b'FileA', b'base'): 0,
                (b'FileB', b'merged'): 2, (b'FileB', b'left'): 1, (b'FileB', b'right'): 1,
                (b'FileB', b'base'): 0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 0, (b'left',): 1, (b'right',): 1, (b'base',): 2}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'base'),
                ]
            sort_order = {
                (b'FileA', b'merged'): 0, (b'FileA', b'left'): 1, (b'FileA', b'right'): 1,
                (b'FileA', b'base'): 2,
                (b'FileB', b'merged'): 3, (b'FileB', b'left'): 4, (b'FileB', b'right'): 4,
                (b'FileB', b'base'): 5,
                }
        return keys, sort_order
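
    # Reading aid (editorial comment): for the diamond
    #   base -> left, right -> merged
    # the 'topological' ranks above are simply distance from base (base=0,
    # left/right=1 and tied, merged=2), while groupcompress order is roughly
    # newest-first and grouped by prefix, which is why FileA gets ranks 0..2
    # and FileB continues with 3..5.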

            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'absent',), (b'base',)]
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
                (b'FileA', b'absent'), (b'FileA', b'base'),
                (b'FileB', b'merged'), (b'FileB', b'left'), (b'FileB', b'right'),
                (b'FileB', b'absent'), (b'FileB', b'base'),
                (b'absent', b'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
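        # Editorial note on the contract exercised below: requesting missing
        # keys from get_record_stream is not an error; they come back as
        # records whose storage_kind is 'absent', roughly:
        #   for record in entries:
        #       if record.key in parent_map:
        #           record.get_bytes_as('fulltext')   # present: real content
        #       else:
        #           assert record.storage_kind == 'absent'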

        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key(b'missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                ((b'r0',), self.get_parents(())),
                ((b'r1',), self.get_parents(((b'r0',),))),
                ((b'r2',), self.get_parents(())),
                ((b'r3',), self.get_parents(())),
                ((b'm',), self.get_parents(((b'r0',), (b'r1',), (b'r2',), (b'r3',)))),
                ]
        else:
            parent_details = [
                ((b'FileA', b'r0'), self.get_parents(())),
                ((b'FileA', b'r1'), self.get_parents(((b'FileA', b'r0'),))),
                ((b'FileA', b'r2'), self.get_parents(())),
                ((b'FileA', b'r3'), self.get_parents(())),
                ((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
                    (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
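        # Editorial sketch of the behaviour checked next: get_parent_map
        # answers only for keys it knows, silently omitting unknown keys
        # rather than raising. With the graph above (hypothetical spelling,
        # key_length == 1, graph-supporting stores):
        #   files.get_parent_map([(b'r1',), (b'missing',)])
        #   # -> {(b'r1',): ((b'r0',),)}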

        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'base',), (b'origin',), (b'left',), (b'merged',), (b'right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                (b'FileA', b'base'), (b'FileB', b'origin'), (b'FileA', b'left'),
                (b'FileA', b'merged'), (b'FileB', b'right'),
                ]
        self.assertEqual({
            keys[0]: b'51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: b'00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: b'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: b'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: b'9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
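        # Editorial note: these digests are plain SHA-1s of each fulltext,
        # so any one of them can be re-derived from the diamond contents,
        # e.g. osutils.sha_strings([b'base\n']) should reproduce the value
        # expected for keys[0] above.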

        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [(b'origin',)]
            end_keys = [(b'merged',), (b'left',)]
            start_keys = [(b'right',), (b'base',)]
        else:
            origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
            end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
                (b'FileB', b'merged',), (b'FileB', b'left',)]
            start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
                (b'FileB', b'right',), (b'FileB', b'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = itertools.chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key(b'lancestor'), (), [b'lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key(b'rancestor'),
            self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key(b'child'),
            self.get_parents([self.get_simple_key(b'rancestor')]),
            [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key(b'otherchild'),
            self.get_parents([self.get_simple_key(b'lancestor'),
                self.get_simple_key(b'base')]),
            [b'base\n', b'lancestor\n', b'otherchild\n'])
        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            ('Walking content', 4, 5),
            ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
        self.assertTrue(
            lines[(b'lancestor\n', self.get_simple_key(b'lancestor'))] > 0)
        self.assertTrue(
            lines[(b'rancestor\n', self.get_simple_key(b'rancestor'))] > 0)
        self.assertTrue(lines[(b'child\n', self.get_simple_key(b'child'))] > 0)
        self.assertTrue(
            lines[(b'otherchild\n', self.get_simple_key(b'otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from breezy import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
        files.add_lines(self.get_simple_key(b'noeol'),
            self.get_parents([self.get_simple_key(b'base')]), [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key(b'noeolsecond'),
            self.get_parents([self.get_simple_key(b'noeol')]),
            [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key(b'noeolnotshared'),
            self.get_parents([self.get_simple_key(b'noeolsecond')]),
            [b'line\n', b'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key(b'eol'),
            self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key(b'eolline'),
            self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
            self.get_parents([self.get_simple_key(b'noeolbase'),
                self.get_simple_key(b'noeol')]),
            [b'line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key(b'noeoldup'),
            self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain1-'
        text = [b'line\n']
        sha1s = {0: b'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: b'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: b'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: b'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: b'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: b'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: b'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: b'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: b'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: b'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: b'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: b'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: b'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: b'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: b'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: b'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: b'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: b'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: b'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: b'1ebed371807ba5935958ad0884595126e8c4e823',
                 20: b'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: b'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: b'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: b'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: b'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: b'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain2-'
        text = [b'line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + b'%d' % depth)
            text = text + [b'line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
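        # Editorial sketch of the round trip this test drives: every key's
        # text is converted to a multi-parent diff and reapplied elsewhere,
        # approximately (argument spelling assumed, not verbatim):
        #   mpdiff = files.make_mpdiffs([key])[0]
        #   target.add_mpdiffs([(key, parents, expected_sha1, mpdiff)])
        # after which the reconstructed fulltexts must match the originals.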

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([(b"NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines[b"key"] = [b"dataline1", b"dataline2"]
        self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
            self.texts.get_sha1s([(b"key",)]))

    def test_get_parent_map(self):
        self._parent_map = {b"G": (b"A", b"B")}
        self.assertEqual({(b"G",): ((b"A",), (b"B",))},
            self.texts.get_parent_map([(b"G",), (b"L",)]))

    def test_get_record_stream(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("chunked", record.storage_kind)
        self.assertEqual(b"FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEqual([b"FOO", b"BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([(b"A",)], "unordered", True)
        record = next(it)
        self.assertEqual("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]
        self._lines[b"B"] = [b"HEY"]
        self._lines[b"C"] = [b"Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([(b"A",), (b"B",)])
        self.assertEqual(sorted([(b"FOO", b"A"), (b"BAR", b"A"), (b"HEY", b"B")]),
            sorted(list(it)))
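
    # Editorial note: VirtualVersionedFiles serves content straight from the
    # backing dict, so a 'chunked' record's payload is the stored list and
    # 'fulltext' is simply its concatenation:
    #   b"".join([b"FOO", b"BAR"]) == b"FOOBAR"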

        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)
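
    # Editorial sketch: the reordering asserted above behaves like
    #   sorted(request_keys, key=lambda k: (ordering_map.get(k, 0), k))
    # with unmapped keys defaulting ahead of mapped ones and ties broken
    # alphabetically, consistent with both the (D, B, A, C) and
    # (A, C, D, B) expectations. This is a sketch only; the real logic
    # lives in the ordering decorator that get_ordering_vf constructs.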