    f.add_lines(b'left', [b'base'], [b'base\n', b'left' + last_char])
    f.add_lines(b'right', [b'base'],
                [b'base\n', b'right' + last_char])
    f.add_lines(b'merged', [b'left', b'right'],
                [b'base\n', b'left\n', b'right\n', b'merged' + last_char])
    return f, parents


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
                      nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.
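
    The shape of the graph, as built by the add_lines calls below (origin
    is the root; merged joins the two sides)::

         origin
            |
          base
          /    \
       left    right
          \    /
          merged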

    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'origin'), (),
                                      [b'origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'base'),
                                      get_parents([(b'origin',)]), [b'base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key(b'left'),
                                      get_parents([(b'base',)]),
                                      [b'base\n', b'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key(b'right'),
                                          get_parents([(b'base',)]),
                                          [b'base\n', b'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key(b'merged'),
                                          get_parents([(b'left',), (b'right',)]),
                                          [b'base\n', b'left\n', b'right\n', b'merged' + last_char]))

        self.assertEqual(2, f.num_versions())

        self.assertRaises(RevisionNotPresent,
                          f.add_lines, b'r2', [b'foo'], [])
        self.assertRaises(RevisionAlreadyPresent,
                          f.add_lines, b'r1', [], [])

        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        _, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        try:
            _, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
                [b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts[b'r1'] = f.add_lines(b'r1',
                [b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
        f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'],
                    parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts[b'r0'])
        self.assertNotEqual(None, parent_texts[b'r1'])
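        # (A note on the pattern above: add_lines returns a 3-tuple whose
        # third element is an opaque content object; passing those objects
        # back in via parent_texts lets later add_lines calls reuse them as
        # delta bases instead of re-extracting the parent texts.)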

        def verify_file(f):
            versions = f.versions()
            self.assertTrue(b'r0' in versions)
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
                          vf.add_lines, b'a', [], [b'a\n', u'b\n', b'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, b'a', [], [b'a\n', u'b\n', b'c\n'])
        # lines being added must not contain newline characters except as
        # the final line terminator.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
                          vf.add_lines, b'a', [], [b'a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
                          vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, version + b"2", [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + b"2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""

        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines_with_ghosts, version + b"2",
                              [], lines, nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + b"2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.

        result = vf.add_lines(version, [], lines)
        self.assertEqual(3, len(result))
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                         result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                         vf.add_lines(b'd', [b'b', b'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                         left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines

        vf = self.get_file('fulltext')
        vf.add_lines(b'noeol', [], [b'line'])
        vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
                     left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
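        # A hedged aside on the left_matching_blocks hint above: the tuples
        # use the same (left_pos, right_pos, length) shape that difflib's
        # SequenceMatcher.get_matching_blocks() produces, describing which
        # parent lines already match the new text. For the texts just added:
        #
        #     import difflib
        #     m = difflib.SequenceMatcher(None, [b'line'],
        #                                 [b'newline\n', b'line'])
        #     m.get_matching_blocks()[:-1]  # -> [Match(a=0, b=1, size=1)]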
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines(b'base', [], [b'line'])
        vf.add_lines(b'noeol', [b'base'], [b'prelude\n', b'line'])
        vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
                     left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))

    def test_make_mpdiffs(self):

        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent,
                          vf.make_mpdiffs, [b'ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version(b'base'))

        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines(b'eolbeforefirstparent', [b'noeolbase', b'noeol'],
                    [b'line'])
        # two identical eol texts
        f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
        next_parent = b'base'

        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
                          f.get_ancestry, [b'rM', b'rX'])

        self.assertEqual(set(f.get_ancestry(b'rM')),
                         set(f.get_ancestry(b'rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, b'', [], [])
        self.assertRaises(errors.OutSideTransaction,
                          f.add_lines_with_ghosts, b'', [], [])

    def test_copy_to(self):
        f = self.get_file()

        f = self.get_file()
        f.add_lines(b'r0', [], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r0': ()}, f.get_parent_map([b'r0']))
        f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
        self.assertEqual(
            {b'r0': (), b'r1': (b'r0',)},
            f.get_parent_map([b'r0', b'r1']))
        f.add_lines(b'r2', [], [b'a\n', b'b\n'])
        f.add_lines(b'r3', [], [b'a\n', b'b\n'])
        f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
        self.assertEqual(
            {b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
        self.assertEqual({}, f.get_parent_map(b'y'))
        self.assertEqual(
            {b'r0': (), b'r1': (b'r0',)},
            f.get_parent_map([b'r0', b'y', b'r1']))

    def test_annotate(self):

        vf.add_lines(b'otherchild',
                     [b'lancestor', b'base'],
                     [b'base\n', b'lancestor\n', b'otherchild\n'])

        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(
                    versions, pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions([b'child', b'otherchild'],
        try:
            vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError,
                              vf.get_ancestry_with_ghosts, [b'foo'])
            self.assertRaises(NotImplementedError,
                              vf.get_parents_with_ghosts, b'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        self.assertEqual([b'notbxbfse'], vf.get_ancestry(b'notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))
        # if we add something that is a ghost of another, it should correct
        # the results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry([b'notbxbfse']))
        self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
                         vf.get_parent_map([b'notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, b'notbxbfse'],
                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
        self.assertEqual([parent_id_utf8],
                         vf.get_parents_with_ghosts(b'notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([(b'{', 0), b'hello\n', (b'}', None),
                          (b'{', 1), b'there\n', (b'}', None)
                          ], w._weave)
        self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f',
                          b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                          ], w._sha1s)

    def test_add_lines(self):
        self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          (b'root', b'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines((b'root', b'A'), [], [b'a'])
        self.vf1.add_lines((b'root', b'B'), [(b'root', b'A')], [b'b'])
        self.vf2.add_lines((b'root', b'C'), [], [b'c'])
        self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
        self.plan_merge_vf.add_lines((b'root', b'E:'),
                                     [(b'root', b'B'), (b'root', b'D')], [b'e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
        self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
                         self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
        self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
                         self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
        self.assertEqual({},
                         self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
        self.assertEqual({
            (b'root', b'B'): ((b'root', b'A'),),
            (b'root', b'D'): ((b'root', b'C'),),
            (b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'), (b'root', b'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()

        def get_record(suffix):
            return next(self.plan_merge_vf.get_record_stream(
                [(b'root', suffix)], 'unordered', True))

        vf.add_lines(b'1', [], [b'a\n'])
        vf.add_lines(b'2', [b'1'], [b'b\n', b'a\n'])
        readonly_vf = self.get_factory()('foo',
            transport.get_transport_from_url(self.get_readonly_url('.')))
        self.assertEqual([b'1', b'2'], vf.versions())
        self.assertEqual([b'1', b'2'], readonly_vf.versions())
        for version in readonly_vf.versions():
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
                     [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])

    overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']

    def testOverlappedInsert(self):
        self.doMerge([b'aaa', b'bbb'],
                     [b'aaa', b'xxx', b'yyy', b'bbb'],

        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            b'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual(b'origin', ft_data)
        self.assertEqual(b'base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
                           True)], logged_vf.calls)
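        # (Why exactly one recorded call: a knit delta stores only changes
        # against its compression parent, so reconstructing the b'merged'
        # fulltext forces the adapter to fetch the b'left' basis text from
        # the logged versionedfile, and nothing else.)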

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""

        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual(b'origin\n', ft_data)
        self.assertEqual(b'base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
                           True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual(b'origin\n', ft_data)
        self.assertEqual(b'base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
                           True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual(b'origin', ft_data)
        self.assertEqual(b'base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
                           True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual(
            "9b/file-id", mapper.map((b"file-id", b"revision-id")))
        self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
        self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))
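        # A toy illustration of the bucketing idea the assertions above rely
        # on: a short hash prefix plus the file id keeps related keys in one
        # directory. (Hedged: the real HashPrefixMapper lives in the breezy
        # versionedfile module; its exact digest function is not shown here,
        # so sha1 below is an illustrative assumption.)
        #
        #     from hashlib import sha1
        #     def toy_map(file_id):
        #         return "%s/%s" % (sha1(file_id).hexdigest()[:2],
        #                           file_id.decode("ascii"))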

        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
                                                           b"revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
                                                          b"revision-id")))
        self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))

        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
                          nokeys=False):
        return get_diamond_files(files, self.key_length,
                                 trailing_eol=trailing_eol, nograph=not self.graph,
                                 left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied, adding old content raises."""

            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + b"2")
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, new_key, [], lines,
                              nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                              vf.add_lines, new_key, [], lines,
                              nostore_sha=sha)
            # and no new version should have been added.
            record = next(vf.get_record_stream([new_key], 'unordered', True))
            self.assertEqual('absent', record.storage_kind)

        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual(b'',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key(b'a')
        f.add_lines(key_a, [], [b'\n'])
        self.assertEqual(b'\n',
                         next(f.get_record_stream([key_a], 'unordered', True
                                                  )).get_bytes_as('fulltext'))
        key_b = self.get_simple_key(b'b')
        f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
        self.assertEqual(b'\n',
                         next(f.get_record_stream([key_b], 'unordered', True
                                                  )).get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
                          ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
                           'knit-ft', 'knit-delta', 'chunked', 'fulltext',
                           'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
                           'knit-delta-gz',
                           'knit-delta-closure', 'knit-delta-closure-ref',
                           'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents,
                       require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                                 factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                                  bytes)
            if require_fulltext:
                factory.get_bytes_as('fulltext')

        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 2, (b'left',): 1,
                          (b'right',): 1, (b'base',): 0}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),

        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
            sort_order = {(b'merged',): 0, (b'left',): 1,
                          (b'right',): 1, (b'base',): 2}
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),

            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                              [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {(): 0}
        else:
            lows = {(b'FileA',): 0, (b'FileB',): 0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                                "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos

    def test_get_record_stream_unknown_storage_kind_raises(self):

            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                              factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                                  bytes)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'merged',), (b'left',), (b'right',),
                    (b'absent',), (b'base',)]
        else:
            keys = [
                (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),

            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents,
                        [b'different\n', b'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]

            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents,
                        [b'different\n', b'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
                                                                  'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]

            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents,
                        [b'different\n', b'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]

            ((b'FileA', b'r2'), self.get_parents(())),
            ((b'FileA', b'r3'), self.get_parents(())),
            ((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
                (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
            ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys

        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [(b'base',), (b'origin',), (b'left',),
                    (b'merged',), (b'right',)]

        # ask for shas from different prefixes.

        origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
        end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
                    (b'FileB', b'merged',), (b'FileB', b'left',)]
        start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
                      (b'FileB', b'right',), (b'FileB', b'base',)]
        origin_entries = source.get_record_stream(
            origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(
            start_keys, 'topological', False)
        entries = itertools.chain(origin_entries, end_entries, start_entries)

        files.insert_record_stream(entries)

        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
                          nograph=False, left_only=False)

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):

        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                             list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual({self.get_simple_key(b'left')},
                             set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream,
                entries)

        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key(b'origin'),
                                            self.get_simple_key(b'merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual({self.get_simple_key(b'left')},
                         set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key(b'merged')

        # add a base to get included
        files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key(b'lancestor'), (),
                        [b'lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key(b'rancestor'),
                        self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key(b'child'),
                        self.get_parents([self.get_simple_key(b'rancestor')]),
                        [b'base\n', b'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key(b'otherchild'),
                        self.get_parents([self.get_simple_key(b'lancestor'),
                                          self.get_simple_key(b'base')]),
                        [b'base\n', b'lancestor\n', b'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(
                    keys, pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key(b'child'),
             self.get_simple_key(b'otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # test all lines
        lines = iter_with_keys(files.keys(),
                               [('Walking content', 0, 5),
                                ('Walking content', 1, 5),
                                ('Walking content', 2, 5),
                                ('Walking content', 3, 5),
                                ('Walking content', 4, 5),
                                ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
        self.assertTrue(

        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
        files.add_lines(self.get_simple_key(b'noeol'),
                        self.get_parents([self.get_simple_key(b'base')]), [b'line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key(b'noeolsecond'),
                        self.get_parents([self.get_simple_key(b'noeol')]),
                        [b'line\n', b'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key(b'noeolnotshared'),
                        self.get_parents(
                            [self.get_simple_key(b'noeolsecond')]),
                        [b'line\n', b'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key(b'eol'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key(b'eolline'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
                        self.get_parents([self.get_simple_key(b'noeolbase'),
                                          self.get_simple_key(b'noeol')]),
                        [b'line'])
        # two identical eol texts
        files.add_lines(self.get_simple_key(b'noeoldup'),
                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
        next_parent = self.get_simple_key(b'base')
        text_name = b'chain1-'
        text = [b'line\n']

                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                next(files.get_record_stream([key], 'unordered',
                                             True)).get_bytes_as('fulltext'),
                next(target.get_record_stream([key], 'unordered',
                                              True)).get_bytes_as('fulltext')
                )

    def test_keys(self):

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
                          self.texts.add_lines, b"foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                          self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_get_sha1s(self):
        self._lines[b"key"] = [b"dataline1", b"dataline2"]
        self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
                         self.texts.get_sha1s([(b"key",)]))

    def test_get_parent_map(self):
        self._parent_map = {b"G": (b"A", b"B")}
        self.assertEqual({(b"G",): ((b"A",), (b"B",))},
                         self.texts.get_parent_map([(b"G",), (b"L",)]))

    def test_get_record_stream(self):
        self._lines[b"A"] = [b"FOO", b"BAR"]

        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf(
            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
        # And the request recorded

        vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
        request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
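        # A hedged sketch of the ordering rule described above (the -1
        # default for unknown keys is an assumption for illustration; the
        # real ordering lives in the get_ordering_vf implementation):
        #
        #     priority = {(b'B',): 2, (b'D',): 1}
        #     request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
        #     sorted(request_keys, key=lambda k: (priority.get(k, -1), k))
        #     # -> [(b'A',), (b'C',), (b'D',), (b'B',)]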