@@ -242,24 +247,25 @@
         # empty revision-store directory
         # empty weaves directory
         t = control.get_repository_transport(None)
-        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
-                             t.get('format').read())
+        with t.get('format') as f:
+            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
+                                 f.read())
         # XXX: no locks left when unlocked at the moment
         # self.assertEqualDiff('', t.get('lock').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
         # Check per-file knits.
-        branch = control.create_branch()
+        control.create_branch()
         tree = control.create_workingtree()
-        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
-        tree.put_file_bytes_non_atomic('foo', '')
-        tree.commit('1st post', rev_id='foo')
+        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
+        tree.put_file_bytes_non_atomic('foo', b'')
+        tree.commit('1st post', rev_id=b'foo')
         self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
-                           '\nfoo fulltext 0 81 :')
+                           b'\nfoo fulltext 0 81 :')

-    def assertHasKnit(self, t, knit_name, extra_content=''):
+    def assertHasKnit(self, t, knit_name, extra_content=b''):
         """Assert that knit_name exists on t."""
-        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
+        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
                              t.get(knit_name + '.kndx').read())

     def check_knits(self, t):
@@ -317,27 +326,27 @@
         the whole inventory. So we grab the one from the expected text. Which
         is valid when the api is not being abused.
         """
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
-        inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
-        self.assertEqual('test-rev-id', inv.root.revision)
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
+        inv_xml = b'<inventory format="5">\n</inventory>\n'
+        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
+        self.assertEqual(b'test-rev-id', inv.root.revision)

     def test_deserialise_uses_global_revision_id(self):
         """If it is set, then we re-use the global revision id"""
-        repo = self.make_repository('.',
-                format=controldir.format_registry.get('knit')())
-        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
-                   '</inventory>\n')
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
+        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
+                   b'</inventory>\n')
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
         self.assertRaises(AssertionError, repo._deserialise_inventory,
-                          'test-rev-id', inv_xml)
-        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
-        self.assertEqual('other-rev-id', inv.root.revision)
+                          b'test-rev-id', [inv_xml])
+        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
+        self.assertEqual(b'other-rev-id', inv.root.revision)

     def test_supports_external_lookups(self):
-        repo = self.make_repository('.',
-            format=controldir.format_registry.get('knit')())
+        repo = self.make_repository(
+            '.', format=controldir.format_registry.get('knit')())
         self.assertFalse(repo._format.supports_external_lookups)
@@ -503,30 +513,23 @@
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit1()
         tree = self.make_branch_and_tree('.', format)
-        tree.commit("Dull commit", rev_id="dull")
-        revision_tree = tree.branch.repository.revision_tree('dull')
-        revision_tree.lock_read()
-        try:
-            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
-                u'', revision_tree.get_root_id())
-        finally:
-            revision_tree.unlock()
+        tree.commit("Dull commit", rev_id=b"dull")
+        revision_tree = tree.branch.repository.revision_tree(b'dull')
+        with revision_tree.lock_read():
+            self.assertRaises(
+                errors.NoSuchFile, revision_tree.get_file_lines, u'')
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         upgrade.Convert('.', format)
         tree = workingtree.WorkingTree.open('.')
-        revision_tree = tree.branch.repository.revision_tree('dull')
-        revision_tree.lock_read()
-        try:
-            revision_tree.get_file_lines(u'', revision_tree.get_root_id())
-        finally:
-            revision_tree.unlock()
-        tree.commit("Another dull commit", rev_id='dull2')
-        revision_tree = tree.branch.repository.revision_tree('dull2')
+        revision_tree = tree.branch.repository.revision_tree(b'dull')
+        with revision_tree.lock_read():
+            revision_tree.get_file_lines(u'')
+        tree.commit("Another dull commit", rev_id=b'dull2')
+        revision_tree = tree.branch.repository.revision_tree(b'dull2')
         revision_tree.lock_read()
         self.addCleanup(revision_tree.unlock)
-        self.assertEqual('dull',
-            revision_tree.get_file_revision(u'', revision_tree.get_root_id()))
+        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))

     def test_supports_external_lookups(self):
         format = bzrdir.BzrDirMetaFormat1()
@@ -556,70 +559,70 @@
         builder = self.make_branch_builder('source', format='2a')
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))],
-            revision_id='1')
-        builder.build_snapshot(['1'], [
-            ('modify', ('file-id', 'content-2\n'))],
-            revision_id='2')
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', b'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', ('file', b'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', b'1',), (b'file-id', b'2',)])
+        file_1_details = details[(b'file-id', b'1')]
+        file_2_details = details[(b'file-id', b'2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot(None, [
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', b'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', ('file', b'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', b'1',), (b'file-id', b'2',)])
+        file_1_details = details[(b'file-id', b'1')]
+        file_2_details = details[(b'file-id', b'2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot(None, [
+            ('add', ('', b'root-id', 'directory', '')),
+            ('add', ('file', b'file-id', 'file', b'content\n'))],
+            revision_id=b'1')
+        builder.build_snapshot([b'1'], [
+            ('modify', ('file', b'content-2\n'))],
+            revision_id=b'2')
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [(b'file-id', b'1',), (b'file-id', b'2',)])
+        file_1_details = details[(b'file-id', b'1')]
+        file_2_details = details[(b'file-id', b'2')]
         # The index, and what to read off disk, should be the same for both
         # versions of the file.
         self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
@@ -694,24 +697,24 @@
     def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
         source_builder = self.make_branch_builder('source',
                                                   format='2a')
         # We have to build a fairly large tree, so that we are sure the chk
         # pages will have split into multiple pages.
-        entries = [('add', ('', 'a-root-id', 'directory', None))]
+        entries = [('add', ('', b'a-root-id', 'directory', None))]
         for i in 'abcdefghijklmnopqrstuvwxyz123456789':
             for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                 fname = i + j
-                fid = fname + '-id'
-                content = 'content for %s\n' % (fname,)
+                fid = fname.encode('utf-8') + b'-id'
+                content = b'content for %s\n' % (fname.encode('utf-8'),)
                 entries.append(('add', (fname, fid, 'file', content)))
         source_builder.start_series()
-        source_builder.build_snapshot(None, entries, revision_id='rev-1')
+        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
         # Now change a few of them, so we get a few new pages for the second
         # revision
-        source_builder.build_snapshot(['rev-1'], [
-            ('modify', ('aa-id', 'new content for aa-id\n')),
-            ('modify', ('cc-id', 'new content for cc-id\n')),
-            ('modify', ('zz-id', 'new content for zz-id\n')),
-            ], revision_id='rev-2')
+        source_builder.build_snapshot([b'rev-1'], [
+            ('modify', ('aa', b'new content for aa-id\n')),
+            ('modify', ('cc', b'new content for cc-id\n')),
+            ('modify', ('zz', b'new content for zz-id\n')),
+            ], revision_id=b'rev-2')
         source_builder.finish_series()
         source_branch = source_builder.get_branch()
         source_branch.lock_read()
@@ -723,32 +726,32 @@
         # On a regular pass, getting the inventories and chk pages for rev-2
         # would only get the newly created chk pages
-        search = vf_search.SearchResult({'rev-2'}, {'rev-1'}, 1,
-                                        {'rev-2'})
-        simple_chk_records = []
+        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
+                                        {b'rev-2'})
+        simple_chk_records = set()
         for vf_name, substream in source.get_stream(search):
             if vf_name == 'chk_bytes':
                 for record in substream:
-                    simple_chk_records.append(record.key)
+                    simple_chk_records.add(record.key)
             else:
                 for _ in substream:
                     continue
         # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
-                         simple_chk_records)
+        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
+                         set(simple_chk_records))
         # Now, when we do a similar call using 'get_stream_for_missing_keys'
         # we should get a much larger set of pages.
-        missing = [('inventories', 'rev-2')]
-        full_chk_records = []
+        missing = [('inventories', b'rev-2')]
+        full_chk_records = set()
         for vf_name, substream in source.get_stream_for_missing_keys(missing):
             if vf_name == 'inventories':
                 for record in substream:
-                    self.assertEqual(('rev-2',), record.key)
+                    self.assertEqual((b'rev-2',), record.key)
             elif vf_name == 'chk_bytes':
                 for record in substream:
-                    full_chk_records.append(record.key)
+                    full_chk_records.add(record.key)
             else:
                 self.fail('Should not be getting a stream of %s' % (vf_name,))
         # We have 257 records now. This is because we have 1 root page, and 256
@@ -771,25 +774,29 @@
         source = self.make_repository('source', format='pack-0.92')
         target = self.make_repository('target', format='pack-0.92')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)

     def test_source_to_exact_pack_rich_root_pack(self):
         source = self.make_repository('source', format='rich-root-pack')
         target = self.make_repository('target', format='rich-root-pack')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)

     def test_source_to_exact_pack_19(self):
         source = self.make_repository('source', format='1.9')
         target = self.make_repository('target', format='1.9')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)

     def test_source_to_exact_pack_19_rich_root(self):
         source = self.make_repository('source', format='1.9-rich-root')
         target = self.make_repository('target', format='1.9-rich-root')
         stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
+        self.assertIsInstance(
+            stream_source, knitpack_repo.KnitPackStreamSource)

     def test_source_to_remote_exact_pack_19(self):
         trans = self.make_smart_server('target')
@@ -835,57 +843,60 @@
         super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
         self.builder = self.make_branch_builder('source')
         self.builder.start_series()
-        self.builder.build_snapshot(None,
-            [('add', ('', 'tree-root', 'directory', None))],
-            revision_id='initial')
+        self.builder.build_snapshot(
+            None,
+            [('add', ('', b'tree-root', 'directory', None))],
+            revision_id=b'initial')
        self.repo = self.builder.get_branch().repository
         self.addCleanup(self.builder.finish_series)

     def assertParentIds(self, expected_result, rev_set):
-        self.assertEqual(sorted(expected_result),
+        self.assertEqual(
+            sorted(expected_result),
             sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

     def test_simple(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id='revid2')
-        rev_set = ['revid2']
-        self.assertParentIds(['revid1'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
+        rev_set = [b'revid2']
+        self.assertParentIds([b'revid1'], rev_set)

     def test_not_first_parent(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id='revid2')
-        self.builder.build_snapshot(['revid2'], [], revision_id='revid3')
-        rev_set = ['revid3', 'revid2']
-        self.assertParentIds(['revid1'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
+        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
+        rev_set = [b'revid3', b'revid2']
+        self.assertParentIds([b'revid1'], rev_set)

     def test_not_null(self):
-        rev_set = ['initial']
+        rev_set = [b'initial']
         self.assertParentIds([], rev_set)

     def test_not_null_set(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
         rev_set = [_mod_revision.NULL_REVISION]
         self.assertParentIds([], rev_set)

     def test_ghost(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        rev_set = ['ghost', 'revid1']
-        self.assertParentIds(['initial'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        rev_set = [b'ghost', b'revid1']
+        self.assertParentIds([b'initial'], rev_set)

     def test_ghost_parent(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        self.builder.build_snapshot(['revid1', 'ghost'], [], revision_id='revid2')
-        rev_set = ['revid2', 'revid1']
-        self.assertParentIds(['ghost', 'initial'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot(
+            [b'revid1', b'ghost'], [], revision_id=b'revid2')
+        rev_set = [b'revid2', b'revid1']
+        self.assertParentIds([b'ghost', b'initial'], rev_set)

     def test_righthand_parent(self):
-        self.builder.build_snapshot(None, [], revision_id='revid1')
-        self.builder.build_snapshot(['revid1'], [], revision_id='revid2a')
-        self.builder.build_snapshot(['revid1'], [], revision_id='revid2b')
-        self.builder.build_snapshot(['revid2a', 'revid2b'], [],
-                                    revision_id='revid3')
-        rev_set = ['revid3', 'revid2a']
-        self.assertParentIds(['revid1', 'revid2b'], rev_set)
+        self.builder.build_snapshot(None, [], revision_id=b'revid1')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
+        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
+        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
+                                    revision_id=b'revid3')
+        rev_set = [b'revid3', b'revid2a']
+        self.assertParentIds([b'revid1', b'revid2b'], rev_set)


 class TestWithBrokenRepo(TestCaseWithTransport):
@@ -903,66 +914,70 @@
             repo.start_write_group()
             cleanups.append(repo.commit_write_group)
             # make rev1a: A well-formed revision, containing 'file1'
-            inv = inventory.Inventory(revision_id='rev1a')
-            inv.root.revision = 'rev1a'
-            self.add_file(repo, inv, 'file1', 'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
-            repo.add_inventory('rev1a', inv, [])
-            revision = _mod_revision.Revision('rev1a',
+            inv = inventory.Inventory(revision_id=b'rev1a')
+            inv.root.revision = b'rev1a'
+            self.add_file(repo, inv, 'file1', b'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
+            repo.add_inventory(b'rev1a', inv, [])
+            revision = _mod_revision.Revision(
+                b'rev1a',
                 committer='jrandom@example.com', timestamp=0,
                 inventory_sha1='', timezone=0, message='foo', parent_ids=[])
-            repo.add_revision('rev1a', revision, inv)
+            repo.add_revision(b'rev1a', revision, inv)

             # make rev1b, which has no Revision, but has an Inventory, and
             # file1
-            inv = inventory.Inventory(revision_id='rev1b')
-            inv.root.revision = 'rev1b'
-            self.add_file(repo, inv, 'file1', 'rev1b', [])
-            repo.add_inventory('rev1b', inv, [])
+            inv = inventory.Inventory(revision_id=b'rev1b')
+            inv.root.revision = b'rev1b'
+            self.add_file(repo, inv, 'file1', b'rev1b', [])
+            repo.add_inventory(b'rev1b', inv, [])

             # make rev2, with file1 and file2
             # file2 is sane
             # file1 has 'rev1b' as an ancestor, even though this is not
             # mentioned by 'rev1a', making it an unreferenced ancestor
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
-            self.add_file(repo, inv, 'file2', 'rev2', [])
-            self.add_revision(repo, 'rev2', inv, ['rev1a'])
+            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
+            self.add_file(repo, inv, 'file2', b'rev2', [])
+            self.add_revision(repo, b'rev2', inv, [b'rev1a'])

             # make ghost revision rev1c
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', 'rev1c', [])
+            self.add_file(repo, inv, 'file2', b'rev1c', [])

             # make rev3 with file2
             # file2 refers to 'rev1c', which is a ghost in this repository, so
             # file2 cannot have rev1c as its ancestor.
             inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
-            self.add_revision(repo, 'rev3', inv, ['rev1c'])
+            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
+            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
             return repo
         finally:
             for cleanup in reversed(cleanups):
                 cleanup()

     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
         repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
-        revision = _mod_revision.Revision(revision_id,
+        revision = _mod_revision.Revision(
+            revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
             timezone=0, message='foo', parent_ids=parent_ids)
         repo.add_revision(revision_id, revision, inv)

     def add_file(self, repo, inv, filename, revision, parents):
-        file_id = filename + '-id'
-        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
+        file_id = filename.encode('utf-8') + b'-id'
+        content = [b'line\n']
+        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
         entry.revision = revision
+        entry.text_sha1 = osutils.sha_strings(content)
         entry.text_size = 0
         inv.add(entry)
         text_key = (file_id, revision)
         parent_keys = [(file_id, parent) for parent in parents]
-        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
+        repo.texts.add_lines(text_key, parent_keys, content)

     def test_insert_from_broken_repo(self):
         """Inserting a data stream from a broken repository won't silently
@@ -1117,23 +1132,23 @@
     def test_pack_distribution_one_to_nine(self):
         packs = self.get_packs()
         self.assertEqual([1],
-            packs.pack_distribution(1))
+                         packs.pack_distribution(1))
         self.assertEqual([1, 1],
-            packs.pack_distribution(2))
+                         packs.pack_distribution(2))
         self.assertEqual([1, 1, 1],
-            packs.pack_distribution(3))
+                         packs.pack_distribution(3))
         self.assertEqual([1, 1, 1, 1],
-            packs.pack_distribution(4))
+                         packs.pack_distribution(4))
         self.assertEqual([1, 1, 1, 1, 1],
-            packs.pack_distribution(5))
+                         packs.pack_distribution(5))
         self.assertEqual([1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(6))
+                         packs.pack_distribution(6))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(7))
+                         packs.pack_distribution(7))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(8))
+                         packs.pack_distribution(8))
         self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
-            packs.pack_distribution(9))
+                         packs.pack_distribution(9))

     def test_pack_distribution_stable_at_boundaries(self):
         """When there are multi-rev packs the counts are stable."""
@@ -1299,23 +1316,23 @@
         packs._remove_pack_from_memory(removed_pack)
         names = packs.names()
         all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = {x[0][0] for x in new_nodes}
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        new_names = {x[0] for x in new_nodes}
+        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
         self.assertEqual(set(names) - set(orig_names), new_names)
         self.assertEqual({new_pack.name}, new_names)
         self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
+                         sorted([x[0] for x in deleted_nodes]))
         packs.reload_pack_names()
         reloaded_names = packs.names()
         self.assertEqual(orig_at_load, packs._packs_at_load)
         self.assertEqual(names, reloaded_names)
         all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = {x[0][0] for x in new_nodes}
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        new_names = {x[0] for x in new_nodes}
+        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
         self.assertEqual(set(names) - set(orig_names), new_names)
         self.assertEqual({new_pack.name}, new_names)
         self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
+                         sorted([x[0] for x in deleted_nodes]))

     def test_autopack_obsoletes_new_pack(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
@@ -1476,18 +1494,18 @@
         builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', None)),
-            ('add', ('f', 'f-id', 'file', 'content\n'))],
-            revision_id='A')
-        builder.build_snapshot(['A'],
-            [('modify', ('f-id', 'new-content\n'))],
-            revision_id='B')
-        builder.build_snapshot(['B'],
-            [('modify', ('f-id', 'third-content\n'))],
-            revision_id='C')
-        builder.build_snapshot(['C'],
-            [('modify', ('f-id', 'fourth-content\n'))],
-            revision_id='D')
+            ('add', ('', b'root-id', 'directory', None)),
+            ('add', ('f', b'f-id', 'file', b'content\n'))],
+            revision_id=b'A')
+        builder.build_snapshot([b'A'],
+                               [('modify', ('f', b'new-content\n'))],
+                               revision_id=b'B')
+        builder.build_snapshot([b'B'],
+                               [('modify', ('f', b'third-content\n'))],
+                               revision_id=b'C')
+        builder.build_snapshot([b'C'],
+                               [('modify', ('f', b'fourth-content\n'))],
+                               revision_id=b'D')
         b = builder.get_branch()
         b.lock_read()
         builder.finish_series()
@@ -1533,14 +1551,14 @@
         builder = self.make_branch_builder('source')
         builder.start_series()
         builder.build_snapshot(None, [
-            ('add', ('', 'root-id', 'directory', None)),
-            ('add', ('file', 'file-id', 'file', 'content\n')),
-            ], revision_id='A')
-        builder.build_snapshot(['A'], [
-            ('add', ('dir', 'dir-id', 'directory', None))],
-            revision_id='B')
-        builder.build_snapshot(['B'], [
-            ('modify', ('file-id', 'new content\n'))],
-            revision_id='C')
+            ('add', ('', b'root-id', 'directory', None)),
+            ('add', ('file', b'file-id', 'file', b'content\n')),
+            ], revision_id=b'A')
+        builder.build_snapshot([b'A'], [
+            ('add', ('dir', b'dir-id', 'directory', None))],
+            revision_id=b'B')
+        builder.build_snapshot([b'B'], [
+            ('modify', ('file', b'new content\n'))],
+            revision_id=b'C')
         builder.finish_series()
         return builder.get_branch()
@@ -1569,29 +1589,29 @@
         self.addCleanup(repo_not_stacked.unlock)
         # Now we should have a pack file with A's inventory, but not its
         # revision
-        self.assertEqual([('A',), ('B',)],
+        self.assertEqual([(b'A',), (b'B',)],
                          sorted(repo_not_stacked.inventories.keys()))
-        self.assertEqual([('B',)],
+        self.assertEqual([(b'B',)],
                          sorted(repo_not_stacked.revisions.keys()))
         stacked_pack_names = repo_not_stacked._pack_collection.names()
         # We have a couple names here, figure out which has A's inventory
         for name in stacked_pack_names:
             pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
             keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
-            if ('A',) in keys:
+            if (b'A',) in keys:
                 inv_a_pack_name = name
                 break
         else:
             self.fail('Could not find pack containing A\'s inventory')
-        repo_not_stacked.fetch(b_source.repository, 'A')
-        self.assertEqual([('A',), ('B',)],
+        repo_not_stacked.fetch(b_source.repository, b'A')
+        self.assertEqual([(b'A',), (b'B',)],
                          sorted(repo_not_stacked.revisions.keys()))
         new_pack_names = set(repo_not_stacked._pack_collection.names())
         rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
         self.assertEqual(1, len(rev_a_pack_names))
         rev_a_pack_name = list(rev_a_pack_names)[0]
         # Now fetch 'C', so we have a couple pack files to join
-        repo_not_stacked.fetch(b_source.repository, 'C')
+        repo_not_stacked.fetch(b_source.repository, b'C')
         rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
         rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
         self.assertEqual(1, len(rev_c_pack_names))