        pack_names = [node[1][0] for node in index.iter_all_entries()]
        self.assertTrue(large_pack_name in pack_names)

    def test_commit_write_group_returns_new_pack_names(self):
        # commit_write_group returns new pack names
        format = self.get_format()
        tree = self.make_branch_and_tree('foo', format=format)
        tree.commit('first post')
        repo = tree.branch.repository
        repo.lock_write()
        try:
            repo.start_write_group()
            try:
                inv = inventory.Inventory(revision_id="A")
                inv.root.revision = "A"
                repo.texts.add_lines((inv.root.file_id, "A"), [], [])
                rev = _mod_revision.Revision(timestamp=0, timezone=None,
                    committer="Foo Bar <foo@example.com>", message="Message",
                    revision_id="A")
                rev.parent_ids = ()
                repo.add_revision("A", rev, inv=inv)
            except:
                repo.abort_write_group()
                raise
            else:
                old_names = repo._pack_collection._names.keys()
                result = repo.commit_write_group()
                cur_names = repo._pack_collection._names.keys()
                new_names = list(set(cur_names) - set(old_names))
                self.assertEqual(new_names, result)
        finally:
            repo.unlock()
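        # commit_write_group() is expected to return exactly the pack names
        # that were added to the collection, which the set difference above
        # computes independently of the return value.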

    def test_fail_obsolete_deletion(self):
        # failing to delete obsolete packs is not fatal
        format = self.get_format()
        self.assertRaises(errors.NoSuchRevision,
            missing_ghost.get_inventory, 'ghost')

    def make_write_ready_repo(self):
        format = self.get_format()
        if isinstance(format.repository_format, RepositoryFormatCHK1):
            raise TestNotApplicable("No missing compression parents")
        repo = self.make_repository('.', format=format)
        repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        self.addCleanup(repo.abort_write_group)
        return repo
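
    # The four tests below each poison the _missing_compression_parents set
    # on one versioned-files index (inventories, revisions, signatures or
    # texts) and check that commit_write_group() then refuses to commit.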

    def test_missing_inventories_compression_parent_prevents_commit(self):
        repo = self.make_write_ready_repo()
        key = ('junk',)
        repo.inventories._index._missing_compression_parents.add(key)
        self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
        self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
609
def test_missing_revisions_compression_parent_prevents_commit(self):
577
610
repo = self.make_write_ready_repo()
579
if not getattr(repo.inventories._index, '_missing_compression_parents',
581
raise TestSkipped("No missing compression parents")
582
612
repo.revisions._index._missing_compression_parents.add(key)
583
613
self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
584
614
self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
585
repo.abort_write_group()

    def test_missing_signatures_compression_parent_prevents_commit(self):
        repo = self.make_write_ready_repo()
        key = ('junk',)
        repo.signatures._index._missing_compression_parents.add(key)
        self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
        self.assertRaises(errors.BzrCheckError, repo.commit_write_group)

    def test_missing_text_compression_parent_prevents_commit(self):
        repo = self.make_write_ready_repo()
        key = ('some', 'junk')
        repo.texts._index._missing_compression_parents.add(key)
        self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
        e = self.assertRaises(errors.BzrCheckError, repo.commit_write_group)

    def test_supports_external_lookups(self):
        repo = self.make_repository('.', format=self.get_format())

    def test_suspend_write_group(self):
        self.vfs_transport_factory = memory.MemoryServer
        repo = self.make_repository('repo', format=self.get_format())
        token = repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        repo.texts.add_lines(('file-id', 'revid'), (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        expected_pack_name = wg_tokens[0] + '.pack'
        expected_names = [wg_tokens[0] + ext for ext in
            ('.rix', '.iix', '.tix', '.six')]
        if repo.chk_bytes is not None:
            expected_names.append(wg_tokens[0] + '.cix')
        expected_names.append(expected_pack_name)
        upload_transport = repo._pack_collection._upload_transport
        limbo_files = upload_transport.list_dir('')
        self.assertEqual(sorted(expected_names), sorted(limbo_files))
        md5 = osutils.md5(upload_transport.get_bytes(expected_pack_name))
        self.assertEqual(wg_tokens[0], md5.hexdigest())
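        # Note: the token returned by suspend_write_group() names the pending
        # pack in the upload (limbo) directory and, as asserted above, is the
        # md5 of the pack's bytes; the matching index files sit beside it.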

    def test_resume_chk_bytes(self):
        self.vfs_transport_factory = memory.MemoryServer
        repo = self.make_repository('repo', format=self.get_format())
        if repo.chk_bytes is None:
            raise TestNotApplicable('no chk_bytes for this repository')
        token = repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        text = 'a bit of text\n'
        key = ('sha1:' + osutils.sha_string(text),)
        repo.chk_bytes.add_lines(key, (), [text])
        wg_tokens = repo.suspend_write_group()
        same_repo = repo.bzrdir.open_repository()
        same_repo.lock_write()
        self.addCleanup(same_repo.unlock)
        same_repo.resume_write_group(wg_tokens)
        self.assertEqual([key], list(same_repo.chk_bytes.keys()))
        self.assertEqual(
            text, same_repo.chk_bytes.get_record_stream([key],
                'unordered', True).next().get_bytes_as('fulltext'))
        same_repo.abort_write_group()
        self.assertEqual([], list(same_repo.chk_bytes.keys()))
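        # Resuming the suspended group in a fresh repository object makes the
        # pending chk_bytes record visible again; aborting the resumed group
        # discards it, as the final assertion checks.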

    def test_resume_write_group_then_abort(self):
        # Create a repo, start a write group, insert some data, suspend.
        self.vfs_transport_factory = memory.MemoryServer
        repo = self.make_repository('repo', format=self.get_format())
        token = repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        # Get a fresh repository object for the repo on the filesystem.
        same_repo = repo.bzrdir.open_repository()
        # Resume the write group, then abort it; nothing should be left on
        # disk afterwards.
        same_repo.lock_write()
        self.addCleanup(same_repo.unlock)
        same_repo.resume_write_group(wg_tokens)
        same_repo.abort_write_group()
        self.assertEqual(
            [], same_repo._pack_collection._upload_transport.list_dir(''))
        self.assertEqual(
            [], same_repo._pack_collection._pack_transport.list_dir(''))

    def test_commit_resumed_write_group(self):
        self.vfs_transport_factory = memory.MemoryServer
        repo = self.make_repository('repo', format=self.get_format())
        token = repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        # Get a fresh repository object for the repo on the filesystem.
        same_repo = repo.bzrdir.open_repository()
        same_repo.lock_write()
        self.addCleanup(same_repo.unlock)
        same_repo.resume_write_group(wg_tokens)
        same_repo.commit_write_group()
        expected_pack_name = wg_tokens[0] + '.pack'
        expected_names = [wg_tokens[0] + ext for ext in
            ('.rix', '.iix', '.tix', '.six')]
        if repo.chk_bytes is not None:
            expected_names.append(wg_tokens[0] + '.cix')
        self.assertEqual(
            [], same_repo._pack_collection._upload_transport.list_dir(''))
        index_names = repo._pack_collection._index_transport.list_dir('')
        self.assertEqual(sorted(expected_names), sorted(index_names))
        pack_names = repo._pack_collection._pack_transport.list_dir('')
        self.assertEqual([expected_pack_name], pack_names)
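        # After committing the resumed group the upload directory is empty:
        # the pack ends up under the pack transport and its index files
        # (.rix/.iix/.tix/.six, plus .cix for CHK formats) under the index
        # transport.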

    def test_resume_malformed_token(self):
        self.vfs_transport_factory = memory.MemoryServer
        # Make a repository with a suspended write group
        repo = self.make_repository('repo', format=self.get_format())
        token = repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        # can only stack on repositories that have compatible internal
        # metadata
        if getattr(repo._format, 'supports_tree_reference', False):
            matching_format_name = 'pack-0.92-subtree'
        else:
            if repo._format.supports_chks:
                matching_format_name = 'development6-rich-root'
            else:
                matching_format_name = 'rich-root-pack'
        mismatching_format_name = 'pack-0.92'
        # We don't have a non-rich-root CHK format.
        if getattr(repo._format, 'supports_tree_reference', False):
            # can only stack on repositories that have compatible internal
            # metadata
            matching_format_name = 'pack-0.92-subtree'
            mismatching_format_name = 'rich-root-pack'
        else:
            if repo.supports_rich_root():
                if repo._format.supports_chks:
                    matching_format_name = 'development6-rich-root'
                else:
                    matching_format_name = 'rich-root-pack'
                mismatching_format_name = 'pack-0.92-subtree'
            else:
                raise TestNotApplicable('No formats use non-v5 serializer'
                    ' in this test.')
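        # Stacking is only allowed between repositories with compatible
        # internal metadata, so the matching/mismatching format names above
        # are chosen per the repository's tree-reference, CHK and rich-root
        # support.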
        self.assertTrue(large_pack_name in pack_names)

class TestKeyDependencies(TestCaseWithTransport):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir(self.format_name)

    def create_source_and_target(self):
        builder = self.make_branch_builder('source', format=self.get_format())
        builder.start_series()
        builder.build_snapshot('A-id', None, [
            ('add', ('', 'root-id', 'directory', None))])
        builder.build_snapshot('B-id', ['A-id', 'ghost-id'], [])
        builder.finish_series()
        repo = self.make_repository('target')
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        repo.lock_write()
        self.addCleanup(repo.unlock)
        return b.repository, repo
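
    # 'B-id' lists 'ghost-id' as a parent that does not exist in the source
    # repository, so streaming B's revision record into the target leaves an
    # unsatisfied key dependency for the tests below to inspect.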

    def test_key_dependencies_cleared_on_abort(self):
        source_repo, target_repo = self.create_source_and_target()
        target_repo.start_write_group()
        try:
            stream = source_repo.revisions.get_record_stream([('B-id',)],
                'unordered', True)
            target_repo.revisions.insert_record_stream(stream)
            key_refs = target_repo.revisions._index._key_dependencies
            self.assertEqual([('B-id',)], sorted(key_refs.get_referrers()))
        finally:
            target_repo.abort_write_group()
        self.assertEqual([], sorted(key_refs.get_referrers()))

    def test_key_dependencies_cleared_on_suspend(self):
        source_repo, target_repo = self.create_source_and_target()
        target_repo.start_write_group()
        try:
            stream = source_repo.revisions.get_record_stream([('B-id',)],
                'unordered', True)
            target_repo.revisions.insert_record_stream(stream)
            key_refs = target_repo.revisions._index._key_dependencies
            self.assertEqual([('B-id',)], sorted(key_refs.get_referrers()))
        finally:
            target_repo.suspend_write_group()
        self.assertEqual([], sorted(key_refs.get_referrers()))

    def test_key_dependencies_cleared_on_commit(self):
        source_repo, target_repo = self.create_source_and_target()
        target_repo.start_write_group()
        try:
            stream = source_repo.revisions.get_record_stream([('B-id',)],
                'unordered', True)
            target_repo.revisions.insert_record_stream(stream)
            key_refs = target_repo.revisions._index._key_dependencies
            self.assertEqual([('B-id',)], sorted(key_refs.get_referrers()))
        finally:
            target_repo.commit_write_group()
        self.assertEqual([], sorted(key_refs.get_referrers()))
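
    # In all three cases the _key_dependencies tracker ends up empty once the
    # write group is aborted, suspended or committed, so no stale referrers
    # to the ghost revision survive.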

class TestSmartServerAutopack(TestCaseWithTransport):


        dict(format_name='development6-rich-root',
             format_string='Bazaar development format - group compression '
                 'and chk inventory (needs bzr.dev from 1.14)\n',
             format_supports_external_lookups=True,
             index_class=BTreeGraphIndex),
        ]
    # name of the scenario is the format name