/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/tests/test_pack_repository.py

  • Committer: John Arbash Meinel
  • Date: 2009-07-08 14:37:25 UTC
  • mfrom: (4516 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4517.
  • Revision ID: john@arbash-meinel.com-20090708143725-sc9sjy3mz4cxwxzz
  • Commit message: Merge bzr.dev 4516

--- bzrlib/tests/test_pack_repository.py (before merge)
+++ bzrlib/tests/test_pack_repository.py (after merge)
@@ -38,6 +38,10 @@
     upgrade,
     workingtree,
     )
+from bzrlib.repofmt import (
+    pack_repo,
+    groupcompress_repo,
+    )
 from bzrlib.repofmt.groupcompress_repo import RepositoryFormatCHK1
 from bzrlib.smart import (
     client,
@@ -234,6 +238,35 @@
         pack_names = [node[1][0] for node in index.iter_all_entries()]
         self.assertTrue(large_pack_name in pack_names)

+    def test_commit_write_group_returns_new_pack_names(self):
+        format = self.get_format()
+        tree = self.make_branch_and_tree('foo', format=format)
+        tree.commit('first post')
+        repo = tree.branch.repository
+        repo.lock_write()
+        try:
+            repo.start_write_group()
+            try:
+                inv = inventory.Inventory(revision_id="A")
+                inv.root.revision = "A"
+                repo.texts.add_lines((inv.root.file_id, "A"), [], [])
+                rev = _mod_revision.Revision(timestamp=0, timezone=None,
+                    committer="Foo Bar <foo@example.com>", message="Message",
+                    revision_id="A")
+                rev.parent_ids = ()
+                repo.add_revision("A", rev, inv=inv)
+            except:
+                repo.abort_write_group()
+                raise
+            else:
+                old_names = repo._pack_collection._names.keys()
+                result = repo.commit_write_group()
+                cur_names = repo._pack_collection._names.keys()
+                new_names = list(set(cur_names) - set(old_names))
+                self.assertEqual(new_names, result)
+        finally:
+            repo.unlock()
+
     def test_fail_obsolete_deletion(self):
         # failing to delete obsolete packs is not fatal
         format = self.get_format()
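For readers following the API change rather than the diff itself: the new test above pins down that, for pack-based repositories, commit_write_group() reports the names of the packs the write group created. A minimal standalone sketch of that usage follows; it mirrors the test, the 'scratch' path is illustrative, and the pack-name bookkeeping via repo._pack_collection._names is copied from the test rather than being a public API.

# Minimal sketch mirroring test_commit_write_group_returns_new_pack_names.
# Assumes a bzrlib of this era is importable; 'scratch' is an illustrative path.
from bzrlib import bzrdir

tree = bzrdir.BzrDir.create_standalone_workingtree('scratch')
tree.commit('first post')
repo = tree.branch.repository
repo.lock_write()
try:
    repo.start_write_group()
    try:
        # ...add texts, inventories and revisions here, as the test does...
        pass
    except:
        repo.abort_write_group()
        raise
    else:
        old_names = set(repo._pack_collection._names.keys())
        result = repo.commit_write_group()  # names of the packs just created
        new_names = set(repo._pack_collection._names.keys()) - old_names
        # The test asserts result == list(new_names) for pack repositories.
finally:
    repo.unlock()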
@@ -556,58 +589,43 @@
             missing_ghost.get_inventory, 'ghost')

     def make_write_ready_repo(self):
-        repo = self.make_repository('.', format=self.get_format())
+        format = self.get_format()
+        if isinstance(format.repository_format, RepositoryFormatCHK1):
+            raise TestNotApplicable("No missing compression parents")
+        repo = self.make_repository('.', format=format)
         repo.lock_write()
+        self.addCleanup(repo.unlock)
         repo.start_write_group()
+        self.addCleanup(repo.abort_write_group)
         return repo

     def test_missing_inventories_compression_parent_prevents_commit(self):
         repo = self.make_write_ready_repo()
         key = ('junk',)
-        if not getattr(repo.inventories._index, '_missing_compression_parents',
-            None):
-            raise TestSkipped("No missing compression parents")
         repo.inventories._index._missing_compression_parents.add(key)
         self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
         self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
-        repo.abort_write_group()
-        repo.unlock()

     def test_missing_revisions_compression_parent_prevents_commit(self):
         repo = self.make_write_ready_repo()
         key = ('junk',)
-        if not getattr(repo.inventories._index, '_missing_compression_parents',
-            None):
-            raise TestSkipped("No missing compression parents")
         repo.revisions._index._missing_compression_parents.add(key)
         self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
         self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
-        repo.abort_write_group()
-        repo.unlock()

     def test_missing_signatures_compression_parent_prevents_commit(self):
         repo = self.make_write_ready_repo()
         key = ('junk',)
-        if not getattr(repo.inventories._index, '_missing_compression_parents',
-            None):
-            raise TestSkipped("No missing compression parents")
         repo.signatures._index._missing_compression_parents.add(key)
         self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
         self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
-        repo.abort_write_group()
-        repo.unlock()

     def test_missing_text_compression_parent_prevents_commit(self):
         repo = self.make_write_ready_repo()
         key = ('some', 'junk')
-        if not getattr(repo.inventories._index, '_missing_compression_parents',
-            None):
-            raise TestSkipped("No missing compression parents")
         repo.texts._index._missing_compression_parents.add(key)
         self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
         e = self.assertRaises(errors.BzrCheckError, repo.commit_write_group)
-        repo.abort_write_group()
-        repo.unlock()

     def test_supports_external_lookups(self):
         repo = self.make_repository('.', format=self.get_format())
@@ -620,7 +638,7 @@
         Also requires that the exception is logged.
         """
         self.vfs_transport_factory = memory.MemoryServer
-        repo = self.make_repository('repo')
+        repo = self.make_repository('repo', format=self.get_format())
         token = repo.lock_write()
         self.addCleanup(repo.unlock)
         repo.start_write_group()
@@ -637,7 +655,7 @@

     def test_abort_write_group_does_raise_when_not_suppressed(self):
         self.vfs_transport_factory = memory.MemoryServer
-        repo = self.make_repository('repo')
+        repo = self.make_repository('repo', format=self.get_format())
         token = repo.lock_write()
         self.addCleanup(repo.unlock)
         repo.start_write_group()
@@ -650,23 +668,51 @@

     def test_suspend_write_group(self):
         self.vfs_transport_factory = memory.MemoryServer
-        repo = self.make_repository('repo')
+        repo = self.make_repository('repo', format=self.get_format())
         token = repo.lock_write()
         self.addCleanup(repo.unlock)
         repo.start_write_group()
         repo.texts.add_lines(('file-id', 'revid'), (), ['lines'])
         wg_tokens = repo.suspend_write_group()
         expected_pack_name = wg_tokens[0] + '.pack'
+        expected_names = [wg_tokens[0] + ext for ext in
+                            ('.rix', '.iix', '.tix', '.six')]
+        if repo.chk_bytes is not None:
+            expected_names.append(wg_tokens[0] + '.cix')
+        expected_names.append(expected_pack_name)
         upload_transport = repo._pack_collection._upload_transport
         limbo_files = upload_transport.list_dir('')
-        self.assertTrue(expected_pack_name in limbo_files, limbo_files)
+        self.assertEqual(sorted(expected_names), sorted(limbo_files))
         md5 = osutils.md5(upload_transport.get_bytes(expected_pack_name))
         self.assertEqual(wg_tokens[0], md5.hexdigest())

+    def test_resume_chk_bytes(self):
+        self.vfs_transport_factory = memory.MemoryServer
+        repo = self.make_repository('repo', format=self.get_format())
+        if repo.chk_bytes is None:
+            raise TestNotApplicable('no chk_bytes for this repository')
+        token = repo.lock_write()
+        self.addCleanup(repo.unlock)
+        repo.start_write_group()
+        text = 'a bit of text\n'
+        key = ('sha1:' + osutils.sha_string(text),)
+        repo.chk_bytes.add_lines(key, (), [text])
+        wg_tokens = repo.suspend_write_group()
+        same_repo = repo.bzrdir.open_repository()
+        same_repo.lock_write()
+        self.addCleanup(same_repo.unlock)
+        same_repo.resume_write_group(wg_tokens)
+        self.assertEqual([key], list(same_repo.chk_bytes.keys()))
+        self.assertEqual(
+            text, same_repo.chk_bytes.get_record_stream([key],
+                'unordered', True).next().get_bytes_as('fulltext'))
+        same_repo.abort_write_group()
+        self.assertEqual([], list(same_repo.chk_bytes.keys()))
+
     def test_resume_write_group_then_abort(self):
         # Create a repo, start a write group, insert some data, suspend.
         self.vfs_transport_factory = memory.MemoryServer
-        repo = self.make_repository('repo')
+        repo = self.make_repository('repo', format=self.get_format())
         token = repo.lock_write()
         self.addCleanup(repo.unlock)
         repo.start_write_group()
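The suspend/resume tests above (and test_commit_resumed_write_group further down) exercise the write-group suspend cycle: suspend_write_group() parks the pending pack under the upload ("limbo") directory and returns tokens, which a fresh repository object can later resume and then commit or abort. A condensed sketch of that cycle, assuming an already write-locked pack-format repository object named repo, as in the tests:

# Condensed sketch of the suspend/resume cycle the tests above exercise.
# Assumes `repo` is a pack-format repository that is already write-locked.
repo.start_write_group()
repo.texts.add_lines(('file-id', 'revid'), (), ['lines'])
wg_tokens = repo.suspend_write_group()      # pending pack parked in upload/ "limbo"

same_repo = repo.bzrdir.open_repository()   # fresh object for the same repository
same_repo.lock_write()
try:
    same_repo.resume_write_group(wg_tokens)
    same_repo.commit_write_group()          # or same_repo.abort_write_group()
finally:
    same_repo.unlock()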
@@ -685,10 +731,38 @@
         self.assertEqual(
             [], same_repo._pack_collection._pack_transport.list_dir(''))

+    def test_commit_resumed_write_group(self):
+        self.vfs_transport_factory = memory.MemoryServer
+        repo = self.make_repository('repo', format=self.get_format())
+        token = repo.lock_write()
+        self.addCleanup(repo.unlock)
+        repo.start_write_group()
+        text_key = ('file-id', 'revid')
+        repo.texts.add_lines(text_key, (), ['lines'])
+        wg_tokens = repo.suspend_write_group()
+        # Get a fresh repository object for the repo on the filesystem.
+        same_repo = repo.bzrdir.open_repository()
+        # Resume
+        same_repo.lock_write()
+        self.addCleanup(same_repo.unlock)
+        same_repo.resume_write_group(wg_tokens)
+        same_repo.commit_write_group()
+        expected_pack_name = wg_tokens[0] + '.pack'
+        expected_names = [wg_tokens[0] + ext for ext in
+                            ('.rix', '.iix', '.tix', '.six')]
+        if repo.chk_bytes is not None:
+            expected_names.append(wg_tokens[0] + '.cix')
+        self.assertEqual(
+            [], same_repo._pack_collection._upload_transport.list_dir(''))
+        index_names = repo._pack_collection._index_transport.list_dir('')
+        self.assertEqual(sorted(expected_names), sorted(index_names))
+        pack_names = repo._pack_collection._pack_transport.list_dir('')
+        self.assertEqual([expected_pack_name], pack_names)
+
     def test_resume_malformed_token(self):
         self.vfs_transport_factory = memory.MemoryServer
         # Make a repository with a suspended write group
-        repo = self.make_repository('repo')
+        repo = self.make_repository('repo', format=self.get_format())
         token = repo.lock_write()
         self.addCleanup(repo.unlock)
         repo.start_write_group()
@@ -696,7 +770,7 @@
         repo.texts.add_lines(text_key, (), ['lines'])
         wg_tokens = repo.suspend_write_group()
         # Make a new repository
-        new_repo = self.make_repository('new_repo')
+        new_repo = self.make_repository('new_repo', format=self.get_format())
         token = new_repo.lock_write()
         self.addCleanup(new_repo.unlock)
         hacked_wg_token = (
@@ -732,12 +806,12 @@
             # can only stack on repositories that have compatible internal
             # metadata
             if getattr(repo._format, 'supports_tree_reference', False):
+                matching_format_name = 'pack-0.92-subtree'
+            else:
                 if repo._format.supports_chks:
                     matching_format_name = 'development6-rich-root'
                 else:
-                    matching_format_name = 'pack-0.92-subtree'
-            else:
-                matching_format_name = 'rich-root-pack'
+                    matching_format_name = 'rich-root-pack'
             mismatching_format_name = 'pack-0.92'
         else:
             # We don't have a non-rich-root CHK format.
@@ -763,15 +837,14 @@
         if getattr(repo._format, 'supports_tree_reference', False):
             # can only stack on repositories that have compatible internal
             # metadata
-            if repo._format.supports_chks:
-                # No CHK subtree formats in bzr.dev, so this doesn't execute.
-                matching_format_name = 'development6-subtree'
-            else:
-                matching_format_name = 'pack-0.92-subtree'
+            matching_format_name = 'pack-0.92-subtree'
             mismatching_format_name = 'rich-root-pack'
         else:
             if repo.supports_rich_root():
-                matching_format_name = 'rich-root-pack'
+                if repo._format.supports_chks:
+                    matching_format_name = 'development6-rich-root'
+                else:
+                    matching_format_name = 'rich-root-pack'
                 mismatching_format_name = 'pack-0.92-subtree'
             else:
                 raise TestNotApplicable('No formats use non-v5 serializer'
@@ -844,6 +917,66 @@
         self.assertTrue(large_pack_name in pack_names)


+class TestKeyDependencies(TestCaseWithTransport):
+
+    def get_format(self):
+        return bzrdir.format_registry.make_bzrdir(self.format_name)
+
+    def create_source_and_target(self):
+        builder = self.make_branch_builder('source', format=self.get_format())
+        builder.start_series()
+        builder.build_snapshot('A-id', None, [
+            ('add', ('', 'root-id', 'directory', None))])
+        builder.build_snapshot('B-id', ['A-id', 'ghost-id'], [])
+        builder.finish_series()
+        repo = self.make_repository('target')
+        b = builder.get_branch()
+        b.lock_read()
+        self.addCleanup(b.unlock)
+        repo.lock_write()
+        self.addCleanup(repo.unlock)
+        return b.repository, repo
+
+    def test_key_dependencies_cleared_on_abort(self):
+        source_repo, target_repo = self.create_source_and_target()
+        target_repo.start_write_group()
+        try:
+            stream = source_repo.revisions.get_record_stream([('B-id',)],
+                                                             'unordered', True)
+            target_repo.revisions.insert_record_stream(stream)
+            key_refs = target_repo.revisions._index._key_dependencies
+            self.assertEqual([('B-id',)], sorted(key_refs.get_referrers()))
+        finally:
+            target_repo.abort_write_group()
+        self.assertEqual([], sorted(key_refs.get_referrers()))
+
+    def test_key_dependencies_cleared_on_suspend(self):
+        source_repo, target_repo = self.create_source_and_target()
+        target_repo.start_write_group()
+        try:
+            stream = source_repo.revisions.get_record_stream([('B-id',)],
+                                                             'unordered', True)
+            target_repo.revisions.insert_record_stream(stream)
+            key_refs = target_repo.revisions._index._key_dependencies
+            self.assertEqual([('B-id',)], sorted(key_refs.get_referrers()))
+        finally:
+            target_repo.suspend_write_group()
+        self.assertEqual([], sorted(key_refs.get_referrers()))
+
+    def test_key_dependencies_cleared_on_commit(self):
+        source_repo, target_repo = self.create_source_and_target()
+        target_repo.start_write_group()
+        try:
+            stream = source_repo.revisions.get_record_stream([('B-id',)],
+                                                             'unordered', True)
+            target_repo.revisions.insert_record_stream(stream)
+            key_refs = target_repo.revisions._index._key_dependencies
+            self.assertEqual([('B-id',)], sorted(key_refs.get_referrers()))
+        finally:
+            target_repo.commit_write_group()
+        self.assertEqual([], sorted(key_refs.get_referrers()))
+
+
 class TestSmartServerAutopack(TestCaseWithTransport):

     def setUp(self):
@@ -931,7 +1064,7 @@
          dict(format_name='development6-rich-root',
               format_string='Bazaar development format - group compression '
                   'and chk inventory (needs bzr.dev from 1.14)\n',
-              format_supports_external_lookups=False,
+              format_supports_external_lookups=True,
               index_class=BTreeGraphIndex),
          ]
     # name of the scenario is the format name