/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
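
The changes below are to bzrlib/tests/test_repository.py, so after branching you can exercise just that module with bzr's own test runner. A minimal sketch, assuming a working bzr/bzrlib build on your path:

bzr selftest bzrlib.tests.test_repository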

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Martin Pool
  • Date: 2009-09-14 01:48:28 UTC
  • mfrom: (4685 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4688.
  • Revision ID: mbp@sourcefrog.net-20090914014828-ydr9rlkdfq2sv57z
Merge news
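
With a local copy of the branch, the merge recorded above can be inspected directly by revision id. For example (a sketch, assuming this revision is present in your branch's ancestry):

bzr log -v -r revid:mbp@sourcefrog.net-20090914014828-ydr9rlkdfq2sv57z
bzr diff -c revid:mbp@sourcefrog.net-20090914014828-ydr9rlkdfq2sv57z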

=== modified file 'bzrlib/tests/test_repository.py'
@@ -486,6 +486,8 @@
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
     def get_graph(self):
@@ -542,11 +544,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -673,15 +681,62 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(TestCaseWithTransport):
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
     def test_format_pack_compresses_True(self):
         repo = self.make_repository('repo', format='2a')
         self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_tree('repo', format="2a")
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
         revid = tree.commit("foo")
+        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
@@ -696,12 +751,19 @@
         # at 20 unchanged commits, chk pages are packed that are split into
         # two groups such that the new pack being made doesn't have all its
         # pages in the source packs (though they are in the repository).
-        tree = self.make_branch_and_tree('tree', format='2a')
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))
 
     def test_pack_with_hint(self):
-        tree = self.make_branch_and_tree('tree', format='2a')
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
@@ -946,6 +1008,7 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -986,6 +1049,7 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
+        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -1008,14 +1072,17 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
-        # about why this was turned into expectFailure
-        self.expectFailure('new Stream fetch fills in missing compression'
-           ' parents (bug #389141)',
-           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
-                              empty_repo.fetch, broken_repo)
-        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
-                          empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -1030,7 +1097,7 @@
 
     def make_packs_and_alt_repo(self, write_lock=False):
         """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.')
+        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
         self.addCleanup(tree.unlock)
         rev1 = tree.commit('one')
@@ -1346,7 +1413,7 @@
     """Tests for the packs repository Packer class."""
 
     def test_pack_optimizes_pack_order(self):
-        builder = self.make_branch_builder('.')
+        builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot('A', None, [
             ('add', ('', 'root-id', 'directory', None)),