class KnitRepositoryStreamTests(test_knit.KnitTests):
    """Tests for knitrepo._get_stream_as_bytes."""

    def test_get_stream_as_bytes(self):
        k1 = self.make_test_knit()
        k1.add_lines('text-a', [], test_knit.split_lines(test_knit.TEXT_1))
        # Serialise it, check the output.
        bytes = knitrepo._get_stream_as_bytes(k1, ['text-a'])
        data = bencode.bdecode(bytes)
        format, record = data
        self.assertEqual('knit-plain', format)
        self.assertEqual(['text-a', ['fulltext'], []], record[:3])
        self.assertRecordContentEqual(k1, 'text-a', record[3])
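        # The decoded stream is a bencoded list: a format marker followed by
        # one [version_id, options, parents, content] entry per record, which
        # is what the asserts above unpack.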
 
 
    def test_get_stream_as_bytes_all(self):
        """Get a serialised data stream for all the records in a knit.

        Much like test_get_stream_all, except for get_stream_as_bytes.
        """
        k1 = self.make_test_knit()
        # Insert the same data as BasicKnitTests.test_knit_join, as they seem
        # to cover a range of cases (no parents, one parent, multiple parents).
        test_data = [
            ('text-a', [], test_knit.TEXT_1),
            ('text-b', ['text-a'], test_knit.TEXT_1),
            ('text-c', [], test_knit.TEXT_1),
            ('text-d', ['text-c'], test_knit.TEXT_1),
            ('text-m', ['text-b', 'text-d'], test_knit.TEXT_1),
            ]
        # This test is actually a bit strict as the order in which they're
        # returned is not defined.  This matches the current (deterministic)
        # behaviour.
        expected_data_list = [
            # version, options, parents
            ('text-a', ['fulltext'], []),
            ('text-b', ['line-delta'], ['text-a']),
            ('text-m', ['line-delta'], ['text-b', 'text-d']),
            ('text-c', ['fulltext'], []),
            ('text-d', ['line-delta'], ['text-c']),
            ]
        for version_id, parents, lines in test_data:
            k1.add_lines(version_id, parents, test_knit.split_lines(lines))

        bytes = knitrepo._get_stream_as_bytes(
            k1, ['text-a', 'text-b', 'text-m', 'text-c', 'text-d', ])
        data = bencode.bdecode(bytes)
        format = data.pop(0)
        self.assertEqual('knit-plain', format)
        for expected, actual in zip(expected_data_list, data):
            expected_version = expected[0]
            expected_options = expected[1]
            expected_parents = expected[2]
            version, options, parents, bytes = actual
            self.assertEqual(expected_version, version)
            self.assertEqual(expected_options, options)
            self.assertEqual(expected_parents, parents)
            self.assertRecordContentEqual(k1, version, bytes)
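        # Note that zip() stops at the shorter sequence, so these asserts
        # implicitly rely on the stream actually containing all five
        # requested records.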
 

class DummyRepository(object):
    """A dummy repository for testing."""

    _format = None
    _serializer = None

    def supports_rich_root(self):
        if self._format is not None:
            return self._format.rich_root_data
        return False

    def get_graph(self):
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        raise NotImplementedError


class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing where we use InterDummy to test
    InterRepository behaviour.
    """
 
 
class Test2a(tests.TestCaseWithMemoryTransport):

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
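        # (The test name refers to the two separate compression groups created
        # by the two source snapshots being combined into one group on fetch.)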
 
    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
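        # 65536 bytes (64kB) is the maximum node size format 2a configures
        # for both CHK maps in the inventory.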
 
    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))
 
    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)
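        # pack(hint=...) repacks only the named pack files: the two hinted
        # packs collapse into a single new pack while the pack recorded in
        # to_keep is left untouched.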
 
    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)
 
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
            set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
 
    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
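        # Only the revisions index treats an insertion that contradicts an
        # existing entry as fatal; the other indices tolerate it (they just
        # warn rather than raise).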
 

class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)
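        # KnitPackStreamSource is only selected when source and target share
        # the exact same format; any mismatch (or a non-pack target) falls
        # back to the generic StreamSource, as the assertions above show.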
 

class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 

class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def test_broken_repository_fetch(self):
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
class TestKnitPackNoSubtrees(TestCaseWithTransport):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def test_disk_layout(self):
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        t = repo.bzrdir.get_repository_transport(None)
        self.check_format(t)
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.check_databases(t)

    def check_format(self, t):
        self.assertEqualDiff(
            "Bazaar pack repository format 1 (needs bzr 0.92)\n",
                             t.get('format').read())

    def assertHasKndx(self, t, knit_name):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff('# bzr knit index 8\n',
                             t.get(knit_name + '.kndx').read())

    def assertHasNoKndx(self, t, knit_name):
        """Assert that knit_name has no index on t."""
        self.assertFalse(t.has(knit_name + '.kndx'))

    def assertHasNoKnit(self, t, knit_name):
        """Assert that knit_name has no knit content on t."""
        self.assertFalse(t.has(knit_name + '.knit'))

    def check_databases(self, t):
        """check knit content for a repository."""
        # check conversion worked
        self.assertHasNoKndx(t, 'inventory')
        self.assertHasNoKnit(t, 'inventory')
        self.assertHasNoKndx(t, 'revisions')
        self.assertHasNoKnit(t, 'revisions')
        self.assertHasNoKndx(t, 'signatures')
        self.assertHasNoKnit(t, 'signatures')
        self.assertFalse(t.has('knits'))
        # revision-indexes file-container directory
        self.assertEqual([],
            list(GraphIndex(t, 'pack-names', None).iter_all_entries()))
        self.assertTrue(S_ISDIR(t.stat('packs').st_mode))
        self.assertTrue(S_ISDIR(t.stat('upload').st_mode))
        self.assertTrue(S_ISDIR(t.stat('indices').st_mode))
        self.assertTrue(S_ISDIR(t.stat('obsolete_packs').st_mode))

    def test_shared_disk_layout(self):
        format = self.get_format()
        repo = self.make_repository('.', shared=True, format=format)
        t = repo.bzrdir.get_repository_transport(None)
        self.check_format(t)
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        # We should have a 'shared-storage' marker file.
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.check_databases(t)

    def test_shared_no_tree_disk_layout(self):
        format = self.get_format()
        repo = self.make_repository('.', shared=True, format=format)
        repo.set_make_working_trees(False)
        t = repo.bzrdir.get_repository_transport(None)
        self.check_format(t)
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        # We should have a 'shared-storage' marker file.
        self.assertEqualDiff('', t.get('shared-storage').read())
        # We should have a marker for the no-working-trees flag.
        self.assertEqualDiff('', t.get('no-working-trees').read())
        # The marker should go when we toggle the setting.
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.check_databases(t)

    def test_adding_revision_creates_pack_indices(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        self.assertEqual([],
            list(GraphIndex(trans, 'pack-names', None).iter_all_entries()))
        tree.commit('foobarbaz')
        index = GraphIndex(trans, 'pack-names', None)
        index_nodes = list(index.iter_all_entries())
        self.assertEqual(1, len(index_nodes))
        node = index_nodes[0]
        name = node[1][0]
        # the pack sizes should be listed in the index
        pack_value = node[2]
        sizes = [int(digits) for digits in pack_value.split(' ')]
        for size, suffix in zip(sizes, ['.rix', '.iix', '.tix', '.six']):
            stat = trans.stat('indices/%s%s' % (name, suffix))
            self.assertEqual(size, stat.st_size)
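        # .rix/.iix/.tix/.six are the revision, inventory, text and signature
        # indices for the pack; their on-disk sizes are recorded in the
        # pack-names index value checked here.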
 
 
    def test_pulling_nothing_leads_to_no_new_names(self):
        format = self.get_format()
        tree1 = self.make_branch_and_tree('1', format=format)
        tree2 = self.make_branch_and_tree('2', format=format)
        tree1.branch.repository.fetch(tree2.branch.repository)
        trans = tree1.branch.repository.bzrdir.get_repository_transport(None)
        self.assertEqual([],
            list(GraphIndex(trans, 'pack-names', None).iter_all_entries()))

    def test_commit_across_pack_shape_boundary_autopacks(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        # This test could be a little cheaper by replacing the packs
        # attribute on the repository to allow a different pack distribution
        # and max packs policy - so we are checking the policy is honoured
        # in the test. But for now 11 commits is not a big deal in a single
        # test.
        for x in range(9):
            tree.commit('commit %s' % x)
        # there should be 9 packs:
        index = GraphIndex(trans, 'pack-names', None)
        self.assertEqual(9, len(list(index.iter_all_entries())))
        # insert some files in obsolete_packs which should be removed by pack.
        trans.put_bytes('obsolete_packs/foo', '123')
        trans.put_bytes('obsolete_packs/bar', '321')
        # committing one more should coalesce to 1 of 10.
        tree.commit('commit triggering pack')
        index = GraphIndex(trans, 'pack-names', None)
        self.assertEqual(1, len(list(index.iter_all_entries())))
        # packing should not damage data
        tree = tree.bzrdir.open_workingtree()
        check_result = tree.branch.repository.check(
            [tree.branch.last_revision()])
        # We should have 50 (10x5) files in the obsolete_packs directory.
        obsolete_files = list(trans.list_dir('obsolete_packs'))
        self.assertFalse('foo' in obsolete_files)
        self.assertFalse('bar' in obsolete_files)
        self.assertEqual(50, len(obsolete_files))
        # XXX: Todo check packs obsoleted correctly - old packs and indices
        # in the obsolete_packs directory.
        large_pack_name = list(index.iter_all_entries())[0][1][0]
        # finally, committing again should not touch the large pack.
        tree.commit('commit not triggering pack')
        index = GraphIndex(trans, 'pack-names', None)
        self.assertEqual(2, len(list(index.iter_all_entries())))
        pack_names = [node[1][0] for node in index.iter_all_entries()]
        self.assertTrue(large_pack_name in pack_names)

    def test_fail_obsolete_deletion(self):
        # failing to delete obsolete packs is not fatal
        format = self.get_format()
        server = fakenfs.FakeNFSServer()
        server.setUp()
        self.addCleanup(server.tearDown)
        transport = get_transport(server.get_url())
        bzrdir = self.get_format().initialize_on_transport(transport)
        repo = bzrdir.create_repository()
        repo_transport = bzrdir.get_repository_transport(None)
        self.assertTrue(repo_transport.has('obsolete_packs'))
        # these files are in use by another client and typically can't be deleted
        repo_transport.put_bytes('obsolete_packs/.nfsblahblah', 'contents')
        repo._pack_collection._clear_obsolete_packs()
        self.assertTrue(repo_transport.has('obsolete_packs/.nfsblahblah'))

    def test_pack_after_two_commits_packs_everything(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        tree.commit('start')
        tree.commit('more work')
        tree.branch.repository.pack()
        # there should be 1 pack:
        index = GraphIndex(trans, 'pack-names', None)
        self.assertEqual(1, len(list(index.iter_all_entries())))
        self.assertEqual(2, len(tree.branch.repository.all_revision_ids()))

    def test_pack_layout(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        tree.commit('start', rev_id='1')
        tree.commit('more work', rev_id='2')
        tree.branch.repository.pack()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        pack = tree.branch.repository._pack_collection.get_pack_by_name(
            tree.branch.repository._pack_collection.names()[0])
        # revision access tends to be tip->ancestor, so ordering that way on
        # disk is a good idea.
        for _1, key, val, refs in pack.revision_index.iter_all_entries():
            if key == ('1',):
                pos_1 = int(val[1:].split()[0])
            else:
                pos_2 = int(val[1:].split()[0])
        self.assertTrue(pos_2 < pos_1)
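        # i.e. revision '2' (the tip) is stored at a lower byte offset in the
        # pack than its ancestor '1'.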
 
 
    def test_pack_repositories_support_multiple_write_locks(self):
        format = self.get_format()
        self.make_repository('.', shared=True, format=format)
        r1 = repository.Repository.open('.')
        r2 = repository.Repository.open('.')
        r1.lock_write()
        self.addCleanup(r1.unlock)
        r2.lock_write()
        r2.unlock()

    def _add_text(self, repo, fileid):
        """Add a text to the repository within a write group."""
        vf = repo.weave_store.get_weave(fileid, repo.get_transaction())
        vf.add_lines('samplerev+' + fileid, [], [])

    def test_concurrent_writers_merge_new_packs(self):
        format = self.get_format()
        self.make_repository('.', shared=True, format=format)
        r1 = repository.Repository.open('.')
        r2 = repository.Repository.open('.')
        r1.lock_write()
        try:
            # access enough data to load the names list
            list(r1.all_revision_ids())
            r2.lock_write()
            try:
                # access enough data to load the names list
                list(r2.all_revision_ids())
                r1.start_write_group()
                try:
                    r2.start_write_group()
                    try:
                        self._add_text(r1, 'fileidr1')
                        self._add_text(r2, 'fileidr2')
                    except:
                        r2.abort_write_group()
                        raise
                except:
                    r1.abort_write_group()
                    raise
                # both r1 and r2 have open write groups with data in them
                # created while the other's write group was open.
                # Commit both which requires a merge to the pack-names.
                try:
                    r1.commit_write_group()
                except:
                    r1.abort_write_group()
                    r2.abort_write_group()
                    raise
                r2.commit_write_group()
                # tell r1 to reload from disk
                r1._pack_collection.reset()
                # Now both repositories should know about both names
                r1._pack_collection.ensure_loaded()
                r2._pack_collection.ensure_loaded()
                self.assertEqual(r1._pack_collection.names(), r2._pack_collection.names())
                self.assertEqual(2, len(r1._pack_collection.names()))
            finally:
                r2.unlock()
        finally:
            r1.unlock()
 
 
    def test_concurrent_writer_second_preserves_dropping_a_pack(self):
        format = self.get_format()
        self.make_repository('.', shared=True, format=format)
        r1 = repository.Repository.open('.')
        r2 = repository.Repository.open('.')
        # add a pack to drop
        r1.lock_write()
        try:
            r1.start_write_group()
            try:
                self._add_text(r1, 'fileidr1')
            except:
                r1.abort_write_group()
                raise
            else:
                r1.commit_write_group()
            r1._pack_collection.ensure_loaded()
            name_to_drop = r1._pack_collection.all_packs()[0].name
        finally:
            r1.unlock()
        r1.lock_write()
        try:
            # access enough data to load the names list
            list(r1.all_revision_ids())
            r2.lock_write()
            try:
                # access enough data to load the names list
                list(r2.all_revision_ids())
                r1._pack_collection.ensure_loaded()
                try:
                    r2.start_write_group()
                    try:
                        # in r1, drop the pack
                        r1._pack_collection._remove_pack_from_memory(
                            r1._pack_collection.get_pack_by_name(name_to_drop))
                        # in r2, add a pack
                        self._add_text(r2, 'fileidr2')
                    except:
                        r2.abort_write_group()
                        raise
                except:
                    r1._pack_collection.reset()
                    raise
                # r1 has a changed names list, and r2 an open write groups with
                # data.
                # save r1, and then commit the r2 write group, which requires a
                # merge to the pack-names, which should not reinstate
                # name_to_drop
                try:
                    r1._pack_collection._save_pack_names()
                    r1._pack_collection.reset()
                except:
                    r2.abort_write_group()
                    raise
                try:
                    r2.commit_write_group()
                except:
                    r2.abort_write_group()
                    raise
                # Now both repositories should know about just one name.
                r1._pack_collection.ensure_loaded()
                r2._pack_collection.ensure_loaded()
                self.assertEqual(r1._pack_collection.names(), r2._pack_collection.names())
                self.assertEqual(1, len(r1._pack_collection.names()))
                self.assertFalse(name_to_drop in r1._pack_collection.names())
            finally:
                r2.unlock()
        finally:
            r1.unlock()
 
 
    def test_lock_write_does_not_physically_lock(self):
        repo = self.make_repository('.', format=self.get_format())
        repo.lock_write()
        self.addCleanup(repo.unlock)
        self.assertFalse(repo.get_physical_lock_status())

    def prepare_for_break_lock(self):
        # Setup the global ui factory state so that a break-lock method call
        # will find usable input in the input stream.
        old_factory = bzrlib.ui.ui_factory
        def restoreFactory():
            bzrlib.ui.ui_factory = old_factory
        self.addCleanup(restoreFactory)
        bzrlib.ui.ui_factory = bzrlib.ui.SilentUIFactory()
        bzrlib.ui.ui_factory.stdin = StringIO("y\n")

    def test_break_lock_breaks_physical_lock(self):
        repo = self.make_repository('.', format=self.get_format())
        repo._pack_collection.lock_names()
        repo2 = repository.Repository.open('.')
        self.assertTrue(repo.get_physical_lock_status())
        self.prepare_for_break_lock()
        repo2.break_lock()
        self.assertFalse(repo.get_physical_lock_status())

    def test_broken_physical_locks_error_on__unlock_names_lock(self):
        repo = self.make_repository('.', format=self.get_format())
        repo._pack_collection.lock_names()
        self.assertTrue(repo.get_physical_lock_status())
        repo2 = repository.Repository.open('.')
        self.prepare_for_break_lock()
        repo2.break_lock()
        self.assertRaises(errors.LockBroken, repo._pack_collection._unlock_names)
 
 
    def test_fetch_without_find_ghosts_ignores_ghosts(self):
        # we want two repositories at this point:
        # one with a revision that is a ghost in the other
        # repository.
        # 'ghost' is present in has_ghost, 'ghost' is absent in 'missing_ghost'.
        # 'references' is present in both repositories, and 'tip' is present
        # just in has_ghost.
        # has_ghost       missing_ghost
        #------------------------------
        # 'ghost'             -
        # 'references'    'references'
        # 'tip'               -
        # In this test we fetch 'tip' which should not fetch 'ghost'
        has_ghost = self.make_repository('has_ghost', format=self.get_format())
        missing_ghost = self.make_repository('missing_ghost',
            format=self.get_format())

        def add_commit(repo, revision_id, parent_ids):
            repo.lock_write()
            repo.start_write_group()
            inv = inventory.Inventory(revision_id=revision_id)
            inv.root.revision = revision_id
            root_id = inv.root.file_id
            sha1 = repo.add_inventory(revision_id, inv, [])
            vf = repo.weave_store.get_weave_or_empty(root_id,
                repo.get_transaction())
            vf.add_lines(revision_id, [], [])
            rev = bzrlib.revision.Revision(timestamp=0,
                                           timezone=None,
                                           committer="Foo Bar <foo@example.com>",
                                           message="Message",
                                           inventory_sha1=sha1,
                                           revision_id=revision_id)
            rev.parent_ids = parent_ids
            repo.add_revision(revision_id, rev)
            repo.commit_write_group()
            repo.unlock()
        add_commit(has_ghost, 'ghost', [])
        add_commit(has_ghost, 'references', ['ghost'])
        add_commit(missing_ghost, 'references', ['ghost'])
        add_commit(has_ghost, 'tip', ['references'])
        missing_ghost.fetch(has_ghost, 'tip')
        # missing ghost now has tip and not ghost.
        rev = missing_ghost.get_revision('tip')
        inv = missing_ghost.get_inventory('tip')
        self.assertRaises(errors.NoSuchRevision,
            missing_ghost.get_revision, 'ghost')
        self.assertRaises(errors.RevisionNotPresent,
            missing_ghost.get_inventory, 'ghost')

    def test_supports_external_lookups(self):
        repo = self.make_repository('.', format=self.get_format())
        self.assertFalse(repo._format.supports_external_lookups)
 
 

class TestKnitPackSubtrees(TestKnitPackNoSubtrees):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir(
            'pack-0.92-subtree')

    def check_format(self, t):
        self.assertEqualDiff(
            "Bazaar pack repository format 1 with subtree support (needs bzr 0.92)\n",
            t.get('format').read())


class TestDevelopment0(TestKnitPackNoSubtrees):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir(
            'development')

    def check_format(self, t):
        self.assertEqualDiff(
            "Bazaar development format 0 (needs bzr.dev from before 1.3)\n",
            t.get('format').read())


class TestDevelopment0Subtree(TestKnitPackNoSubtrees):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir(
            'development-subtree')

    def check_format(self, t):
        self.assertEqualDiff(
            "Bazaar development format 0 with subtree support "
            "(needs bzr.dev from before 1.3)\n",
            t.get('format').read())
 

class TestRepositoryPackCollection(TestCaseWithTransport):

    def test_get_pack_by_name(self):
        # Assumed setup: a tree with one commit so the collection has a pack.
        tree = self.make_branch_and_tree('.')
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())
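        # reload_pack_names() returns True only when the on-disk pack list
        # actually differed from the in-memory one, hence the final False.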
 
    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
 
    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))
 

class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""
 
    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
 

class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)
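        # _optimize_for_size asks the index builders to spend extra effort
        # producing smaller indices, which suits a one-off explicit pack.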
 

class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
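
    # 'IDS' above refers to the InterDifferingSerializer fetch path (see the
    # class below), in contrast to the stream/sink path run_stream exercises.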
 
 
class TestInterDifferingSerializer(TestCaseWithTransport):

    def test_progress_bar(self):
        tree = self.make_branch_and_tree('tree')
        tree.commit('rev1', rev_id='rev-1')
        tree.commit('rev2', rev_id='rev-2')
        tree.commit('rev3', rev_id='rev-3')
        repo = self.make_repository('repo')
        inter_repo = repository.InterDifferingSerializer(
            tree.branch.repository, repo)
        pb = progress.InstrumentedProgress(to_file=StringIO())
        pb.never_throttle = True
        inter_repo.fetch('rev-1', pb)
        self.assertEqual('Transferring revisions', pb.last_msg)
        self.assertEqual(1, pb.last_cnt)
        self.assertEqual(1, pb.last_total)
        inter_repo.fetch('rev-3', pb)
        self.assertEqual(2, pb.last_cnt)
        self.assertEqual(2, pb.last_total)