/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Martin von Gagern
  • Date: 2010-04-20 08:47:38 UTC
  • mfrom: (5167 +trunk)
  • mto: This revision was merged to the branch mainline in revision 5195.
  • Revision ID: martin.vgagern@gmx.net-20100420084738-ygymnqmdllzrhpfn
  • Commit message: merge trunk

--- bzrlib/tests/test_repository.py (before merge)
+++ bzrlib/tests/test_repository.py (after merge)
@@ -1,4 +1,4 @@
-# Copyright (C) 2006, 2007, 2008 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,7 +12,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 """Tests for the Repository facility that are not interface tests.
 
@@ -24,6 +24,7 @@
 
 from stat import S_ISDIR
 from StringIO import StringIO
+import sys
 
 import bzrlib
 from bzrlib.errors import (NotBranchError,
@@ -31,7 +32,11 @@
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
-from bzrlib import graph
+from bzrlib import (
+    graph,
+    tests,
+    )
+from bzrlib.branchbuilder import BranchBuilder
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 from bzrlib.index import GraphIndex, InMemoryGraphIndex
 from bzrlib.repository import RepositoryFormat
@@ -46,9 +51,8 @@
     fakenfs,
     get_transport,
     )
-from bzrlib.transport.memory import MemoryServer
-from bzrlib.util import bencode
 from bzrlib import (
+    bencode,
     bzrdir,
     errors,
     inventory,
@@ -58,9 +62,15 @@
     revision as _mod_revision,
     symbol_versioning,
     upgrade,
+    versionedfile,
     workingtree,
     )
-from bzrlib.repofmt import knitrepo, weaverepo, pack_repo
+from bzrlib.repofmt import (
+    groupcompress_repo,
+    knitrepo,
+    pack_repo,
+    weaverepo,
+    )
 
 
 class TestDefaultFormat(TestCase):
@@ -95,7 +105,7 @@
 class SampleRepositoryFormat(repository.RepositoryFormat):
     """A sample format
 
-    this format is initializable, unsupported to aid in testing the 
+    this format is initializable, unsupported to aid in testing the
     open and open(unsupported=True) routines.
     """
 
@@ -122,7 +132,7 @@
     def test_find_format(self):
         # is the right format object found for a repository?
         # create a branch with a few known format objects.
-        # this is not quite the same as 
+        # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
         def check_format(format, url):
             dir = format._matchingbzrdir.initialize(url)
@@ -131,7 +141,7 @@
             found_format = repository.RepositoryFormat.find_format(dir)
             self.failUnless(isinstance(found_format, format.__class__))
         check_format(weaverepo.RepositoryFormat7(), "bar")
-        
+
     def test_find_format_no_repository(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         self.assertRaises(errors.NoRepositoryPresent,
@@ -167,19 +177,19 @@
         """Weaves need topological data insertion."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Weaves do not reuse deltas."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(False, repo._fetch_uses_deltas)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
     def test_attribute__fetch_reconcile(self):
         """Weave repositories need a reconcile after fetch."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(True, repo._fetch_reconcile)
+        self.assertEqual(True, repo._format._fetch_reconcile)
 
     def test_no_ancestry_weave(self):
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
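
The recurring change in this hunk and the ones that follow moves the
fetch-policy attributes (_fetch_order, _fetch_uses_deltas, _fetch_reconcile)
off the repository instance and onto its format object, so the tests now read
them through repo._format. A minimal, self-contained sketch of that kind of
attribute move (illustrative names only, not bzrlib code):

    class RepositoryFormat(object):
        # fetch policy now lives on the format, shared by every
        # repository opened with this format
        _fetch_order = 'topological'
        _fetch_uses_deltas = False
        _fetch_reconcile = True

    class Repository(object):
        def __init__(self, fmt):
            self._format = fmt

    repo = Repository(RepositoryFormat())
    # the old per-instance attribute is gone; read via the format:
    assert repo._format._fetch_order == 'topological'
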
@@ -202,19 +212,19 @@
         """Weaves need topological data insertion."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Weaves do not reuse deltas."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(False, repo._fetch_uses_deltas)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
     def test_attribute__fetch_reconcile(self):
         """Weave repositories need a reconcile after fetch."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(True, repo._fetch_reconcile)
+        self.assertEqual(True, repo._format._fetch_reconcile)
 
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -243,7 +253,14 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        tree.commit('first post', rev_id='first')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
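
The new try/except above keeps the commit mandatory everywhere except
Windows, where 'Foo:Bar' is not a usable file-id and the failure is recorded
as known rather than fatal. The control flow in isolation, with a stand-in
exception class (assumed names, not bzrlib code):

    import sys

    class IllegalPath(Exception):
        """Stand-in for the library's illegal-path error."""

    def commit_colon_file_id(simulate_windows_failure=False):
        if simulate_windows_failure:
            raise IllegalPath('Foo:Bar')

    try:
        commit_colon_file_id(simulate_windows_failure=(sys.platform == 'win32'))
    except IllegalPath:
        if sys.platform != 'win32':
            raise  # unexpected anywhere but Windows, so fail loudly
        # on Windows this is a known limitation; record it and stop
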
@@ -284,7 +301,7 @@
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
         t = control.get_repository_transport(None)
-        # TODO: Should check there is a 'lock' toplevel directory, 
+        # TODO: Should check there is a 'lock' toplevel directory,
         # regardless of contents
         self.assertFalse(t.has('lock/held/info'))
         repo.lock_write()
@@ -343,18 +360,18 @@
 
 
 class TestFormatKnit1(TestCaseWithTransport):
-    
+
     def test_attribute__fetch_order(self):
         """Knits need topological data insertion."""
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
-        self.assertEqual(True, repo._fetch_uses_deltas)
+        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -447,7 +464,7 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
@@ -459,9 +476,9 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo.deserialise_inventory,
+        self.assertRaises(AssertionError, repo._deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
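
deserialise_inventory becomes _deserialise_inventory here: the method is being
marked private, and all callers in this test file are updated in the same
commit. Whether a deprecated public alias was kept is not visible in this
diff; the shim below is only a hypothetical illustration of how such a rename
can stay compatible, not bzrlib code:

    import warnings

    class Repository(object):
        def _deserialise_inventory(self, revision_id, xml):
            # new, private implementation
            return (revision_id, xml)

        def deserialise_inventory(self, revision_id, xml):
            # hypothetical compatibility alias for old callers
            warnings.warn('deserialise_inventory is private now; use '
                          '_deserialise_inventory', DeprecationWarning)
            return self._deserialise_inventory(revision_id, xml)
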
@@ -473,11 +490,20 @@
 class DummyRepository(object):
     """A dummy repository for testing."""
 
+    _format = None
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
+    def get_graph(self):
+        raise NotImplementedError
+
+    def get_parent_map(self, revision_ids):
+        raise NotImplementedError
+
 
 class InterDummy(repository.InterRepository):
     """An inter-repository optimised code path for DummyRepository.
@@ -490,7 +516,7 @@
     @staticmethod
     def is_compatible(repo_source, repo_target):
         """InterDummy is compatible with DummyRepository."""
-        return (isinstance(repo_source, DummyRepository) and 
+        return (isinstance(repo_source, DummyRepository) and
             isinstance(repo_target, DummyRepository))
 
 
@@ -509,7 +535,7 @@
 
     def assertGetsDefaultInterRepository(self, repo_a, repo_b):
         """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
-        
+
         The effective default is now InterSameDataRepository because there is
         no actual sane default in the presence of incompatible data models.
         """
@@ -526,11 +552,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
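
The dummies now carry a real RepositoryFormat whose capability flags
(supports_tree_reference, rich_root_data) are copied from a concrete
repository, so any code that consults repo._format sees consistent answers.
The arrangement reduced to plain Python (illustrative, not bzrlib code):

    class Format(object):
        supports_tree_reference = False
        rich_root_data = False

    class DummyRepository(object):
        _format = None

        def supports_rich_root(self):
            if self._format is not None:
                return self._format.rich_root_data
            return False

    real_format = Format()
    real_format.rich_root_data = True

    dummy = DummyRepository()
    dummy._format = Format()
    # mirror the real format's capabilities onto the dummy's format
    dummy._format.rich_root_data = real_format.rich_root_data
    assert dummy.supports_rich_root() is True
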
@@ -599,10 +631,10 @@
 
 
 class TestMisc(TestCase):
-    
+
     def test_unescape_xml(self):
         """We get some kind of error when malformed entities are passed"""
-        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
+        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
 
 class TestRepositoryFormatKnit3(TestCaseWithTransport):
@@ -612,14 +644,14 @@
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         repo = self.make_repository('.', format=format)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         repo = self.make_repository('.', format=format)
-        self.assertEqual(True, repo._fetch_uses_deltas)
+        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_convert(self):
         """Ensure the upgrade adds weaves for roots"""
@@ -657,6 +689,337 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_format_pack_compresses_True(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo._format.pack_compresses)
+
+    def test_inventories_use_chk_map_with_parent_base_dict(self):
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
+        revid = tree.commit("foo")
+        tree.unlock()
+        tree.lock_read()
+        self.addCleanup(tree.unlock)
+        inv = tree.branch.repository.get_inventory(revid)
+        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
+        inv.parent_id_basename_to_file_id._ensure_root()
+        inv.id_to_entry._ensure_root()
+        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
+        self.assertEqual(65536,
+            inv.parent_id_basename_to_file_id._root_node.maximum_size)
+
+    def test_autopack_unchanged_chk_nodes(self):
+        # at 20 unchanged commits, chk pages are packed that are split into
+        # two groups such that the new pack being made doesn't have all its
+        # pages in the source packs (though they are in the repository).
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
+        for pos in range(20):
+            tree.commit(str(pos))
+
+    def test_pack_with_hint(self):
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
+        # 1 commit to leave untouched
+        tree.commit('1')
+        to_keep = tree.branch.repository._pack_collection.names()
+        # 2 to combine
+        tree.commit('2')
+        tree.commit('3')
+        all = tree.branch.repository._pack_collection.names()
+        combine = list(set(all) - set(to_keep))
+        self.assertLength(3, all)
+        self.assertLength(2, combine)
+        tree.branch.repository.pack(hint=combine)
+        final = tree.branch.repository._pack_collection.names()
+        self.assertLength(2, final)
+        self.assertFalse(combine[0] in final)
+        self.assertFalse(combine[1] in final)
+        self.assertSubset(to_keep, final)
+
+    def test_stream_source_to_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='2a')
+        stream = source._get_source(target._format)
+        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
+
+    def test_stream_source_to_non_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream = source._get_source(target._format)
+        # We don't want the child GroupCHKStreamSource
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
+        source_builder = self.make_branch_builder('source',
+                            format='2a')
+        # We have to build a fairly large tree, so that we are sure the chk
+        # pages will have split into multiple pages.
+        entries = [('add', ('', 'a-root-id', 'directory', None))]
+        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
+            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
+                fname = i + j
+                fid = fname + '-id'
+                content = 'content for %s\n' % (fname,)
+                entries.append(('add', (fname, fid, 'file', content)))
+        source_builder.start_series()
+        source_builder.build_snapshot('rev-1', None, entries)
+        # Now change a few of them, so we get a few new pages for the second
+        # revision
+        source_builder.build_snapshot('rev-2', ['rev-1'], [
+            ('modify', ('aa-id', 'new content for aa-id\n')),
+            ('modify', ('cc-id', 'new content for cc-id\n')),
+            ('modify', ('zz-id', 'new content for zz-id\n')),
+            ])
+        source_builder.finish_series()
+        source_branch = source_builder.get_branch()
+        source_branch.lock_read()
+        self.addCleanup(source_branch.unlock)
+        target = self.make_repository('target', format='2a')
+        source = source_branch.repository._get_source(target._format)
+        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
+
+        # On a regular pass, getting the inventories and chk pages for rev-2
+        # would only get the newly created chk pages
+        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
+                                    set(['rev-2']))
+        simple_chk_records = []
+        for vf_name, substream in source.get_stream(search):
+            if vf_name == 'chk_bytes':
+                for record in substream:
+                    simple_chk_records.append(record.key)
+            else:
+                for _ in substream:
+                    continue
+        # 3 pages, the root (InternalNode), + 2 pages which actually changed
+        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
+                         simple_chk_records)
+        # Now, when we do a similar call using 'get_stream_for_missing_keys'
+        # we should get a much larger set of pages.
+        missing = [('inventories', 'rev-2')]
+        full_chk_records = []
+        for vf_name, substream in source.get_stream_for_missing_keys(missing):
+            if vf_name == 'inventories':
+                for record in substream:
+                    self.assertEqual(('rev-2',), record.key)
+            elif vf_name == 'chk_bytes':
+                for record in substream:
+                    full_chk_records.append(record.key)
+            else:
+                self.fail('Should not be getting a stream of %s' % (vf_name,))
+        # We have 257 records now. This is because we have 1 root page, and 256
+        # leaf pages in a complete listing.
+        self.assertEqual(257, len(full_chk_records))
+        self.assertSubset(simple_chk_records, full_chk_records)
+
+    def test_inconsistency_fatal(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo.revisions._index._inconsistency_fatal)
+        self.assertFalse(repo.texts._index._inconsistency_fatal)
+        self.assertFalse(repo.inventories._index._inconsistency_fatal)
+        self.assertFalse(repo.signatures._index._inconsistency_fatal)
+        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
+
+
+class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
+
+    def test_source_to_exact_pack_092(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='pack-0.92')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_rich_root_pack(self):
+        source = self.make_repository('source', format='rich-root-pack')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19_rich_root(self):
+        source = self.make_repository('source', format='1.9-rich-root')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_remote_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_stream_source_to_non_exact(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='1.9')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_stream_source_to_non_exact_rich_root(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_source_to_remote_non_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.6')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIs(type(stream_source), repository.StreamSource)
+
+    def test_stream_source_to_knit(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='dirstate')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+
+class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
+    """Tests for _find_parent_ids_of_revisions."""
+
+    def setUp(self):
+        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
+        self.builder = self.make_branch_builder('source',
+            format='development6-rich-root')
+        self.builder.start_series()
+        self.builder.build_snapshot('initial', None,
+            [('add', ('', 'tree-root', 'directory', None))])
+        self.repo = self.builder.get_branch().repository
+        self.addCleanup(self.builder.finish_series)
+
+    def assertParentIds(self, expected_result, rev_set):
+        self.assertEqual(sorted(expected_result),
+            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
+
+    def test_simple(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        rev_set = ['revid2']
+        self.assertParentIds(['revid1'], rev_set)
+
+    def test_not_first_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2'], [])
+        rev_set = ['revid3', 'revid2']
+        self.assertParentIds(['revid1'], rev_set)
+
+    def test_not_null(self):
+        rev_set = ['initial']
+        self.assertParentIds([], rev_set)
+
+    def test_not_null_set(self):
+        self.builder.build_snapshot('revid1', None, [])
+        rev_set = [_mod_revision.NULL_REVISION]
+        self.assertParentIds([], rev_set)
+
+    def test_ghost(self):
+        self.builder.build_snapshot('revid1', None, [])
+        rev_set = ['ghost', 'revid1']
+        self.assertParentIds(['initial'], rev_set)
+
+    def test_ghost_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
+        rev_set = ['revid2', 'revid1']
+        self.assertParentIds(['ghost', 'initial'], rev_set)
+
+    def test_righthand_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2a', ['revid1'], [])
+        self.builder.build_snapshot('revid2b', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
+        rev_set = ['revid3', 'revid2a']
+        self.assertParentIds(['revid1', 'revid2b'], rev_set)
+
+
 class TestWithBrokenRepo(TestCaseWithTransport):
     """These tests seem to be more appropriate as interface tests?"""
 
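
Among the new Test2a tests, test_pack_with_hint exercises
repository.pack(hint=...): the hint is the list of pack names created after a
baseline commit, computed as a set difference against the names recorded
earlier, and only those packs get combined. The selection step on its own
(plain Python, illustrative values):

    # pack names on disk after the one commit we want to leave untouched
    to_keep = ['pack-a']
    # pack names after two further commits
    all_names = ['pack-a', 'pack-b', 'pack-c']

    # the packs created since the baseline are the ones to combine
    combine = sorted(set(all_names) - set(to_keep))
    assert combine == ['pack-b', 'pack-c']
    # repository.pack(hint=combine) then merges just these two,
    # leaving 'pack-a' alone, so two packs remain overall
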
@@ -675,6 +1038,7 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -715,6 +1079,7 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
+        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -737,7 +1102,17 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        self.assertRaises(errors.RevisionNotPresent, empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -750,6 +1125,49 @@
         repo = self.make_repository('.', format=format)
         return repo._pack_collection
 
+    def make_packs_and_alt_repo(self, write_lock=False):
+        """Create a pack repo with 3 packs, and access it via a second repo."""
+        tree = self.make_branch_and_tree('.', format=self.get_format())
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        rev1 = tree.commit('one')
+        rev2 = tree.commit('two')
+        rev3 = tree.commit('three')
+        r = repository.Repository.open('.')
+        if write_lock:
+            r.lock_write()
+        else:
+            r.lock_read()
+        self.addCleanup(r.unlock)
+        packs = r._pack_collection
+        packs.ensure_loaded()
+        return tree, r, packs, [rev1, rev2, rev3]
+
+    def test__clear_obsolete_packs(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs()
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
+
+    def test__clear_obsolete_packs_preserve(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
+                         sorted(obsolete_pack_trans.list_dir('.')))
+
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
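
make_packs_and_alt_repo is the backbone of the new concurrency tests: it
creates three packs with three commits, then opens the same repository a
second time via repository.Repository.open('.'), so the second handle has its
own cached view of the pack names. The shape of that setup, reduced to plain
Python (illustrative, not bzrlib code):

    class PackCollection(object):
        def __init__(self, store):
            self._store = store        # shared on-disk state
            self._names = list(store)  # this handle's cached view

        def names(self):
            return list(self._names)

        def reload_pack_names(self):
            changed = self._names != list(self._store)
            self._names = list(self._store)
            return changed             # True only if the cache was stale

    store = ['pack-1', 'pack-2', 'pack-3']
    reader = PackCollection(store)     # second handle, own cache
    store.append('pack-4')             # a "concurrent" writer adds a pack
    assert reader.reload_pack_names() is True
    assert reader.reload_pack_names() is False
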
@@ -775,6 +1193,33 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
+    def test_repr(self):
+        packs = self.get_packs()
+        self.assertContainsRe(repr(packs),
+            'RepositoryPackCollection(.*Repository(.*))')
+
+    def test__obsolete_packs(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        # Schedule this one for removal
+        packs._remove_pack_from_memory(pack)
+        # Simulate a concurrent update by renaming the .pack file and one of
+        # the indices
+        packs.transport.rename('packs/%s.pack' % (names[0],),
+                               'obsolete_packs/%s.pack' % (names[0],))
+        packs.transport.rename('indices/%s.iix' % (names[0],),
+                               'obsolete_packs/%s.iix' % (names[0],))
+        # Now trigger the obsoletion, and ensure that all the remaining files
+        # are still renamed
+        packs._obsolete_packs([pack])
+        self.assertEqual([n + '.pack' for n in names[1:]],
+                         sorted(packs._pack_transport.list_dir('.')))
+        # names[0] should not be present in the index anymore
+        self.assertEqual(names[1:],
+            sorted(set([osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')])))
+
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
@@ -903,6 +1348,7 @@
         tree.lock_read()
         self.addCleanup(tree.unlock)
         packs = tree.branch.repository._pack_collection
+        packs.reset()
         packs.ensure_loaded()
         name = packs.names()[0]
         pack_1 = packs.get_pack_by_name(name)
@@ -917,6 +1363,139 @@
         # and the same instance should be returned on successive calls.
         self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
+    def test_reload_pack_names_new_entry(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo()
+        names = packs.names()
+        # Add a new pack file into the repository
+        rev4 = tree.commit('four')
+        new_names = tree.branch.repository._pack_collection.names()
+        new_name = set(new_names).difference(names)
+        self.assertEqual(1, len(new_name))
+        new_name = new_name.pop()
+        # The old collection hasn't noticed yet
+        self.assertEqual(names, packs.names())
+        self.assertTrue(packs.reload_pack_names())
+        self.assertEqual(new_names, packs.names())
+        # And the repository can access the new revision
+        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
+        self.assertFalse(packs.reload_pack_names())
+
+    def test_reload_pack_names_added_and_removed(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo()
+        names = packs.names()
+        # Now repack the whole thing
+        tree.branch.repository.pack()
+        new_names = tree.branch.repository._pack_collection.names()
+        # The other collection hasn't noticed yet
+        self.assertEqual(names, packs.names())
+        self.assertTrue(packs.reload_pack_names())
+        self.assertEqual(new_names, packs.names())
+        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
+        self.assertFalse(packs.reload_pack_names())
+
+    def test_reload_pack_names_preserves_pending(self):
+        # TODO: Update this to also test for pending-deleted names
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        # We will add one pack (via start_write_group + insert_record_stream),
+        # and remove another pack (via _remove_pack_from_memory)
+        orig_names = packs.names()
+        orig_at_load = packs._packs_at_load
+        to_remove_name = iter(orig_names).next()
+        r.start_write_group()
+        self.addCleanup(r.abort_write_group)
+        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('text', 'rev'), (), None, 'content\n')])
+        new_pack = packs._new_pack
+        self.assertTrue(new_pack.data_inserted())
+        new_pack.finish()
+        packs.allocate(new_pack)
+        packs._new_pack = None
+        removed_pack = packs.get_pack_by_name(to_remove_name)
+        packs._remove_pack_from_memory(removed_pack)
+        names = packs.names()
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+        packs.reload_pack_names()
+        reloaded_names = packs.names()
+        self.assertEqual(orig_at_load, packs._packs_at_load)
+        self.assertEqual(names, reloaded_names)
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+
+    def test_autopack_obsoletes_new_pack(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        packs._max_pack_count = lambda x: 1
+        packs.pack_distribution = lambda x: [10]
+        r.start_write_group()
+        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('bogus-rev',), (), None, 'bogus-content\n')])
+        # This should trigger an autopack, which will combine everything into a
+        # single pack file.
+        new_names = r.commit_write_group()
+        names = packs.names()
+        self.assertEqual(1, len(names))
+        self.assertEqual([names[0] + '.pack'],
+                         packs._pack_transport.list_dir('.'))
+
+    def test_autopack_reloads_and_stops(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        # After we have determined what needs to be autopacked, trigger a
+        # full-pack via the other repo which will cause us to re-evaluate and
+        # decide we don't need to do anything
+        orig_execute = packs._execute_pack_operations
+        def _munged_execute_pack_ops(*args, **kwargs):
+            tree.branch.repository.pack()
+            return orig_execute(*args, **kwargs)
+        packs._execute_pack_operations = _munged_execute_pack_ops
+        packs._max_pack_count = lambda x: 1
+        packs.pack_distribution = lambda x: [10]
+        self.assertFalse(packs.autopack())
+        self.assertEqual(1, len(packs.names()))
+        self.assertEqual(tree.branch.repository._pack_collection.names(),
+                         packs.names())
+
+    def test__save_pack_names(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        packs._save_pack_names(obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # obsolete_packs will also have stuff like .rix and .iix present.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+    def test__save_pack_names_already_obsoleted(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        # We are going to simulate a concurrent autopack by manually obsoleting
+        # the pack directly.
+        packs._obsolete_packs([pack])
+        packs._save_pack_names(clear_obsolete_packs=True,
+                               obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # Note that while we set clear_obsolete_packs=True, it should not
+        # delete a pack file that we have also scheduled for obsoletion.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
@@ -976,9 +1555,17 @@
         pack_transport = self.get_transport('pack')
         index_transport = self.get_transport('index')
         upload_transport.mkdir('.')
-        pack = pack_repo.NewPack(upload_transport, index_transport,
-            pack_transport, index_builder_class=BTreeBuilder,
-            index_class=BTreeGraphIndex)
+        collection = pack_repo.RepositoryPackCollection(
+            repo=None,
+            transport=self.get_transport('.'),
+            index_transport=index_transport,
+            upload_transport=upload_transport,
+            pack_transport=pack_transport,
+            index_builder_class=BTreeBuilder,
+            index_class=BTreeGraphIndex,
+            use_chk_index=False)
+        pack = pack_repo.NewPack(collection)
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
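
NewPack construction changes here from three transports plus two index
classes to a single RepositoryPackCollection that owns all of them, a
parameter-object refactoring. A minimal sketch of that shape (illustrative
names, not the bzrlib signatures):

    class PackCollection(object):
        """Bundles everything a new pack needs to know about its home."""
        def __init__(self, upload_transport, index_transport,
                     pack_transport, index_builder_class):
            self.upload_transport = upload_transport
            self.index_transport = index_transport
            self.pack_transport = pack_transport
            self.index_builder_class = index_builder_class

    class NewPack(object):
        def __init__(self, collection):
            # one argument instead of five; the collection is the single
            # source of truth for transports and index construction
            self.revision_index = collection.index_builder_class()

    collection = PackCollection('upload', 'index', 'pack', list)
    pack = NewPack(collection)
    assert isinstance(pack.revision_index, list)
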
@@ -994,26 +1581,132 @@
 class TestPacker(TestCaseWithTransport):
     """Tests for the packs repository Packer class."""
 
-    # To date, this class has been factored out and nothing new added to it;
-    # thus there are not yet any tests.
-
-
-class TestInterDifferingSerializer(TestCaseWithTransport):
-
-    def test_progress_bar(self):
-        tree = self.make_branch_and_tree('tree')
-        tree.commit('rev1', rev_id='rev-1')
-        tree.commit('rev2', rev_id='rev-2')
-        tree.commit('rev3', rev_id='rev-3')
-        repo = self.make_repository('repo')
-        inter_repo = repository.InterDifferingSerializer(
-            tree.branch.repository, repo)
-        pb = progress.InstrumentedProgress(to_file=StringIO())
-        pb.never_throttle = True
-        inter_repo.fetch('rev-1', pb)
-        self.assertEqual('Transferring revisions', pb.last_msg)
-        self.assertEqual(1, pb.last_cnt)
-        self.assertEqual(1, pb.last_total)
-        inter_repo.fetch('rev-3', pb)
-        self.assertEqual(2, pb.last_cnt)
-        self.assertEqual(2, pb.last_total)
+    def test_pack_optimizes_pack_order(self):
+        builder = self.make_branch_builder('.', format="1.9")
+        builder.start_series()
+        builder.build_snapshot('A', None, [
+            ('add', ('', 'root-id', 'directory', None)),
+            ('add', ('f', 'f-id', 'file', 'content\n'))])
+        builder.build_snapshot('B', ['A'],
+            [('modify', ('f-id', 'new-content\n'))])
+        builder.build_snapshot('C', ['B'],
+            [('modify', ('f-id', 'third-content\n'))])
+        builder.build_snapshot('D', ['C'],
+            [('modify', ('f-id', 'fourth-content\n'))])
+        b = builder.get_branch()
+        b.lock_read()
+        builder.finish_series()
+        self.addCleanup(b.unlock)
+        # At this point, we should have 4 pack files available
+        # Because of how they were built, they correspond to
+        # ['D', 'C', 'B', 'A']
+        packs = b.repository._pack_collection.packs
+        packer = pack_repo.Packer(b.repository._pack_collection,
+                                  packs, 'testing',
+                                  revision_ids=['B', 'C'])
+        # Now, when we are copying the B & C revisions, their pack files should
+        # be moved to the front of the stack
+        # The new ordering moves B & C to the front of the .packs attribute,
+        # and leaves the others in the original order.
+        new_packs = [packs[1], packs[2], packs[0], packs[3]]
+        new_pack = packer.pack()
+        self.assertEqual(new_packs, packer.packs)
+
+
+class TestOptimisingPacker(TestCaseWithTransport):
+    """Tests for the OptimisingPacker class."""
+
+    def get_pack_collection(self):
+        repo = self.make_repository('.')
+        return repo._pack_collection
+
+    def test_open_pack_will_optimise(self):
+        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
+                                            [], '.test')
+        new_pack = packer.open_pack()
+        self.addCleanup(new_pack.abort) # ensure cleanup
+        self.assertIsInstance(new_pack, pack_repo.NewPack)
+        self.assertTrue(new_pack.revision_index._optimize_for_size)
+        self.assertTrue(new_pack.inventory_index._optimize_for_size)
+        self.assertTrue(new_pack.text_index._optimize_for_size)
+        self.assertTrue(new_pack.signature_index._optimize_for_size)
+
+
+class TestCrossFormatPacks(TestCaseWithTransport):
+
+    def log_pack(self, hint=None):
+        self.calls.append(('pack', hint))
+        self.orig_pack(hint=hint)
+        if self.expect_hint:
+            self.assertTrue(hint)
+
+    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
+        self.expect_hint = expect_pack_called
+        self.calls = []
+        source_tree = self.make_branch_and_tree('src', format=src_fmt)
+        source_tree.lock_write()
+        self.addCleanup(source_tree.unlock)
+        tip = source_tree.commit('foo')
+        target = self.make_repository('target', format=target_fmt)
+        target.lock_write()
+        self.addCleanup(target.unlock)
+        source = source_tree.branch.repository._get_source(target._format)
+        self.orig_pack = target.pack
+        target.pack = self.log_pack
+        search = target.search_missing_revision_ids(
+            source_tree.branch.repository, tip)
+        stream = source.get_stream(search)
+        from_format = source_tree.branch.repository._format
+        sink = target._get_sink()
+        sink.insert_stream(stream, from_format, [])
+        if expect_pack_called:
+            self.assertLength(1, self.calls)
+        else:
+            self.assertLength(0, self.calls)
+
+    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
+        self.expect_hint = expect_pack_called
+        self.calls = []
+        source_tree = self.make_branch_and_tree('src', format=src_fmt)
+        source_tree.lock_write()
+        self.addCleanup(source_tree.unlock)
+        tip = source_tree.commit('foo')
+        target = self.make_repository('target', format=target_fmt)
+        target.lock_write()
+        self.addCleanup(target.unlock)
+        source = source_tree.branch.repository
+        self.orig_pack = target.pack
+        target.pack = self.log_pack
+        target.fetch(source)
+        if expect_pack_called:
+            self.assertLength(1, self.calls)
+        else:
+            self.assertLength(0, self.calls)
+
+    def test_sink_format_hint_no(self):
+        # When the target format says packing makes no difference, pack is not
+        # called.
+        self.run_stream('1.9', 'rich-root-pack', False)
+
+    def test_sink_format_hint_yes(self):
+        # When the target format says packing makes a difference, pack is
+        # called.
+        self.run_stream('1.9', '2a', True)
+
+    def test_sink_format_same_no(self):
+        # When the formats are the same, pack is not called.
+        self.run_stream('2a', '2a', False)
+
+    def test_IDS_format_hint_no(self):
+        # When the target format says packing makes no difference, pack is not
+        # called.
+        self.run_fetch('1.9', 'rich-root-pack', False)
+
+    def test_IDS_format_hint_yes(self):
+        # When the target format says packing makes a difference, pack is
+        # called.
+        self.run_fetch('1.9', '2a', True)
+
+    def test_IDS_format_same_no(self):
+        # When the formats are the same, pack is not called.
+        self.run_fetch('2a', '2a', False)
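
TestCrossFormatPacks works by swapping the target repository's pack method
for log_pack, which records each call and its hint in self.calls before
delegating to the saved original; the tests then assert on how often packing
happened. The call-recording wrapper on its own (plain Python, illustrative
names):

    calls = []

    class Target(object):
        def pack(self, hint=None):
            return 'packed'

    target = Target()
    orig_pack = target.pack            # keep the real bound method

    def log_pack(hint=None):
        calls.append(('pack', hint))   # record for later assertions
        return orig_pack(hint=hint)

    target.pack = log_pack             # instance attribute shadows the method
    target.pack(hint=['pack-1'])
    assert calls == [('pack', ['pack-1'])]
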