/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Robert Collins
  • Date: 2010-05-06 11:08:10 UTC
  • mto: This revision was merged to the branch mainline in revision 5223.
  • Revision ID: robertc@robertcollins.net-20100506110810-h3j07fh5gmw54s25
Cleaner matcher matching revised unlocking protocol.
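The test changes below all follow one pattern: reading data from a revision tree now requires the tree to be read-locked first. As orientation, here is a minimal sketch of the two idioms the updated tests use, distilled from the diff itself (the names come from test_convert below, not from new API):

    # Explicit form, as used in test_convert:
    revision_tree.lock_read()
    try:
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
    finally:
        revision_tree.unlock()

    # Cleanup-based form, common in the new tests:
    revision_tree.lock_read()
    self.addCleanup(revision_tree.unlock)

Both guarantee the unlock happens even when an assertion fails part-way, which is what the revised unlocking protocol requires.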

=== modified file 'bzrlib/tests/test_repository.py'
--- bzrlib/tests/test_repository.py	(old)
+++ bzrlib/tests/test_repository.py	(new)
@@ -1,4 +1,4 @@
-# Copyright (C) 2006, 2007 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,38 +12,55 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 """Tests for the Repository facility that are not interface tests.
 
-For interface tests see tests/repository_implementations/*.py.
+For interface tests see tests/per_repository/*.py.
 
 For concrete class tests see this file, and for storage formats tests
 also see this file.
 """
 
 from stat import S_ISDIR
-from StringIO import StringIO
+import sys
 
-from bzrlib import symbol_versioning
 import bzrlib
-import bzrlib.bzrdir as bzrdir
-import bzrlib.errors as errors
-from bzrlib.errors import (NotBranchError,
-                           NoSuchFile,
+from bzrlib.errors import (NoSuchFile,
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
+from bzrlib import (
+    graph,
+    tests,
+    )
+from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
+from bzrlib.index import GraphIndex
 from bzrlib.repository import RepositoryFormat
-from bzrlib.tests import TestCase, TestCaseWithTransport
-from bzrlib.transport import get_transport
-from bzrlib.transport.memory import MemoryServer
+from bzrlib.tests import (
+    TestCase,
+    TestCaseWithTransport,
+    )
+from bzrlib.transport import (
+    get_transport,
+    )
 from bzrlib import (
+    bzrdir,
+    errors,
+    inventory,
+    osutils,
     repository,
+    revision as _mod_revision,
     upgrade,
+    versionedfile,
     workingtree,
     )
-from bzrlib.repofmt import knitrepo, weaverepo
+from bzrlib.repofmt import (
+    groupcompress_repo,
+    knitrepo,
+    pack_repo,
+    weaverepo,
+    )
 
 
 class TestDefaultFormat(TestCase):
@@ -78,7 +95,7 @@
 class SampleRepositoryFormat(repository.RepositoryFormat):
     """A sample format
 
-    this format is initializable, unsupported to aid in testing the 
+    this format is initializable, unsupported to aid in testing the
     open and open(unsupported=True) routines.
     """
 
@@ -105,7 +122,7 @@
     def test_find_format(self):
         # is the right format object found for a repository?
         # create a branch with a few known format objects.
-        # this is not quite the same as 
+        # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
         def check_format(format, url):
             dir = format._matchingbzrdir.initialize(url)
@@ -114,7 +131,7 @@
             found_format = repository.RepositoryFormat.find_format(dir)
             self.failUnless(isinstance(found_format, format.__class__))
         check_format(weaverepo.RepositoryFormat7(), "bar")
-        
+
     def test_find_format_no_repository(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         self.assertRaises(errors.NoRepositoryPresent,
@@ -146,6 +163,24 @@
 
 class TestFormat6(TestCaseWithTransport):
 
+    def test_attribute__fetch_order(self):
+        """Weaves need topological data insertion."""
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        self.assertEqual('topological', repo._format._fetch_order)
+
+    def test_attribute__fetch_uses_deltas(self):
+        """Weaves do not reuse deltas."""
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
+
+    def test_attribute__fetch_reconcile(self):
+        """Weave repositories need a reconcile after fetch."""
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        self.assertEqual(True, repo._format._fetch_reconcile)
+
     def test_no_ancestry_weave(self):
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
@@ -155,9 +190,32 @@
                           control.transport.get,
                           'ancestry.weave')
 
+    def test_supports_external_lookups(self):
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        self.assertFalse(repo._format.supports_external_lookups)
+
 
 class TestFormat7(TestCaseWithTransport):
-    
+
+    def test_attribute__fetch_order(self):
+        """Weaves need topological data insertion."""
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        self.assertEqual('topological', repo._format._fetch_order)
+
+    def test_attribute__fetch_uses_deltas(self):
+        """Weaves do not reuse deltas."""
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
+
+    def test_attribute__fetch_reconcile(self):
+        """Weave repositories need a reconcile after fetch."""
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        self.assertEqual(True, repo._format._fetch_reconcile)
+
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
@@ -179,6 +237,32 @@
                              'w\n'
                              'W\n',
                              t.get('inventory.weave').read())
+        # Creating a file with id Foo:Bar results in a non-escaped file name on
+        # disk.
+        control.create_branch()
+        tree = control.create_workingtree()
+        tree.add(['foo'], ['Foo:Bar'], ['file'])
+        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
+        self.assertEqualDiff(
+            '# bzr weave file v5\n'
+            'i\n'
+            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
+            'n first\n'
+            '\n'
+            'w\n'
+            '{ 0\n'
+            '. content\n'
+            '}\n'
+            'W\n',
+            t.get('weaves/74/Foo%3ABar.weave').read())
 
     def test_shared_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -207,7 +291,7 @@
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
         t = control.get_repository_transport(None)
-        # TODO: Should check there is a 'lock' toplevel directory, 
+        # TODO: Should check there is a 'lock' toplevel directory,
         # regardless of contents
         self.assertFalse(t.has('lock/held/info'))
         repo.lock_write()
@@ -259,9 +343,26 @@
                              'W\n',
                              t.get('inventory.weave').read())
 
+    def test_supports_external_lookups(self):
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        self.assertFalse(repo._format.supports_external_lookups)
+
 
 class TestFormatKnit1(TestCaseWithTransport):
-    
+
+    def test_attribute__fetch_order(self):
+        """Knits need topological data insertion."""
+        repo = self.make_repository('.',
+                format=bzrdir.format_registry.get('knit')())
+        self.assertEqual('topological', repo._format._fetch_order)
+
+    def test_attribute__fetch_uses_deltas(self):
+        """Knits reuse deltas."""
+        repo = self.make_repository('.',
+                format=bzrdir.format_registry.get('knit')())
+        self.assertEqual(True, repo._format._fetch_uses_deltas)
+
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = knitrepo.RepositoryFormatKnit1().initialize(control)
@@ -281,13 +382,19 @@
         # self.assertEqualDiff('', t.get('lock').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
+        # Check per-file knits.
+        branch = control.create_branch()
+        tree = control.create_workingtree()
+        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
+        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
+        tree.commit('1st post', rev_id='foo')
+        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
+            '\nfoo fulltext 0 81  :')
 
-    def assertHasKnit(self, t, knit_name):
+    def assertHasKnit(self, t, knit_name, extra_content=''):
         """Assert that knit_name exists on t."""
-        self.assertEqualDiff('# bzr knit index 8\n',
+        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                              t.get(knit_name + '.kndx').read())
-        # no default content
-        self.assertTrue(t.has(knit_name + '.knit'))
 
     def check_knits(self, t):
         """check knit content for a repository."""
@@ -337,28 +444,69 @@
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
 
+    def test_deserialise_sets_root_revision(self):
+        """We must have a inventory.root.revision
+
+        Old versions of the XML5 serializer did not set the revision_id for
+        the whole inventory. So we grab the one from the expected text. Which
+        is valid when the api is not being abused.
+        """
+        repo = self.make_repository('.',
+                format=bzrdir.format_registry.get('knit')())
+        inv_xml = '<inventory format="5">\n</inventory>\n'
+        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
+        self.assertEqual('test-rev-id', inv.root.revision)
+
+    def test_deserialise_uses_global_revision_id(self):
+        """If it is set, then we re-use the global revision id"""
+        repo = self.make_repository('.',
+                format=bzrdir.format_registry.get('knit')())
+        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
+                   '</inventory>\n')
+        # Arguably, the deserialise_inventory should detect a mismatch, and
+        # raise an error, rather than silently using one revision_id over the
+        # other.
+        self.assertRaises(AssertionError, repo._deserialise_inventory,
+            'test-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
+        self.assertEqual('other-rev-id', inv.root.revision)
+
+    def test_supports_external_lookups(self):
+        repo = self.make_repository('.',
+                format=bzrdir.format_registry.get('knit')())
+        self.assertFalse(repo._format.supports_external_lookups)
+
 
 class DummyRepository(object):
     """A dummy repository for testing."""
 
+    _format = None
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
+    def get_graph(self):
+        raise NotImplementedError
+
+    def get_parent_map(self, revision_ids):
+        raise NotImplementedError
+
 
 class InterDummy(repository.InterRepository):
     """An inter-repository optimised code path for DummyRepository.
 
     This is for use during testing where we use DummyRepository as repositories
     so that none of the default regsitered inter-repository classes will
-    match.
+    MATCH.
     """
 
     @staticmethod
     def is_compatible(repo_source, repo_target):
         """InterDummy is compatible with DummyRepository."""
-        return (isinstance(repo_source, DummyRepository) and 
+        return (isinstance(repo_source, DummyRepository) and
             isinstance(repo_target, DummyRepository))
 
 
@@ -377,7 +525,7 @@
 
     def assertGetsDefaultInterRepository(self, repo_a, repo_b):
         """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
-        
+
         The effective default is now InterSameDataRepository because there is
         no actual sane default in the presence of incompatible data models.
         """
@@ -394,11 +542,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
        repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
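For context, the registration this test exercises is global state, so the surrounding code (truncated in this view) has to undo it. A sketch of the pattern, where the finally-based unregistration is an assumption about the elided part of the test:

    # Optimisers are registered globally, so a test must always remove
    # what it added, even when an assertion fails mid-way.
    repository.InterRepository.register_optimiser(InterDummy)
    try:
        # InterDummy.is_compatible() only accepts DummyRepository pairs,
        # so a mixed pair still falls back to the default InterRepository.
        self.assertGetsDefaultInterRepository(dummy_a, repo)
    finally:
        repository.InterRepository.unregister_optimiser(InterDummy)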
@@ -467,14 +621,28 @@
 
 
 class TestMisc(TestCase):
-    
+
     def test_unescape_xml(self):
         """We get some kind of error when malformed entities are passed"""
-        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
+        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
 
 class TestRepositoryFormatKnit3(TestCaseWithTransport):
 
+    def test_attribute__fetch_order(self):
+        """Knits need topological data insertion."""
+        format = bzrdir.BzrDirMetaFormat1()
+        format.repository_format = knitrepo.RepositoryFormatKnit3()
+        repo = self.make_repository('.', format=format)
+        self.assertEqual('topological', repo._format._fetch_order)
+
+    def test_attribute__fetch_uses_deltas(self):
+        """Knits reuse deltas."""
+        format = bzrdir.BzrDirMetaFormat1()
+        format.repository_format = knitrepo.RepositoryFormatKnit3()
+        repo = self.make_repository('.', format=format)
+        self.assertEqual(True, repo._format._fetch_uses_deltas)
+
     def test_convert(self):
         """Ensure the upgrade adds weaves for roots"""
         format = bzrdir.BzrDirMetaFormat1()
482
650
        tree = self.make_branch_and_tree('.', format)
483
651
        tree.commit("Dull commit", rev_id="dull")
484
652
        revision_tree = tree.branch.repository.revision_tree('dull')
485
 
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
486
 
            revision_tree.inventory.root.file_id)
 
653
        revision_tree.lock_read()
 
654
        try:
 
655
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
656
                revision_tree.inventory.root.file_id)
 
657
        finally:
 
658
            revision_tree.unlock()
487
659
        format = bzrdir.BzrDirMetaFormat1()
488
660
        format.repository_format = knitrepo.RepositoryFormatKnit3()
489
661
        upgrade.Convert('.', format)
490
662
        tree = workingtree.WorkingTree.open('.')
491
663
        revision_tree = tree.branch.repository.revision_tree('dull')
492
 
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
664
        revision_tree.lock_read()
 
665
        try:
 
666
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
667
        finally:
 
668
            revision_tree.unlock()
493
669
        tree.commit("Another dull commit", rev_id='dull2')
494
670
        revision_tree = tree.branch.repository.revision_tree('dull2')
 
671
        revision_tree.lock_read()
 
672
        self.addCleanup(revision_tree.unlock)
495
673
        self.assertEqual('dull', revision_tree.inventory.root.revision)
496
674
 
 
675
    def test_supports_external_lookups(self):
 
676
        format = bzrdir.BzrDirMetaFormat1()
 
677
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
678
        repo = self.make_repository('.', format=format)
 
679
        self.assertFalse(repo._format.supports_external_lookups)
 
680
 
 
681
 
 
682
class Test2a(tests.TestCaseWithMemoryTransport):
 
683
 
 
684
    def test_fetch_combines_groups(self):
 
685
        builder = self.make_branch_builder('source', format='2a')
 
686
        builder.start_series()
 
687
        builder.build_snapshot('1', None, [
 
688
            ('add', ('', 'root-id', 'directory', '')),
 
689
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
690
        builder.build_snapshot('2', ['1'], [
 
691
            ('modify', ('file-id', 'content-2\n'))])
 
692
        builder.finish_series()
 
693
        source = builder.get_branch()
 
694
        target = self.make_repository('target', format='2a')
 
695
        target.fetch(source.repository)
 
696
        target.lock_read()
 
697
        self.addCleanup(target.unlock)
 
698
        details = target.texts._index.get_build_details(
 
699
            [('file-id', '1',), ('file-id', '2',)])
 
700
        file_1_details = details[('file-id', '1')]
 
701
        file_2_details = details[('file-id', '2')]
 
702
        # The index, and what to read off disk, should be the same for both
 
703
        # versions of the file.
 
704
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
705
 
 
706
    def test_fetch_combines_groups(self):
 
707
        builder = self.make_branch_builder('source', format='2a')
 
708
        builder.start_series()
 
709
        builder.build_snapshot('1', None, [
 
710
            ('add', ('', 'root-id', 'directory', '')),
 
711
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
712
        builder.build_snapshot('2', ['1'], [
 
713
            ('modify', ('file-id', 'content-2\n'))])
 
714
        builder.finish_series()
 
715
        source = builder.get_branch()
 
716
        target = self.make_repository('target', format='2a')
 
717
        target.fetch(source.repository)
 
718
        target.lock_read()
 
719
        self.addCleanup(target.unlock)
 
720
        details = target.texts._index.get_build_details(
 
721
            [('file-id', '1',), ('file-id', '2',)])
 
722
        file_1_details = details[('file-id', '1')]
 
723
        file_2_details = details[('file-id', '2')]
 
724
        # The index, and what to read off disk, should be the same for both
 
725
        # versions of the file.
 
726
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
727
 
 
728
    def test_fetch_combines_groups(self):
 
729
        builder = self.make_branch_builder('source', format='2a')
 
730
        builder.start_series()
 
731
        builder.build_snapshot('1', None, [
 
732
            ('add', ('', 'root-id', 'directory', '')),
 
733
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
734
        builder.build_snapshot('2', ['1'], [
 
735
            ('modify', ('file-id', 'content-2\n'))])
 
736
        builder.finish_series()
 
737
        source = builder.get_branch()
 
738
        target = self.make_repository('target', format='2a')
 
739
        target.fetch(source.repository)
 
740
        target.lock_read()
 
741
        self.addCleanup(target.unlock)
 
742
        details = target.texts._index.get_build_details(
 
743
            [('file-id', '1',), ('file-id', '2',)])
 
744
        file_1_details = details[('file-id', '1')]
 
745
        file_2_details = details[('file-id', '2')]
 
746
        # The index, and what to read off disk, should be the same for both
 
747
        # versions of the file.
 
748
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
749
 
 
750
    def test_format_pack_compresses_True(self):
 
751
        repo = self.make_repository('repo', format='2a')
 
752
        self.assertTrue(repo._format.pack_compresses)
 
753
 
 
754
    def test_inventories_use_chk_map_with_parent_base_dict(self):
 
755
        tree = self.make_branch_and_memory_tree('repo', format="2a")
 
756
        tree.lock_write()
 
757
        tree.add([''], ['TREE_ROOT'])
 
758
        revid = tree.commit("foo")
 
759
        tree.unlock()
 
760
        tree.lock_read()
 
761
        self.addCleanup(tree.unlock)
 
762
        inv = tree.branch.repository.get_inventory(revid)
 
763
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
 
764
        inv.parent_id_basename_to_file_id._ensure_root()
 
765
        inv.id_to_entry._ensure_root()
 
766
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
 
767
        self.assertEqual(65536,
 
768
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
769
 
 
770
    def test_autopack_unchanged_chk_nodes(self):
 
771
        # at 20 unchanged commits, chk pages are packed that are split into
 
772
        # two groups such that the new pack being made doesn't have all its
 
773
        # pages in the source packs (though they are in the repository).
 
774
        # Use a memory backed repository, we don't need to hit disk for this
 
775
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
776
        tree.lock_write()
 
777
        self.addCleanup(tree.unlock)
 
778
        tree.add([''], ['TREE_ROOT'])
 
779
        for pos in range(20):
 
780
            tree.commit(str(pos))
 
781
 
 
782
    def test_pack_with_hint(self):
 
783
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
784
        tree.lock_write()
 
785
        self.addCleanup(tree.unlock)
 
786
        tree.add([''], ['TREE_ROOT'])
 
787
        # 1 commit to leave untouched
 
788
        tree.commit('1')
 
789
        to_keep = tree.branch.repository._pack_collection.names()
 
790
        # 2 to combine
 
791
        tree.commit('2')
 
792
        tree.commit('3')
 
793
        all = tree.branch.repository._pack_collection.names()
 
794
        combine = list(set(all) - set(to_keep))
 
795
        self.assertLength(3, all)
 
796
        self.assertLength(2, combine)
 
797
        tree.branch.repository.pack(hint=combine)
 
798
        final = tree.branch.repository._pack_collection.names()
 
799
        self.assertLength(2, final)
 
800
        self.assertFalse(combine[0] in final)
 
801
        self.assertFalse(combine[1] in final)
 
802
        self.assertSubset(to_keep, final)
 
803
 
 
804
    def test_stream_source_to_gc(self):
 
805
        source = self.make_repository('source', format='2a')
 
806
        target = self.make_repository('target', format='2a')
 
807
        stream = source._get_source(target._format)
 
808
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
 
809
 
 
810
    def test_stream_source_to_non_gc(self):
 
811
        source = self.make_repository('source', format='2a')
 
812
        target = self.make_repository('target', format='rich-root-pack')
 
813
        stream = source._get_source(target._format)
 
814
        # We don't want the child GroupCHKStreamSource
 
815
        self.assertIs(type(stream), repository.StreamSource)
 
816
 
 
817
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
 
818
        source_builder = self.make_branch_builder('source',
 
819
                            format='2a')
 
820
        # We have to build a fairly large tree, so that we are sure the chk
 
821
        # pages will have split into multiple pages.
 
822
        entries = [('add', ('', 'a-root-id', 'directory', None))]
 
823
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
 
824
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
 
825
                fname = i + j
 
826
                fid = fname + '-id'
 
827
                content = 'content for %s\n' % (fname,)
 
828
                entries.append(('add', (fname, fid, 'file', content)))
 
829
        source_builder.start_series()
 
830
        source_builder.build_snapshot('rev-1', None, entries)
 
831
        # Now change a few of them, so we get a few new pages for the second
 
832
        # revision
 
833
        source_builder.build_snapshot('rev-2', ['rev-1'], [
 
834
            ('modify', ('aa-id', 'new content for aa-id\n')),
 
835
            ('modify', ('cc-id', 'new content for cc-id\n')),
 
836
            ('modify', ('zz-id', 'new content for zz-id\n')),
 
837
            ])
 
838
        source_builder.finish_series()
 
839
        source_branch = source_builder.get_branch()
 
840
        source_branch.lock_read()
 
841
        self.addCleanup(source_branch.unlock)
 
842
        target = self.make_repository('target', format='2a')
 
843
        source = source_branch.repository._get_source(target._format)
 
844
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
 
845
 
 
846
        # On a regular pass, getting the inventories and chk pages for rev-2
 
847
        # would only get the newly created chk pages
 
848
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
 
849
                                    set(['rev-2']))
 
850
        simple_chk_records = []
 
851
        for vf_name, substream in source.get_stream(search):
 
852
            if vf_name == 'chk_bytes':
 
853
                for record in substream:
 
854
                    simple_chk_records.append(record.key)
 
855
            else:
 
856
                for _ in substream:
 
857
                    continue
 
858
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
 
859
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
 
860
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
 
861
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
 
862
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
 
863
                         simple_chk_records)
 
864
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
 
865
        # we should get a much larger set of pages.
 
866
        missing = [('inventories', 'rev-2')]
 
867
        full_chk_records = []
 
868
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
 
869
            if vf_name == 'inventories':
 
870
                for record in substream:
 
871
                    self.assertEqual(('rev-2',), record.key)
 
872
            elif vf_name == 'chk_bytes':
 
873
                for record in substream:
 
874
                    full_chk_records.append(record.key)
 
875
            else:
 
876
                self.fail('Should not be getting a stream of %s' % (vf_name,))
 
877
        # We have 257 records now. This is because we have 1 root page, and 256
 
878
        # leaf pages in a complete listing.
 
879
        self.assertEqual(257, len(full_chk_records))
 
880
        self.assertSubset(simple_chk_records, full_chk_records)
 
881
 
 
882
    def test_inconsistency_fatal(self):
 
883
        repo = self.make_repository('repo', format='2a')
 
884
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
 
885
        self.assertFalse(repo.texts._index._inconsistency_fatal)
 
886
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
 
887
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
 
888
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
889
 
 
890
 
 
891
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
 
892
 
 
893
    def test_source_to_exact_pack_092(self):
 
894
        source = self.make_repository('source', format='pack-0.92')
 
895
        target = self.make_repository('target', format='pack-0.92')
 
896
        stream_source = source._get_source(target._format)
 
897
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
898
 
 
899
    def test_source_to_exact_pack_rich_root_pack(self):
 
900
        source = self.make_repository('source', format='rich-root-pack')
 
901
        target = self.make_repository('target', format='rich-root-pack')
 
902
        stream_source = source._get_source(target._format)
 
903
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
904
 
 
905
    def test_source_to_exact_pack_19(self):
 
906
        source = self.make_repository('source', format='1.9')
 
907
        target = self.make_repository('target', format='1.9')
 
908
        stream_source = source._get_source(target._format)
 
909
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
910
 
 
911
    def test_source_to_exact_pack_19_rich_root(self):
 
912
        source = self.make_repository('source', format='1.9-rich-root')
 
913
        target = self.make_repository('target', format='1.9-rich-root')
 
914
        stream_source = source._get_source(target._format)
 
915
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
916
 
 
917
    def test_source_to_remote_exact_pack_19(self):
 
918
        trans = self.make_smart_server('target')
 
919
        trans.ensure_base()
 
920
        source = self.make_repository('source', format='1.9')
 
921
        target = self.make_repository('target', format='1.9')
 
922
        target = repository.Repository.open(trans.base)
 
923
        stream_source = source._get_source(target._format)
 
924
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
 
925
 
 
926
    def test_stream_source_to_non_exact(self):
 
927
        source = self.make_repository('source', format='pack-0.92')
 
928
        target = self.make_repository('target', format='1.9')
 
929
        stream = source._get_source(target._format)
 
930
        self.assertIs(type(stream), repository.StreamSource)
 
931
 
 
932
    def test_stream_source_to_non_exact_rich_root(self):
 
933
        source = self.make_repository('source', format='1.9')
 
934
        target = self.make_repository('target', format='1.9-rich-root')
 
935
        stream = source._get_source(target._format)
 
936
        self.assertIs(type(stream), repository.StreamSource)
 
937
 
 
938
    def test_source_to_remote_non_exact_pack_19(self):
 
939
        trans = self.make_smart_server('target')
 
940
        trans.ensure_base()
 
941
        source = self.make_repository('source', format='1.9')
 
942
        target = self.make_repository('target', format='1.6')
 
943
        target = repository.Repository.open(trans.base)
 
944
        stream_source = source._get_source(target._format)
 
945
        self.assertIs(type(stream_source), repository.StreamSource)
 
946
 
 
947
    def test_stream_source_to_knit(self):
 
948
        source = self.make_repository('source', format='pack-0.92')
 
949
        target = self.make_repository('target', format='dirstate')
 
950
        stream = source._get_source(target._format)
 
951
        self.assertIs(type(stream), repository.StreamSource)
 
952
 
 
953
 
 
954
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
 
955
    """Tests for _find_parent_ids_of_revisions."""
 
956
 
 
957
    def setUp(self):
 
958
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
 
959
        self.builder = self.make_branch_builder('source',
 
960
            format='development6-rich-root')
 
961
        self.builder.start_series()
 
962
        self.builder.build_snapshot('initial', None,
 
963
            [('add', ('', 'tree-root', 'directory', None))])
 
964
        self.repo = self.builder.get_branch().repository
 
965
        self.addCleanup(self.builder.finish_series)
 
966
 
 
967
    def assertParentIds(self, expected_result, rev_set):
 
968
        self.assertEqual(sorted(expected_result),
 
969
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
 
970
 
 
971
    def test_simple(self):
 
972
        self.builder.build_snapshot('revid1', None, [])
 
973
        self.builder.build_snapshot('revid2', ['revid1'], [])
 
974
        rev_set = ['revid2']
 
975
        self.assertParentIds(['revid1'], rev_set)
 
976
 
 
977
    def test_not_first_parent(self):
 
978
        self.builder.build_snapshot('revid1', None, [])
 
979
        self.builder.build_snapshot('revid2', ['revid1'], [])
 
980
        self.builder.build_snapshot('revid3', ['revid2'], [])
 
981
        rev_set = ['revid3', 'revid2']
 
982
        self.assertParentIds(['revid1'], rev_set)
 
983
 
 
984
    def test_not_null(self):
 
985
        rev_set = ['initial']
 
986
        self.assertParentIds([], rev_set)
 
987
 
 
988
    def test_not_null_set(self):
 
989
        self.builder.build_snapshot('revid1', None, [])
 
990
        rev_set = [_mod_revision.NULL_REVISION]
 
991
        self.assertParentIds([], rev_set)
 
992
 
 
993
    def test_ghost(self):
 
994
        self.builder.build_snapshot('revid1', None, [])
 
995
        rev_set = ['ghost', 'revid1']
 
996
        self.assertParentIds(['initial'], rev_set)
 
997
 
 
998
    def test_ghost_parent(self):
 
999
        self.builder.build_snapshot('revid1', None, [])
 
1000
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
 
1001
        rev_set = ['revid2', 'revid1']
 
1002
        self.assertParentIds(['ghost', 'initial'], rev_set)
 
1003
 
 
1004
    def test_righthand_parent(self):
 
1005
        self.builder.build_snapshot('revid1', None, [])
 
1006
        self.builder.build_snapshot('revid2a', ['revid1'], [])
 
1007
        self.builder.build_snapshot('revid2b', ['revid1'], [])
 
1008
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
 
1009
        rev_set = ['revid3', 'revid2a']
 
1010
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
1011
 
 
1012
 
 
1013
class TestWithBrokenRepo(TestCaseWithTransport):
 
1014
    """These tests seem to be more appropriate as interface tests?"""
 
1015
 
 
1016
    def make_broken_repository(self):
 
1017
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
 
1018
        # parent references" branch which is due to land in bzr.dev soon.  Once
 
1019
        # it does, this duplication should be removed.
 
1020
        repo = self.make_repository('broken-repo')
 
1021
        cleanups = []
 
1022
        try:
 
1023
            repo.lock_write()
 
1024
            cleanups.append(repo.unlock)
 
1025
            repo.start_write_group()
 
1026
            cleanups.append(repo.commit_write_group)
 
1027
            # make rev1a: A well-formed revision, containing 'file1'
 
1028
            inv = inventory.Inventory(revision_id='rev1a')
 
1029
            inv.root.revision = 'rev1a'
 
1030
            self.add_file(repo, inv, 'file1', 'rev1a', [])
 
1031
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
 
1032
            repo.add_inventory('rev1a', inv, [])
 
1033
            revision = _mod_revision.Revision('rev1a',
 
1034
                committer='jrandom@example.com', timestamp=0,
 
1035
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
 
1036
            repo.add_revision('rev1a',revision, inv)
 
1037
 
 
1038
            # make rev1b, which has no Revision, but has an Inventory, and
 
1039
            # file1
 
1040
            inv = inventory.Inventory(revision_id='rev1b')
 
1041
            inv.root.revision = 'rev1b'
 
1042
            self.add_file(repo, inv, 'file1', 'rev1b', [])
 
1043
            repo.add_inventory('rev1b', inv, [])
 
1044
 
 
1045
            # make rev2, with file1 and file2
 
1046
            # file2 is sane
 
1047
            # file1 has 'rev1b' as an ancestor, even though this is not
 
1048
            # mentioned by 'rev1a', making it an unreferenced ancestor
 
1049
            inv = inventory.Inventory()
 
1050
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
 
1051
            self.add_file(repo, inv, 'file2', 'rev2', [])
 
1052
            self.add_revision(repo, 'rev2', inv, ['rev1a'])
 
1053
 
 
1054
            # make ghost revision rev1c
 
1055
            inv = inventory.Inventory()
 
1056
            self.add_file(repo, inv, 'file2', 'rev1c', [])
 
1057
 
 
1058
            # make rev3 with file2
 
1059
            # file2 refers to 'rev1c', which is a ghost in this repository, so
 
1060
            # file2 cannot have rev1c as its ancestor.
 
1061
            inv = inventory.Inventory()
 
1062
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
 
1063
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
 
1064
            return repo
 
1065
        finally:
 
1066
            for cleanup in reversed(cleanups):
 
1067
                cleanup()
 
1068
 
 
1069
    def add_revision(self, repo, revision_id, inv, parent_ids):
 
1070
        inv.revision_id = revision_id
 
1071
        inv.root.revision = revision_id
 
1072
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
 
1073
        repo.add_inventory(revision_id, inv, parent_ids)
 
1074
        revision = _mod_revision.Revision(revision_id,
 
1075
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
 
1076
            timezone=0, message='foo', parent_ids=parent_ids)
 
1077
        repo.add_revision(revision_id,revision, inv)
 
1078
 
 
1079
    def add_file(self, repo, inv, filename, revision, parents):
 
1080
        file_id = filename + '-id'
 
1081
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
 
1082
        entry.revision = revision
 
1083
        entry.text_size = 0
 
1084
        inv.add(entry)
 
1085
        text_key = (file_id, revision)
 
1086
        parent_keys = [(file_id, parent) for parent in parents]
 
1087
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
 
1088
 
 
1089
    def test_insert_from_broken_repo(self):
 
1090
        """Inserting a data stream from a broken repository won't silently
 
1091
        corrupt the target repository.
 
1092
        """
 
1093
        broken_repo = self.make_broken_repository()
 
1094
        empty_repo = self.make_repository('empty-repo')
 
1095
        try:
 
1096
            empty_repo.fetch(broken_repo)
 
1097
        except (errors.RevisionNotPresent, errors.BzrCheckError):
 
1098
            # Test successful: compression parent not being copied leads to
 
1099
            # error.
 
1100
            return
 
1101
        empty_repo.lock_read()
 
1102
        self.addCleanup(empty_repo.unlock)
 
1103
        text = empty_repo.texts.get_record_stream(
 
1104
            [('file2-id', 'rev3')], 'topological', True).next()
 
1105
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
1106
 
 
1107
 
 
1108
class TestRepositoryPackCollection(TestCaseWithTransport):
 
1109
 
 
1110
    def get_format(self):
 
1111
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
 
1112
 
 
1113
    def get_packs(self):
 
1114
        format = self.get_format()
 
1115
        repo = self.make_repository('.', format=format)
 
1116
        return repo._pack_collection
 
1117
 
 
1118
    def make_packs_and_alt_repo(self, write_lock=False):
 
1119
        """Create a pack repo with 3 packs, and access it via a second repo."""
 
1120
        tree = self.make_branch_and_tree('.', format=self.get_format())
 
1121
        tree.lock_write()
 
1122
        self.addCleanup(tree.unlock)
 
1123
        rev1 = tree.commit('one')
 
1124
        rev2 = tree.commit('two')
 
1125
        rev3 = tree.commit('three')
 
1126
        r = repository.Repository.open('.')
 
1127
        if write_lock:
 
1128
            r.lock_write()
 
1129
        else:
 
1130
            r.lock_read()
 
1131
        self.addCleanup(r.unlock)
 
1132
        packs = r._pack_collection
 
1133
        packs.ensure_loaded()
 
1134
        return tree, r, packs, [rev1, rev2, rev3]
 
1135
 
 
1136
    def test__clear_obsolete_packs(self):
 
1137
        packs = self.get_packs()
 
1138
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1139
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1140
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1141
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1142
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1143
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1144
        res = packs._clear_obsolete_packs()
 
1145
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1146
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1147
 
 
1148
    def test__clear_obsolete_packs_preserve(self):
 
1149
        packs = self.get_packs()
 
1150
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1151
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1152
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1153
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1154
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1155
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1156
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1157
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1158
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1159
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1160
 
 
1161
    def test__max_pack_count(self):
 
1162
        """The maximum pack count is a function of the number of revisions."""
 
1163
        # no revisions - one pack, so that we can have a revision free repo
 
1164
        # without it blowing up
 
1165
        packs = self.get_packs()
 
1166
        self.assertEqual(1, packs._max_pack_count(0))
 
1167
        # after that the sum of the digits, - check the first 1-9
 
1168
        self.assertEqual(1, packs._max_pack_count(1))
 
1169
        self.assertEqual(2, packs._max_pack_count(2))
 
1170
        self.assertEqual(3, packs._max_pack_count(3))
 
1171
        self.assertEqual(4, packs._max_pack_count(4))
 
1172
        self.assertEqual(5, packs._max_pack_count(5))
 
1173
        self.assertEqual(6, packs._max_pack_count(6))
 
1174
        self.assertEqual(7, packs._max_pack_count(7))
 
1175
        self.assertEqual(8, packs._max_pack_count(8))
 
1176
        self.assertEqual(9, packs._max_pack_count(9))
 
1177
        # check the boundary cases with two digits for the next decade
 
1178
        self.assertEqual(1, packs._max_pack_count(10))
 
1179
        self.assertEqual(2, packs._max_pack_count(11))
 
1180
        self.assertEqual(10, packs._max_pack_count(19))
 
1181
        self.assertEqual(2, packs._max_pack_count(20))
 
1182
        self.assertEqual(3, packs._max_pack_count(21))
 
1183
        # check some arbitrary big numbers
 
1184
        self.assertEqual(25, packs._max_pack_count(112894))
 
1185
 
 
1186
    def test_repr(self):
 
1187
        packs = self.get_packs()
 
1188
        self.assertContainsRe(repr(packs),
 
1189
            'RepositoryPackCollection(.*Repository(.*))')
 
1190
 
 
1191
    def test__obsolete_packs(self):
 
1192
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1193
        names = packs.names()
 
1194
        pack = packs.get_pack_by_name(names[0])
 
1195
        # Schedule this one for removal
 
1196
        packs._remove_pack_from_memory(pack)
 
1197
        # Simulate a concurrent update by renaming the .pack file and one of
 
1198
        # the indices
 
1199
        packs.transport.rename('packs/%s.pack' % (names[0],),
 
1200
                               'obsolete_packs/%s.pack' % (names[0],))
 
1201
        packs.transport.rename('indices/%s.iix' % (names[0],),
 
1202
                               'obsolete_packs/%s.iix' % (names[0],))
 
1203
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1204
        # are still renamed
 
1205
        packs._obsolete_packs([pack])
 
1206
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1207
                         sorted(packs._pack_transport.list_dir('.')))
 
1208
        # names[0] should not be present in the index anymore
 
1209
        self.assertEqual(names[1:],
 
1210
            sorted(set([osutils.splitext(n)[0] for n in
 
1211
                        packs._index_transport.list_dir('.')])))
 
1212
 
 
1213
    def test_pack_distribution_zero(self):
 
1214
        packs = self.get_packs()
 
1215
        self.assertEqual([0], packs.pack_distribution(0))
 
1216
 
 
1217
    def test_ensure_loaded_unlocked(self):
 
1218
        packs = self.get_packs()
 
1219
        self.assertRaises(errors.ObjectNotLocked,
 
1220
                          packs.ensure_loaded)
 
1221
 
 
1222
    def test_pack_distribution_one_to_nine(self):
 
1223
        packs = self.get_packs()
 
1224
        self.assertEqual([1],
 
1225
            packs.pack_distribution(1))
 
1226
        self.assertEqual([1, 1],
 
1227
            packs.pack_distribution(2))
 
1228
        self.assertEqual([1, 1, 1],
 
1229
            packs.pack_distribution(3))
 
1230
        self.assertEqual([1, 1, 1, 1],
 
1231
            packs.pack_distribution(4))
 
1232
        self.assertEqual([1, 1, 1, 1, 1],
 
1233
            packs.pack_distribution(5))
 
1234
        self.assertEqual([1, 1, 1, 1, 1, 1],
 
1235
            packs.pack_distribution(6))
 
1236
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
 
1237
            packs.pack_distribution(7))
 
1238
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
 
1239
            packs.pack_distribution(8))
 
1240
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
 
1241
            packs.pack_distribution(9))
 
1242
 
 
1243
    def test_pack_distribution_stable_at_boundaries(self):
 
1244
        """When there are multi-rev packs the counts are stable."""
 
1245
        packs = self.get_packs()
 
1246
        # in 10s:
 
1247
        self.assertEqual([10], packs.pack_distribution(10))
 
1248
        self.assertEqual([10, 1], packs.pack_distribution(11))
 
1249
        self.assertEqual([10, 10], packs.pack_distribution(20))
 
1250
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
 
1251
        # 100s
 
1252
        self.assertEqual([100], packs.pack_distribution(100))
 
1253
        self.assertEqual([100, 1], packs.pack_distribution(101))
 
1254
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
 
1255
        self.assertEqual([100, 100], packs.pack_distribution(200))
 
1256
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
 
1257
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
 
1258
 
 
1259
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
 
1260
        packs = self.get_packs()
 
1261
        existing_packs = [(2000, "big"), (9, "medium")]
 
1262
        # rev count - 2009 -> 2x1000 + 9x1
 
1263
        pack_operations = packs.plan_autopack_combinations(
 
1264
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
 
1265
        self.assertEqual([], pack_operations)
 
1266
 
 
1267
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
 
1268
        packs = self.get_packs()
 
1269
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
 
1270
        # rev count - 2010 -> 2x1000 + 1x10
 
1271
        pack_operations = packs.plan_autopack_combinations(
 
1272
            existing_packs, [1000, 1000, 10])
 
1273
        self.assertEqual([], pack_operations)
 
1274
 
 
1275
    def test_plan_pack_operations_2010_combines_smallest_two(self):
 
1276
        packs = self.get_packs()
 
1277
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
 
1278
            (1, "single1")]
 
1279
        # rev count - 2010 -> 2x1000 + 1x10 (3)
 
1280
        pack_operations = packs.plan_autopack_combinations(
 
1281
            existing_packs, [1000, 1000, 10])
 
1282
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
 
1283
 
 
1284
    def test_plan_pack_operations_creates_a_single_op(self):
 
1285
        packs = self.get_packs()
 
1286
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
 
1287
                          (10, 'e'), (6, 'f'), (4, 'g')]
 
1288
        # rev count 150 -> 1x100 and 5x10
 
1289
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
 
1290
        # be combined into a single 120 size pack, and the 6 & 4 would
 
1291
        # becombined into a size 10 pack. However, if we have to rewrite them,
 
1292
        # we save a pack file with no increased I/O by putting them into the
 
1293
        # same file.
 
1294
        distribution = packs.pack_distribution(150)
 
1295
        pack_operations = packs.plan_autopack_combinations(existing_packs,
 
1296
                                                           distribution)
 
1297
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
 
1298
 
 
1299
    def test_all_packs_none(self):
 
1300
        format = self.get_format()
 
1301
        tree = self.make_branch_and_tree('.', format=format)
 
1302
        tree.lock_read()
 
1303
        self.addCleanup(tree.unlock)
 
1304
        packs = tree.branch.repository._pack_collection
 
1305
        packs.ensure_loaded()
 
1306
        self.assertEqual([], packs.all_packs())
 
1307
 
 
1308
    def test_all_packs_one(self):
 
1309
        format = self.get_format()
 
1310
        tree = self.make_branch_and_tree('.', format=format)
 
1311
        tree.commit('start')
 
1312
        tree.lock_read()
 
1313
        self.addCleanup(tree.unlock)
 
1314
        packs = tree.branch.repository._pack_collection
 
1315
        packs.ensure_loaded()
 
1316
        self.assertEqual([
 
1317
            packs.get_pack_by_name(packs.names()[0])],
 
1318
            packs.all_packs())
 
1319
 
 
1320
    def test_all_packs_two(self):
 
1321
        format = self.get_format()
 
1322
        tree = self.make_branch_and_tree('.', format=format)
 
1323
        tree.commit('start')
 
1324
        tree.commit('continue')
 
1325
        tree.lock_read()
 
1326
        self.addCleanup(tree.unlock)
 
1327
        packs = tree.branch.repository._pack_collection
 
1328
        packs.ensure_loaded()
 
1329
        self.assertEqual([
 
1330
            packs.get_pack_by_name(packs.names()[0]),
 
1331
            packs.get_pack_by_name(packs.names()[1]),
 
1332
            ], packs.all_packs())
 
1333
 
 
1334
    def test_get_pack_by_name(self):
 
1335
        format = self.get_format()
 
1336
        tree = self.make_branch_and_tree('.', format=format)
 
1337
        tree.commit('start')
 
1338
        tree.lock_read()
 
1339
        self.addCleanup(tree.unlock)
 
1340
        packs = tree.branch.repository._pack_collection
 
1341
        packs.reset()
 
1342
        packs.ensure_loaded()
 
1343
        name = packs.names()[0]
 
1344
        pack_1 = packs.get_pack_by_name(name)
 
1345
        # the pack should be correctly initialised
 
1346
        sizes = packs._names[name]
 
1347
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
 
1348
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
 
1349
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
 
1350
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
 
1351
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
 
1352
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
 
1353
        # and the same instance should be returned on successive calls.
 
1354
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
1355
 
 
1356
    def test_reload_pack_names_new_entry(self):
 
1357
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1358
        names = packs.names()
 
1359
        # Add a new pack file into the repository
 
1360
        rev4 = tree.commit('four')
 
1361
        new_names = tree.branch.repository._pack_collection.names()
 
1362
        new_name = set(new_names).difference(names)
 
1363
        self.assertEqual(1, len(new_name))
 
1364
        new_name = new_name.pop()
 
1365
        # The old collection hasn't noticed yet
 
1366
        self.assertEqual(names, packs.names())
 
1367
        self.assertTrue(packs.reload_pack_names())
 
1368
        self.assertEqual(new_names, packs.names())
 
1369
        # And the repository can access the new revision
 
1370
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
 
1371
        self.assertFalse(packs.reload_pack_names())
 
1372
 
 
1373

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
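        # (_diff_pack_names() yields index nodes keyed by 1-tuples of pack
        # names, hence the x[0][0] unpacking; repeating the assertions after
        # reload_pack_names() shows the pending add and remove both survive.)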

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))
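        # (commit_write_group() returns a hint of the pack names it touched;
        # it is unused here beyond the binding. The listing assertions confirm
        # the forced autopack collapsed everything into one on-disk file.)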

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything.
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
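        # (autopack() returns False here because reloading the pack names
        # mid-operation shows the concurrent pack() already did the work,
        # so there is nothing left to combine.)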

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))
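        # (In both cases the obsoleted pack's .pack file and its index files
        # are renamed into obsolete_packs/ rather than deleted outright, so
        # readers that still reference them keep working until cleanup.)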


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())


class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
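        # (random_name is the pack's temporary identity: data is streamed to
        # the upload transport under that name, and finish() renames the file
        # onto the pack transport under its final, content-derived name, as
        # the md5 _hash above suggests.)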


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
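        # (Moving the packs that hold the requested revisions to the front
        # means their records get copied first, so the new pack keeps the
        # data for those revisions close together.)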


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)
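        # (OptimisingPacker.open_pack() flips these flags via the index
        # builders' set_optimize(for_size=True), trading packing time for
        # smaller on-disk indices in the one-off repack case.)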


class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)
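
    # run_stream drives the stream source/sink API directly, while run_fetch
    # goes through Repository.fetch(), which can route via the
    # InterDifferingSerializer -- the 'IDS' in the test names below.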

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)