/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2009-07-20 08:56:45 UTC
  • mfrom: (4526.9.23 apply-inventory-delta)
  • Revision ID: pqm@pqm.ubuntu.com-20090720085645-54mtgybxua0yx6hw
(robertc) Add checks for inventory deltas which try to ensure that
        deltas that are not an exact fit are not applied. (Robert
        Collins, bug 397705, bug 367633)

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 
2
#
 
3
# This program is free software; you can redistribute it and/or modify
 
4
# it under the terms of the GNU General Public License as published by
 
5
# the Free Software Foundation; either version 2 of the License, or
 
6
# (at your option) any later version.
 
7
#
 
8
# This program is distributed in the hope that it will be useful,
 
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
11
# GNU General Public License for more details.
 
12
#
 
13
# You should have received a copy of the GNU General Public License
 
14
# along with this program; if not, write to the Free Software
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
16
 
 
17
"""Tests for the Repository facility that are not interface tests.
 
18
 
 
19
For interface tests see tests/per_repository/*.py.
 
20
 
 
21
For concrete class tests see this file, and for storage formats tests
 
22
also see this file.
 
23
"""
 
24
 
 
25
from stat import S_ISDIR
 
26
from StringIO import StringIO
 
27
 
 
28
import bzrlib
 
29
from bzrlib.errors import (NotBranchError,
 
30
                           NoSuchFile,
 
31
                           UnknownFormatError,
 
32
                           UnsupportedFormatError,
 
33
                           )
 
34
from bzrlib import (
 
35
    graph,
 
36
    tests,
 
37
    )
 
38
from bzrlib.branchbuilder import BranchBuilder
 
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
40
from bzrlib.index import GraphIndex, InMemoryGraphIndex
 
41
from bzrlib.repository import RepositoryFormat
 
42
from bzrlib.smart import server
 
43
from bzrlib.tests import (
 
44
    TestCase,
 
45
    TestCaseWithTransport,
 
46
    TestSkipped,
 
47
    test_knit,
 
48
    )
 
49
from bzrlib.transport import (
 
50
    fakenfs,
 
51
    get_transport,
 
52
    )
 
53
from bzrlib.transport.memory import MemoryServer
 
54
from bzrlib import (
 
55
    bencode,
 
56
    bzrdir,
 
57
    errors,
 
58
    inventory,
 
59
    osutils,
 
60
    progress,
 
61
    repository,
 
62
    revision as _mod_revision,
 
63
    symbol_versioning,
 
64
    upgrade,
 
65
    workingtree,
 
66
    )
 
67
from bzrlib.repofmt import (
 
68
    groupcompress_repo,
 
69
    knitrepo,
 
70
    pack_repo,
 
71
    weaverepo,
 
72
    )
 
73
 
 
74
 
 
75
class TestDefaultFormat(TestCase):
    """Tests that the default repository format registry entry is honoured."""

    def test_get_set_default_format(self):
        """Setting a new default bzrdir format changes the default repo format.

        Temporarily replaces the 'default' entry in the global
        bzrdir.format_registry with a sample format, checks that repository
        creation then uses the sample format, and restores the registry in a
        finally block so other tests are unaffected.
        """
        old_default = bzrdir.format_registry.get('default')
        private_default = old_default().repository_format.__class__
        old_format = repository.RepositoryFormat.get_default_format()
        self.assertTrue(isinstance(old_format, private_default))
        def make_sample_bzrdir():
            # Factory producing a metadir whose repository format is the
            # instrumented SampleRepositoryFormat defined below.
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
            my_bzrdir.repository_format = SampleRepositoryFormat()
            return my_bzrdir
        bzrdir.format_registry.remove('default')
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
        bzrdir.format_registry.set_default('sample')
        # creating a repository should now create an instrumented dir.
        try:
            # the default branch format is used by the meta dir format
            # which is not the default bzrdir format at this point
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
            result = dir.create_repository()
            # SampleRepositoryFormat.initialize returns this marker string.
            self.assertEqual(result, 'A bzr repository dir')
        finally:
            # Restore the registry exactly as we found it.
            bzrdir.format_registry.remove('default')
            bzrdir.format_registry.remove('sample')
            bzrdir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
                              old_format.__class__)
 
102
 
 
103
 
 
104
class SampleRepositoryFormat(repository.RepositoryFormat):
    """A sample repository format.

    This format is initializable but reports itself as unsupported, to aid
    in testing the open and open(unsupported=True) routines.
    """

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return "Sample .bzr repository format."

    def initialize(self, a_bzrdir, shared=False):
        """Initialize a repository in a BzrDir"""
        transport = a_bzrdir.get_repository_transport(self)
        transport.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):
        """This sample format is deliberately not supported."""
        return False

    def open(self, a_bzrdir, _found=False):
        """Return a marker string instead of a real repository object."""
        return "opened repository."
 
126
 
 
127
 
 
128
class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir
    facility."""

    def test_find_format(self):
        """find_format returns the right format object for a repository."""
        # is the right format object found for a repository?
        # create a branch with a few known format objects.
        # this is not quite the same as
        self.build_tree(["foo/", "bar/"])
        def check_format(format, url):
            dir = format._matchingbzrdir.initialize(url)
            format.initialize(dir)
            t = get_transport(url)
            found_format = repository.RepositoryFormat.find_format(dir)
            # assertTrue replaces the deprecated failUnless alias.
            self.assertTrue(isinstance(found_format, format.__class__))
        check_format(weaverepo.RepositoryFormat7(), "bar")

    def test_find_format_no_repository(self):
        """find_format raises when the control dir has no repository."""
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          repository.RepositoryFormat.find_format,
                          dir)

    def test_find_format_unknown_format(self):
        """find_format raises UnknownFormatError for unregistered formats."""
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          repository.RepositoryFormat.find_format,
                          dir)

    def test_register_unregister_format(self):
        """Formats can be registered and unregistered; unsupported formats
        are refused by Repository.open but accessible via format.open."""
        format = SampleRepositoryFormat()
        # make a control dir
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        # make a repo
        format.initialize(dir)
        # register a format for it.
        repository.RepositoryFormat.register_format(format)
        # which repository.Open will refuse (not supported)
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
        # but open(unsupported) will work
        self.assertEqual(format.open(dir), "opened repository.")
        # unregister the format
        repository.RepositoryFormat.unregister_format(format)
 
171
 
 
172
 
 
173
class TestFormat6(TestCaseWithTransport):
    """Tests for the weave-based repository format 6."""

    def test_attribute__fetch_order(self):
        """Weaves need topological data insertion."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Weaves do not reuse deltas."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual(False, repo._format._fetch_uses_deltas)

    def test_attribute__fetch_reconcile(self):
        """Weave repositories need a reconcile after fetch."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual(True, repo._format._fetch_reconcile)

    def test_no_ancestry_weave(self):
        """A freshly initialized format-6 repo has no ancestry.weave file."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        # We no longer need to create the ancestry.weave file
        # since it is *never* used.
        self.assertRaises(NoSuchFile,
                          control.transport.get,
                          'ancestry.weave')

    def test_supports_external_lookups(self):
        """Format 6 does not support stacking/external lookups."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertFalse(repo._format.supports_external_lookups)
 
206
 
 
207
 
 
208
class TestFormat7(TestCaseWithTransport):
    """Tests for the weave-based metadir repository format 7."""

    def test_attribute__fetch_order(self):
        """Weaves need topological data insertion."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Weaves do not reuse deltas."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual(False, repo._format._fetch_uses_deltas)

    def test_attribute__fetch_reconcile(self):
        """Weave repositories need a reconcile after fetch."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual(True, repo._format._fetch_reconcile)

    def test_disk_layout(self):
        """Check the exact on-disk layout a format-7 repository creates."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # we want:
        # format 'Bazaar-NG Repository format 7'
        # lock ''
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        # An empty weave file: header, empty 'w'..'W' body.
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())
        # Creating a file with id Foo:Bar results in a non-escaped file name on
        # disk.
        control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Foo:Bar'], ['file'])
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
        tree.commit('first post', rev_id='first')
        # The ':' in the file id is URL-escaped in the weave file name.
        self.assertEqualDiff(
            '# bzr weave file v5\n'
            'i\n'
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
            'n first\n'
            '\n'
            'w\n'
            '{ 0\n'
            '. content\n'
            '}\n'
            'W\n',
            t.get('weaves/74/Foo%3ABar.weave').read())

    def test_shared_disk_layout(self):
        """A shared format-7 repository adds a shared-storage marker file."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        # we want:
        # format 'Bazaar-NG Repository format 7'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        # lock is not present when unlocked
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())
        self.assertFalse(t.has('branch-lock'))

    def test_creates_lockdir(self):
        """Make sure it appears to be controlled by a LockDir existence"""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        t = control.get_repository_transport(None)
        # TODO: Should check there is a 'lock' toplevel directory,
        # regardless of contents
        self.assertFalse(t.has('lock/held/info'))
        repo.lock_write()
        try:
            self.assertTrue(t.has('lock/held/info'))
        finally:
            # unlock so we don't get a warning about failing to do so
            repo.unlock()

    def test_uses_lockdir(self):
        """repo format 7 actually locks on lockdir"""
        base_url = self.get_url()
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        t = control.get_repository_transport(None)
        repo.lock_write()
        repo.unlock()
        del repo
        # make sure the same lock is created by opening it
        repo = repository.Repository.open(base_url)
        repo.lock_write()
        self.assertTrue(t.has('lock/held/info'))
        repo.unlock()
        self.assertFalse(t.has('lock/held/info'))

    def test_shared_no_tree_disk_layout(self):
        """set_make_working_trees(False) adds a no-working-trees marker."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # we want:
        # format 'Bazaar-NG Repository format 7'
        # lock ''
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        ## self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        # Re-enabling working trees removes the marker again.
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             'w\n'
                             'W\n',
                             t.get('inventory.weave').read())

    def test_supports_external_lookups(self):
        """Format 7 does not support stacking/external lookups."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertFalse(repo._format.supports_external_lookups)
 
352
 
 
353
 
 
354
class TestFormatKnit1(TestCaseWithTransport):
    """Tests for the knit-based repository format 1."""

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_disk_layout(self):
        """Check the exact on-disk layout a knit-1 repository creates."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)
        # Check per-file knits.
        branch = control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
        tree.commit('1st post', rev_id='foo')
        # Knit file ids are doubly escaped on disk (%25 is an escaped '%').
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
            '\nfoo fulltext 0 81  :')

    def assertHasKnit(self, t, knit_name, extra_content=''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        """check knit content for a repository."""
        self.assertHasKnit(t, 'inventory')
        self.assertHasKnit(t, 'revisions')
        self.assertHasKnit(t, 'signatures')

    def test_shared_disk_layout(self):
        """A shared knit-1 repository adds a shared-storage marker file."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_shared_no_tree_disk_layout(self):
        """set_make_working_trees(False) adds a no-working-trees marker."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock ''
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        # Re-enabling working trees removes the marker again.
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_deserialise_sets_root_revision(self):
        """We must have a inventory.root.revision

        Old versions of the XML5 serializer did not set the revision_id for
        the whole inventory. So we grab the one from the expected text. Which
        is valid when the api is not being abused.
        """
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = '<inventory format="5">\n</inventory>\n'
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
        self.assertEqual('test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
                   '</inventory>\n')
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo.deserialise_inventory,
            'test-rev-id', inv_xml)
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        """The knit-1 format does not support stacking/external lookups."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)
 
480
 
 
481
 
 
482
class DummyRepository(object):
    """A stand-in repository object used only by tests in this module.

    It carries just enough surface (format, serializer, a few probe
    methods) for InterRepository compatibility checks to run against it.
    """

    # Class-level placeholders for attributes a real repository provides;
    # individual tests overwrite them on instances as needed.
    _format = None
    _serializer = None

    def supports_rich_root(self):
        """Dummy repositories never claim rich-root support."""
        return False

    def get_graph(self):
        """Unimplemented: tests never walk a dummy repository's graph."""
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        """Unimplemented: tests never ask a dummy repository for parents."""
        raise NotImplementedError
 
496
 
 
497
 
 
498
class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing where we use DummyRepository as
    repositories so that none of the default registered inter-repository
    classes will match.
    """

    @staticmethod
    def is_compatible(repo_source, repo_target):
        """InterDummy is compatible only when both sides are DummyRepository."""
        return (isinstance(repo_source, DummyRepository) and
            isinstance(repo_target, DummyRepository))
 
511
 
 
512
 
 
513
class TestInterRepository(TestCaseWithTransport):
    """Tests for InterRepository optimiser selection and registration."""

    def test_get_default_inter_repository(self):
        """An unmatched pair of repositories gets the default optimiser."""
        # test that the InterRepository.get(repo_a, repo_b) probes
        # for a inter_repo class where is_compatible(repo_a, repo_b) returns
        # true and returns a default inter_repo otherwise.
        # This also tests that the default registered optimised interrepository
        # classes do not barf inappropriately when a surprising repository type
        # is handed to them.
        dummy_a = DummyRepository()
        dummy_b = DummyRepository()
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.

        The effective default is now InterSameDataRepository because there is
        no actual sane default in the presence of incompatible data models.
        """
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
        self.assertEqual(repository.InterSameDataRepository,
                         inter_repo.__class__)
        self.assertEqual(repo_a, inter_repo.source)
        self.assertEqual(repo_b, inter_repo.target)

    def test_register_inter_repository_class(self):
        """A registered optimiser is selected for compatible pairs only."""
        # test that a optimised code path provider - a
        # InterRepository subclass can be registered and unregistered
        # and that it is correctly selected when given a repository
        # pair that it returns true on for the is_compatible static method
        # check
        dummy_a = DummyRepository()
        dummy_b = DummyRepository()
        repo = self.make_repository('.')
        # hack dummies to look like repo somewhat.
        dummy_a._serializer = repo._serializer
        dummy_b._serializer = repo._serializer
        repository.InterRepository.register_optimiser(InterDummy)
        try:
            # we should get the default for something InterDummy returns False
            # to
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
            self.assertGetsDefaultInterRepository(dummy_a, repo)
            # and we should get an InterDummy for a pair it 'likes'
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
            self.assertEqual(InterDummy, inter_repo.__class__)
            self.assertEqual(dummy_a, inter_repo.source)
            self.assertEqual(dummy_b, inter_repo.target)
        finally:
            # Always unregister so other tests see a clean optimiser list.
            repository.InterRepository.unregister_optimiser(InterDummy)
        # now we should get the default InterRepository object again.
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
 
566
 
 
567
 
 
568
class TestInterWeaveRepo(TestCaseWithTransport):
    """Tests for the InterWeaveRepo optimised inter-repository path."""

    def test_is_compatible_and_registered(self):
        """InterWeaveRepo matches exactly when both sides are format 5/6/7."""
        # InterWeaveRepo is compatible when either side
        # is a format 5/6/7 branch
        # (knitrepo and weaverepo are already imported at module level;
        # the redundant function-local import has been removed.)
        formats = [weaverepo.RepositoryFormat5(),
                   weaverepo.RepositoryFormat6(),
                   weaverepo.RepositoryFormat7()]
        incompatible_formats = [weaverepo.RepositoryFormat4(),
                                knitrepo.RepositoryFormatKnit1(),
                                ]
        repo_a = self.make_repository('a')
        repo_b = self.make_repository('b')
        is_compatible = repository.InterWeaveRepo.is_compatible
        for source in incompatible_formats:
            # force incompatible left then right
            repo_a._format = source
            repo_b._format = formats[0]
            self.assertFalse(is_compatible(repo_a, repo_b))
            self.assertFalse(is_compatible(repo_b, repo_a))
        # Every pairing of compatible formats must match.
        for source in formats:
            repo_a._format = source
            for target in formats:
                repo_b._format = target
                self.assertTrue(is_compatible(repo_a, repo_b))
        # And the registry actually hands back InterWeaveRepo for such a pair.
        self.assertEqual(repository.InterWeaveRepo,
                         repository.InterRepository.get(repo_a,
                                                        repo_b).__class__)
 
597
 
 
598
 
 
599
class TestRepositoryConverter(TestCaseWithTransport):
    """Tests for converting a repository between formats with CopyConverter."""

    def test_convert_empty(self):
        """An empty weave repository converts cleanly to the knit format."""
        transport = get_transport(self.get_url('.'))
        transport.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
        target_format = knitrepo.RepositoryFormatKnit1()
        converter = repository.CopyConverter(target_format)
        progress = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            converter.convert(repo, progress)
        finally:
            # Always finish the progress bar, even if conversion blew up.
            progress.finished()
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))
 
615
 
 
616
 
 
617
class TestMisc(TestCase):
    """Miscellaneous small repository-module tests."""

    def test_unescape_xml(self):
        """We get some kind of error when malformed entities are passed.

        'foo&bar;' contains an entity reference that is not in the
        unescape map, so _unescape_xml raises KeyError.
        """
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
622
 
 
623
 
 
624
class TestRepositoryFormatKnit3(TestCaseWithTransport):
    """Tests for the rich-root knit repository format 3."""

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            # Knit1 stores no text for the tree root, so reading it fails.
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            # After upgrade the root text exists and can be read.
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        # The unchanged root keeps the revision that last modified it.
        self.assertEqual('dull', revision_tree.inventory.root.revision)

    def test_supports_external_lookups(self):
        """Knit3 does not support stacking/external lookups."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)
 
674
 
 
675
 
 
676
class Test2a(TestCaseWithTransport):
    """Tests specific to the 2a (groupcompress + CHK inventory) format."""

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_tree('repo', format="2a")
        revid = tree.commit("foo")
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        # Both CHK maps should be created with the default 64KiB page size.
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        tree = self.make_branch_and_tree('tree', format='2a')
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_tree('tree', format='2a')
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        # Renamed from 'all' so the builtin all() is not shadowed.
        all_names = tree.branch.repository._pack_collection.names()
        combine = list(set(all_names) - set(to_keep))
        self.assertLength(3, all_names)
        self.assertLength(2, combine)
        # Packing with a hint should only combine the hinted packs, leaving
        # the untouched pack alone.
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        # 2a -> 2a uses the groupcompress-aware stream source.
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Non-chk substreams must still be consumed to drive the
                # stream to completion.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        # Only the revisions index treats an inconsistent re-add as fatal;
        # the other stores tolerate duplicates.
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
807
 
 
808
 
 
809
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Which stream source class is selected for format pairings.

    Identical pack formats get the optimised KnitPackStreamSource; any
    mismatch falls back to the generic repository.StreamSource.
    """

    def _stream_source_for(self, source_format, target_format):
        # Build a source and a target repository, then ask the source which
        # stream source it would use for the target's format.
        source = self.make_repository('source', format=source_format)
        target = self.make_repository('target', format=target_format)
        return source._get_source(target._format)

    def test_source_to_exact_pack_092(self):
        stream_source = self._stream_source_for('pack-0.92', 'pack-0.92')
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        stream_source = self._stream_source_for('rich-root-pack',
                                                'rich-root-pack')
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        stream_source = self._stream_source_for('1.9', '1.9')
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        stream_source = self._stream_source_for('1.9-rich-root',
                                                '1.9-rich-root')
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        # The same exact-format choice is made when the target is reached
        # through the smart server.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        stream = self._stream_source_for('pack-0.92', '1.9')
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        stream = self._stream_source_for('1.9', '1.9-rich-root')
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        # Mismatched formats over the smart server also fall back to the
        # generic StreamSource.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        stream = self._stream_source_for('pack-0.92', 'dirstate')
        self.assertIs(type(stream), repository.StreamSource)
 
870
 
 
871
 
 
872
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        # Every test builds on a branch with a single 'initial' root commit.
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        # Compare order-insensitively: _find_parent_ids_of_revisions makes
        # no ordering guarantee.
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        # The parent of a single requested revision is returned.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        # Parents that are themselves inside the requested set are excluded.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        # 'initial' has only the null revision as ancestor; NULL_REVISION is
        # never reported as a parent.
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        # Asking about NULL_REVISION itself yields no parents.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        # A ghost listed in the requested set is ignored; only the real
        # revision's parent is returned.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        # A ghost *parent* of a requested revision is still reported.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        # Right-hand (merge) parents outside the set are reported too.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
929
 
 
930
 
 
931
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Return a repository with deliberately inconsistent history data.

        The repository contains an inventory without a revision (rev1b), a
        text with an unreferenced ancestor, and references to a ghost
        revision (rev1c).
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            # PEP 8: space after comma (was 'rev1a',revision).
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Run cleanups in reverse acquisition order: commit the write
            # group before unlocking.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add inv (with its root revision set) and a matching Revision."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        # PEP 8: space after comma (was revision_id,revision).
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add an inventory entry and a one-line text for filename to repo."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
        # about why this was turned into expectFailure
        self.expectFailure('new Stream fetch fills in missing compression'
           ' parents (bug #389141)',
           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
                              empty_repo.fetch, broken_repo)
        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
                          empty_repo.fetch, broken_repo)
 
1019
 
 
1020
 
 
1021
class TestRepositoryPackCollection(TestCaseWithTransport):
    """Tests for the pack collection: distribution, naming and reloading."""

    def get_format(self):
        # pack-0.92 is the baseline pack-based repository format.
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        # Open a second, independent repository object on the same transport
        # so the two pack collections can get out of sync with each other.
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        # Reading pack names requires the repository to be locked.
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        # Below ten revisions every revision gets its own pack.
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        # Existing packs that already fit the distribution are not repacked.
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # becombined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        # A second reload with nothing changed reports no update.
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        # Force autopack into thinking a single new pack is wanted.
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
 
1263
 
 
1264
 
 
1265
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert == holds both ways and != is its exact negation."""
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert == fails both ways and != is its exact negation."""
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # Changing any single attribute on one side must break equality, and
        # matching it on the other side must restore it.  Walk every
        # attribute that participates in the comparison, in turn.
        for attribute in ['revision_index', 'inventory_index', 'text_index',
                          'signature_index', 'name', 'transport']:
            setattr(left, attribute, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attribute, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        # The on-disk file name is the pack's name plus a '.pack' suffix.
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
 
1313
 
 
1314
 
 
1315
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        # A freshly created NewPack exposes the transports and the index
        # builder class of the collection it was created for.
        t_upload = self.get_transport('upload')
        t_pack = self.get_transport('pack')
        t_index = self.get_transport('index')
        t_upload.mkdir('.')
        pack_collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=t_index,
            upload_transport=t_upload,
            pack_transport=t_pack,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        new_pack = pack_repo.NewPack(pack_collection)
        # Index builders come from the collection's index_builder_class.
        self.assertIsInstance(new_pack.revision_index, BTreeBuilder)
        self.assertIsInstance(new_pack.inventory_index, BTreeBuilder)
        # _hash is an md5 object.
        self.assertIsInstance(new_pack._hash, type(osutils.md5()))
        # The three transports are shared with the collection, not copied.
        self.assertTrue(new_pack.upload_transport is t_upload)
        self.assertTrue(new_pack.index_transport is t_index)
        self.assertTrue(new_pack.pack_transport is t_pack)
        # Index sizes are unknown until the pack is finished.
        self.assertEqual(None, new_pack.index_sizes)
        # Uploads use a 20-character random name until renamed into place.
        self.assertEqual(20, len(new_pack.random_name))
        self.assertIsInstance(new_pack.random_name, str)
        self.assertIsInstance(new_pack.start_time, float)
 
1343
 
 
1344
 
 
1345
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packer.pack() reorders its source packs to favour the requested
        revisions.
        """
        builder = self.make_branch_builder('.')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        # pack() is called for its side effect of reordering packer.packs;
        # its return value (the newly created pack) is not needed here.
        packer.pack()
        self.assertEqual(new_packs, packer.packs)
 
1378
 
 
1379
 
 
1380
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a freshly made repository."""
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        # A pack opened by an OptimisingPacker asks all four of its index
        # builders to optimise for size.
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        for index in (new_pack.revision_index, new_pack.inventory_index,
                      new_pack.text_index, new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
 
1396
 
 
1397
 
 
1398
class TestCrossFormatPacks(TestCaseWithTransport):
 
1399
 
 
1400
    def log_pack(self, hint=None):
 
1401
        self.calls.append(('pack', hint))
 
1402
        self.orig_pack(hint=hint)
 
1403
        if self.expect_hint:
 
1404
            self.assertTrue(hint)
 
1405
 
 
1406
    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
 
1407
        self.expect_hint = expect_pack_called
 
1408
        self.calls = []
 
1409
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1410
        source_tree.lock_write()
 
1411
        self.addCleanup(source_tree.unlock)
 
1412
        tip = source_tree.commit('foo')
 
1413
        target = self.make_repository('target', format=target_fmt)
 
1414
        target.lock_write()
 
1415
        self.addCleanup(target.unlock)
 
1416
        source = source_tree.branch.repository._get_source(target._format)
 
1417
        self.orig_pack = target.pack
 
1418
        target.pack = self.log_pack
 
1419
        search = target.search_missing_revision_ids(
 
1420
            source_tree.branch.repository, tip)
 
1421
        stream = source.get_stream(search)
 
1422
        from_format = source_tree.branch.repository._format
 
1423
        sink = target._get_sink()
 
1424
        sink.insert_stream(stream, from_format, [])
 
1425
        if expect_pack_called:
 
1426
            self.assertLength(1, self.calls)
 
1427
        else:
 
1428
            self.assertLength(0, self.calls)
 
1429
 
 
1430
    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
 
1431
        self.expect_hint = expect_pack_called
 
1432
        self.calls = []
 
1433
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1434
        source_tree.lock_write()
 
1435
        self.addCleanup(source_tree.unlock)
 
1436
        tip = source_tree.commit('foo')
 
1437
        target = self.make_repository('target', format=target_fmt)
 
1438
        target.lock_write()
 
1439
        self.addCleanup(target.unlock)
 
1440
        source = source_tree.branch.repository
 
1441
        self.orig_pack = target.pack
 
1442
        target.pack = self.log_pack
 
1443
        target.fetch(source)
 
1444
        if expect_pack_called:
 
1445
            self.assertLength(1, self.calls)
 
1446
        else:
 
1447
            self.assertLength(0, self.calls)
 
1448
 
 
1449
    def test_sink_format_hint_no(self):
 
1450
        # When the target format says packing makes no difference, pack is not
 
1451
        # called.
 
1452
        self.run_stream('1.9', 'rich-root-pack', False)
 
1453
 
 
1454
    def test_sink_format_hint_yes(self):
 
1455
        # When the target format says packing makes a difference, pack is
 
1456
        # called.
 
1457
        self.run_stream('1.9', '2a', True)
 
1458
 
 
1459
    def test_sink_format_same_no(self):
 
1460
        # When the formats are the same, pack is not called.
 
1461
        self.run_stream('2a', '2a', False)
 
1462
 
 
1463
    def test_IDS_format_hint_no(self):
 
1464
        # When the target format says packing makes no difference, pack is not
 
1465
        # called.
 
1466
        self.run_fetch('1.9', 'rich-root-pack', False)
 
1467
 
 
1468
    def test_IDS_format_hint_yes(self):
 
1469
        # When the target format says packing makes a difference, pack is
 
1470
        # called.
 
1471
        self.run_fetch('1.9', '2a', True)
 
1472
 
 
1473
    def test_IDS_format_same_no(self):
 
1474
        # When the formats are the same, pack is not called.
 
1475
        self.run_fetch('2a', '2a', False)